mirror of
https://github.com/samsonjs/vdirsyncer.git
synced 2026-03-25 08:55:50 +00:00
Compare commits
512 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c3262d88cc | ||
| cbb4e314f6 | |||
|
|
ac9919d865 | ||
|
|
b124ce835b | ||
|
|
6708dbbbdc | ||
|
|
81d8444810 | ||
|
|
4990cdf229 | ||
|
|
4c2c60402e | ||
|
|
2f4f4ac72b | ||
|
|
6354db82c4 | ||
|
|
a9b6488dac | ||
|
|
a4ceabf80b | ||
|
|
3488f77cd6 | ||
|
|
19120422a7 | ||
|
|
2e619806a0 | ||
|
|
4669bede07 | ||
|
|
59c1c55407 | ||
|
|
1502f5b5f4 | ||
|
|
a4d4bf8fd1 | ||
|
|
aab70e9fb0 | ||
|
|
ed88406aec | ||
|
|
ffe883a2f1 | ||
|
|
e5f2869580 | ||
|
|
95bb7bd7f9 | ||
|
|
e3b2473383 | ||
|
|
424cfc5799 | ||
|
|
29312e87c5 | ||
|
|
c77b22334a | ||
|
|
02350c924b | ||
|
|
605f878f9b | ||
|
|
bb2b71da81 | ||
|
|
065ebe4752 | ||
|
|
0d741022a9 | ||
|
|
b5d3b7e578 | ||
|
|
9677cf9812 | ||
|
|
6da84c7881 | ||
|
|
dceb113334 | ||
|
|
01fa614b6b | ||
|
|
20cc1247ed | ||
|
|
2f548e048d | ||
|
|
5d343264f3 | ||
|
|
bc3fa8bd39 | ||
|
|
8803d5a086 | ||
|
|
96754a3d0a | ||
|
|
d42707c108 | ||
|
|
ddfe3cc749 | ||
|
|
84ff0ac943 | ||
|
|
388c16f188 | ||
|
|
78f41d32ce | ||
|
|
164559ad7a | ||
|
|
2c6dc4cddf | ||
|
|
9bbb7fa91a | ||
|
|
f8bcafa9d7 | ||
|
|
162879df21 | ||
|
|
3b9db0e4db | ||
|
|
63d2e6c795 | ||
|
|
03d1c4666d | ||
|
|
ecdd565be4 | ||
|
|
17e43fd633 | ||
|
|
2b4496fea4 | ||
|
|
fc4a02c0c9 | ||
|
|
c19802e4d8 | ||
|
|
cce8fef8de | ||
|
|
9a0dbc8cd0 | ||
|
|
32453cccfc | ||
|
|
057f3af293 | ||
|
|
e76d8a5b03 | ||
|
|
d8961232c4 | ||
|
|
646e0b48a5 | ||
|
|
fb6a859b88 | ||
|
|
ff999b5b74 | ||
|
|
41b48857eb | ||
|
|
70d09e6d5d | ||
|
|
8b063c39cb | ||
|
|
12a06917db | ||
|
|
2fee1d67f2 | ||
|
|
a934d5ec66 | ||
|
|
c79d3680cd | ||
|
|
cd050d57b9 | ||
|
|
8c98992f74 | ||
|
|
c2eed9fb59 | ||
|
|
a490544405 | ||
|
|
688d6f907f | ||
|
|
2e7e31fdbf | ||
|
|
616d7aacb0 | ||
|
|
89129e37b6 | ||
|
|
88722ef4b7 | ||
|
|
35f299679f | ||
|
|
67e1c0ded5 | ||
|
|
89a01631fa | ||
|
|
611b8667a3 | ||
|
|
8550475548 | ||
|
|
cd2445b991 | ||
|
|
5ca2742271 | ||
|
|
5ac9dcec29 | ||
|
|
a513a7e4fa | ||
|
|
5ae05245e6 | ||
|
|
055ed120dd | ||
|
|
31816dc652 | ||
|
|
2e023a5feb | ||
|
|
14afe16a13 | ||
|
|
5766e1c501 | ||
|
|
fade399a21 | ||
|
|
3433f8a034 | ||
|
|
6a3077f9dc | ||
|
|
42c5dba208 | ||
|
|
7991419ab1 | ||
|
|
03e6afe9dc | ||
|
|
762d369560 | ||
|
|
2396c46b04 | ||
|
|
b626236128 | ||
|
|
45b67122fe | ||
|
|
7a387b8efe | ||
|
|
889e1f9ea2 | ||
|
|
d1f93ea0be | ||
|
|
82fd03be64 | ||
|
|
b50f9def00 | ||
|
|
91c16b3215 | ||
|
|
d45ae04006 | ||
|
|
9abf9c8e45 | ||
|
|
0f0e5b97d3 | ||
|
|
301aa0e16f | ||
|
|
dcd3b7a359 | ||
|
|
df8c4a1cf5 | ||
|
|
5a17ec1bba | ||
|
|
ab3aa108fc | ||
|
|
f194bb0a4c | ||
|
|
c073d55b2f | ||
|
|
3611e7d62f | ||
|
|
adc974bdd1 | ||
|
|
efad9eb624 | ||
|
|
246568f149 | ||
|
|
439f1e6f50 | ||
|
|
ef8e8980d1 | ||
|
|
08616abbb5 | ||
|
|
4237ff863c | ||
|
|
1a6ad54543 | ||
|
|
203468fd25 | ||
|
|
6368af1365 | ||
|
|
b38306bdd0 | ||
|
|
d26557bee3 | ||
|
|
b9f749467c | ||
|
|
7e5910a341 | ||
|
|
7403182645 | ||
|
|
bad381e5ba | ||
|
|
700586d959 | ||
|
|
c1d3efb6b8 | ||
|
|
c55b969791 | ||
|
|
079a156bf8 | ||
|
|
242216d85a | ||
|
|
b1ef68089b | ||
|
|
85ae33955f | ||
|
|
54a90aa5dd | ||
|
|
443ae3d3e7 | ||
|
|
3bf9a3d684 | ||
|
|
2138c43456 | ||
|
|
5a46c93987 | ||
|
|
180f91f0fe | ||
|
|
6443d37c97 | ||
|
|
13ca008380 | ||
|
|
24cb49f64c | ||
|
|
defe8e2591 | ||
|
|
e11fa357ff | ||
|
|
e20a65793e | ||
|
|
df14865f43 | ||
|
|
f45ecf6ad0 | ||
|
|
72bcef282d | ||
|
|
3a56f26d05 | ||
|
|
4dd17c7f59 | ||
|
|
73f2554932 | ||
|
|
627f574777 | ||
|
|
37a7f9bea8 | ||
|
|
d2d1532883 | ||
|
|
0dcef26b9d | ||
|
|
d646357cd3 | ||
|
|
8c6c0be15a | ||
|
|
dfc29db312 | ||
|
|
a41cf64b6c | ||
|
|
a2eda52b71 | ||
|
|
61006f0685 | ||
|
|
9b48bccde2 | ||
|
|
7c72caef3f | ||
|
|
0045b23800 | ||
|
|
c07fbc2053 | ||
|
|
e3485beb45 | ||
|
|
0f83fd96d5 | ||
|
|
8980a80560 | ||
|
|
90b6ce1d04 | ||
|
|
7a801d3d5d | ||
|
|
2c44f7d773 | ||
|
|
6506c86f58 | ||
|
|
51b409017d | ||
|
|
84613e73b0 | ||
|
|
a4ef45095e | ||
|
|
63ba948241 | ||
|
|
3067b32de5 | ||
|
|
a87518c474 | ||
|
|
b26e771865 | ||
|
|
2fbb0ab7a5 | ||
|
|
60352f84fe | ||
|
|
b7201013bc | ||
|
|
b61095ad47 | ||
|
|
278e6de8b0 | ||
|
|
843c58b92e | ||
|
|
cd412aa161 | ||
|
|
c5f80d1644 | ||
|
|
c50eabc77e | ||
|
|
a88389c4f1 | ||
|
|
1f7497c9d1 | ||
|
|
baaf737873 | ||
|
|
7c2fed1ceb | ||
|
|
3be048be18 | ||
|
|
f103b10b2a | ||
|
|
e44c704ae3 | ||
|
|
f32e0a9c1f | ||
|
|
24e3625cc0 | ||
|
|
4df54b9231 | ||
|
|
8557c6e0bb | ||
|
|
9fdc93c140 | ||
|
|
f3f8eb6824 | ||
|
|
b18e1c78d2 | ||
|
|
0a4114ef9f | ||
|
|
06f8001d65 | ||
|
|
61f3785e6c | ||
|
|
b0020f9436 | ||
|
|
74d738ec80 | ||
|
|
711eccedab | ||
|
|
5d58a86ba0 | ||
|
|
60c3b59552 | ||
|
|
22a127191d | ||
|
|
dc6e4ba5af | ||
|
|
ea640001d0 | ||
|
|
545b8ce2f1 | ||
|
|
3035d9cfae | ||
|
|
68c5968be8 | ||
|
|
0d1ca319c0 | ||
|
|
817eab51f1 | ||
|
|
e8b72130c2 | ||
|
|
8a44b278d1 | ||
|
|
54a5bf4ad3 | ||
|
|
10659b80ba | ||
|
|
1c6beae9b4 | ||
|
|
7ce9466c46 | ||
|
|
9f0390ee21 | ||
|
|
4e3f39468b | ||
|
|
a7e984f013 | ||
|
|
7c7f97c6b2 | ||
|
|
7e9132b817 | ||
|
|
59b95d9999 | ||
|
|
4dd15716db | ||
|
|
ec101b20d6 | ||
|
|
2c551afafb | ||
|
|
ad7bb82f40 | ||
|
|
961203e865 | ||
|
|
d72536805c | ||
|
|
ac6e19261f | ||
|
|
cbb0cad827 | ||
|
|
2c69f865f0 | ||
|
|
63510414ae | ||
|
|
fce5062a12 | ||
|
|
c77d750ef6 | ||
|
|
02ee9f96e4 | ||
|
|
ddaeccb2ee | ||
|
|
63ef204835 | ||
|
|
7d61cd3e2e | ||
|
|
db6da70c26 | ||
|
|
bf95bf2941 | ||
|
|
b3c9df1b1d | ||
|
|
87574be547 | ||
|
|
2e35214421 | ||
|
|
f5c2026dcf | ||
|
|
acf29cf659 | ||
|
|
fbd5ff88d5 | ||
|
|
7605416054 | ||
|
|
7a12e6028c | ||
|
|
d6876c6bad | ||
|
|
09eb375c5b | ||
|
|
48747463ed | ||
|
|
955f434d9d | ||
|
|
889183ec89 | ||
|
|
0650cc3bc2 | ||
|
|
6281e7a237 | ||
|
|
dff48f101b | ||
|
|
1081a15895 | ||
|
|
cf1d082628 | ||
|
|
54e829262d | ||
|
|
8830307e38 | ||
|
|
7a7deffa2c | ||
|
|
ecb181d9d7 | ||
|
|
fed1ee69c3 | ||
|
|
48aa4912a2 | ||
|
|
8886854367 | ||
|
|
a910e9f446 | ||
|
|
f3714fc493 | ||
|
|
6af4dd124b | ||
|
|
bc5e03630e | ||
|
|
6491bc53fb | ||
|
|
21eccfc2ef | ||
|
|
7b493416f7 | ||
|
|
5b8f00e720 | ||
|
|
0556e53e0c | ||
|
|
7379a96f73 | ||
|
|
c0ccb3d1aa | ||
|
|
98fcd12fa7 | ||
|
|
f2a1afe6d3 | ||
|
|
e16f83c1c2 | ||
|
|
55c563ff8c | ||
|
|
a5731b269e | ||
|
|
459efbf7af | ||
|
|
58aa0a3a31 | ||
|
|
177748d3d1 | ||
|
|
61edfc090e | ||
|
|
b3bee77c17 | ||
|
|
21db2547cb | ||
|
|
be131a0063 | ||
|
|
71879045e4 | ||
|
|
54e0c114fa | ||
|
|
17f422c1b7 | ||
|
|
a9f1a5195a | ||
|
|
8dab258ef0 | ||
|
|
f09d060580 | ||
|
|
ef2419efa9 | ||
|
|
2eff8e08e1 | ||
|
|
152ebb05dd | ||
|
|
5a9fc2cc7e | ||
|
|
dfed9794cb | ||
|
|
8d69b73c9e | ||
|
|
1a1f6f0788 | ||
|
|
7c9170c677 | ||
|
|
623c0537e1 | ||
|
|
4930b5f389 | ||
|
|
25435ce11d | ||
|
|
1f6cc6f8be | ||
|
|
59b6e24795 | ||
|
|
722dace828 | ||
|
|
6cebba0853 | ||
|
|
29528123a3 | ||
|
|
72618e374d | ||
|
|
c254b4ad1d | ||
|
|
cfd5af832a | ||
|
|
342cb863cd | ||
|
|
e1c979751d | ||
|
|
3191886658 | ||
|
|
3260afb495 | ||
|
|
0231f3999e | ||
|
|
d1b148c919 | ||
|
|
e70e8c03e8 | ||
|
|
46a49e3481 | ||
|
|
5e36ca1334 | ||
|
|
910317d4bb | ||
|
|
29c2b6bb4b | ||
|
|
2e4fc7c65a | ||
|
|
87f3a594c6 | ||
|
|
d95a8264f4 | ||
|
|
77d64ddc2c | ||
|
|
808e01f9c8 | ||
|
|
86535a9db3 | ||
|
|
57d662cba1 | ||
|
|
8d62ac4279 | ||
|
|
3bf4bd079d | ||
|
|
39ccc168b2 | ||
|
|
7b0d6671da | ||
|
|
8e8c1d5719 | ||
|
|
299c699cb9 | ||
|
|
8cd4a44d02 | ||
|
|
b0f08e051a | ||
|
|
4450393d4f | ||
|
|
6c80293a98 | ||
|
|
a9fa61040f | ||
|
|
ee124f5c82 | ||
|
|
5a97307a2f | ||
|
|
e467809bb0 | ||
|
|
320ac6020b | ||
|
|
9d3ef030fa | ||
|
|
eec142ac15 | ||
|
|
96f1c41bee | ||
|
|
a0b814ec17 | ||
|
|
91ffb931e1 | ||
|
|
bba9d43caf | ||
|
|
ec221b52b4 | ||
|
|
2336076baf | ||
|
|
d747977af2 | ||
|
|
0e47775ce3 | ||
|
|
d6c1b1847c | ||
|
|
95bf683771 | ||
|
|
5844480588 | ||
|
|
b9f5d88af9 | ||
|
|
5c00cceeb4 | ||
|
|
5e3e57ffc2 | ||
|
|
addab47786 | ||
|
|
32bc8d9046 | ||
|
|
0d0f2974ae | ||
|
|
9a1582cc0f | ||
|
|
9b5e01ab38 | ||
|
|
81895c291e | ||
|
|
439e63f8ea | ||
|
|
804b9f0429 | ||
|
|
44e4beb06f | ||
|
|
94f8d608ec | ||
|
|
d2d41e5df1 | ||
|
|
abf199f21e | ||
|
|
75719ecc66 | ||
|
|
9513ee7f61 | ||
|
|
a68121e439 | ||
|
|
e355b3336b | ||
|
|
b435465bc7 | ||
|
|
56688a6c50 | ||
|
|
bc002a700e | ||
|
|
12c860978b | ||
|
|
fdc12d561c | ||
|
|
f549b1d706 | ||
|
|
90e8b7b0bc | ||
|
|
c5a59ab10b | ||
|
|
2685f8db68 | ||
|
|
98d28ea2c0 | ||
|
|
ed0b4bef10 | ||
|
|
b9c01f8756 | ||
|
|
de867fcda2 | ||
|
|
1f066ca6ca | ||
|
|
55af4eaf80 | ||
|
|
2161de30d0 | ||
|
|
ef34d77ab1 | ||
|
|
255ea8f9bf | ||
|
|
8608f37fbb | ||
|
|
5ed9c821b8 | ||
|
|
53878f001a | ||
|
|
9df587df26 | ||
|
|
8ac4a00306 | ||
|
|
7750dda980 | ||
|
|
bf67af609e | ||
|
|
50cb2def73 | ||
|
|
6897995080 | ||
|
|
31c60021fa | ||
|
|
a42906b0e8 | ||
|
|
f9c6602684 | ||
|
|
74bb2ffb66 | ||
|
|
b5d4d3f9a9 | ||
|
|
f79647b29c | ||
|
|
1de3632620 | ||
|
|
aeb46ab5a9 | ||
|
|
27ebb0902b | ||
|
|
f281f956f1 | ||
|
|
83e5361643 | ||
|
|
2650a7ed0b | ||
|
|
68ff37e677 | ||
|
|
14deb20ce5 | ||
|
|
5eef4b1ba1 | ||
|
|
7577fa2117 | ||
|
|
1031b07349 | ||
|
|
47caebe843 | ||
|
|
3eb9ce5ae4 | ||
|
|
b1b4dd92fe | ||
|
|
9cb1f8d704 | ||
|
|
56b1fc2187 | ||
|
|
b5dd0929d0 | ||
|
|
d854bd62eb | ||
|
|
f6e6b0b6c3 | ||
|
|
399274286e | ||
|
|
be59ba5ab4 | ||
|
|
6e59ee0b5f | ||
|
|
82375f20aa | ||
|
|
cd86ea7a62 | ||
|
|
354aaec2e0 | ||
|
|
af3659ac1a | ||
|
|
50eefa1816 | ||
|
|
53331fedee | ||
|
|
88f2cd5b53 | ||
|
|
4f894e04dd | ||
|
|
0319035688 | ||
|
|
6c6da2f613 | ||
|
|
b0d8fd34dc | ||
|
|
0f3b2e74c0 | ||
|
|
c410fbf331 | ||
|
|
f1f51ac3cf | ||
|
|
3037c15a65 | ||
|
|
e5caf6750d | ||
|
|
f0fe104427 | ||
|
|
5c3900500d | ||
|
|
6befffcc45 | ||
|
|
22717ee217 | ||
|
|
c78ec6b3bd | ||
|
|
289f60da44 | ||
|
|
69e235c35d | ||
|
|
5a2032d6d9 | ||
|
|
fb68a6c4aa | ||
|
|
107edfd52d | ||
|
|
60e2e9669e | ||
|
|
b1214cd693 | ||
|
|
3d7d92c2d6 | ||
|
|
59740b379f | ||
|
|
461e4c55b0 | ||
|
|
a5b98517e8 | ||
|
|
1e425a590a | ||
|
|
308289febf | ||
|
|
eece9a6bde | ||
|
|
a26d3bb58c | ||
|
|
e2d3c1add7 | ||
|
|
b4bbc5946a | ||
|
|
7e4a0be674 | ||
|
|
aafafaa501 | ||
|
|
9505430b83 | ||
|
|
216ce8d180 | ||
|
|
cb4ba5b38c | ||
|
|
72ea0a6ad3 | ||
|
|
810349eef0 | ||
|
|
65d17bdcbf | ||
|
|
470c2c6630 | ||
|
|
7c04289ed4 | ||
|
|
e987d6eb4a | ||
|
|
558da29e5e | ||
|
|
83fe7d2c8a | ||
|
|
78599a131d |
145 changed files with 6647 additions and 5397 deletions
49
.builds/archlinux-py313.yml
Normal file
49
.builds/archlinux-py313.yml
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
# Run tests using the packaged dependencies on ArchLinux.
|
||||
|
||||
image: archlinux
|
||||
packages:
|
||||
- docker
|
||||
- docker-compose
|
||||
# Build dependencies:
|
||||
- python-wheel
|
||||
- python-build
|
||||
- python-installer
|
||||
- python-setuptools-scm
|
||||
# Runtime dependencies:
|
||||
- python-click
|
||||
- python-click-log
|
||||
- python-click-threading
|
||||
- python-requests
|
||||
- python-aiohttp-oauthlib
|
||||
- python-tenacity
|
||||
# Test dependencies:
|
||||
- python-hypothesis
|
||||
- python-pytest-cov
|
||||
- python-pytest-httpserver
|
||||
- python-trustme
|
||||
- python-pytest-asyncio
|
||||
- python-aiohttp
|
||||
- python-aiostream
|
||||
- python-aioresponses
|
||||
sources:
|
||||
- https://github.com/pimutils/vdirsyncer
|
||||
environment:
|
||||
BUILD: test
|
||||
CI: true
|
||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
||||
DAV_SERVER: radicale xandikos
|
||||
REQUIREMENTS: release
|
||||
# TODO: ETESYNC_TESTS
|
||||
tasks:
|
||||
- check-python:
|
||||
python --version | grep 'Python 3.13'
|
||||
- docker: |
|
||||
sudo systemctl start docker
|
||||
- setup: |
|
||||
cd vdirsyncer
|
||||
python -m build --wheel --skip-dependency-check --no-isolation
|
||||
sudo python -m installer dist/*.whl
|
||||
- test: |
|
||||
cd vdirsyncer
|
||||
make -e ci-test
|
||||
make -e ci-test-storage
|
||||
36
.builds/tests-minimal.yml
Normal file
36
.builds/tests-minimal.yml
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
# Run tests using oldest available dependency versions.
|
||||
#
|
||||
# TODO: It might make more sense to test with an older Ubuntu or Fedora version
|
||||
# here, and consider that our "oldest suppported environment".
|
||||
|
||||
image: alpine/3.19 # python 3.11
|
||||
packages:
|
||||
- docker
|
||||
- docker-cli
|
||||
- docker-compose
|
||||
- py3-pip
|
||||
- python3-dev
|
||||
sources:
|
||||
- https://github.com/pimutils/vdirsyncer
|
||||
environment:
|
||||
BUILD: test
|
||||
CI: true
|
||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
||||
DAV_SERVER: radicale xandikos
|
||||
REQUIREMENTS: minimal
|
||||
tasks:
|
||||
- venv: |
|
||||
python3 -m venv $HOME/venv
|
||||
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
||||
- docker: |
|
||||
sudo addgroup $(whoami) docker
|
||||
sudo service docker start
|
||||
- setup: |
|
||||
cd vdirsyncer
|
||||
# Hack, no idea why it's needed
|
||||
sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
|
||||
make -e install-dev
|
||||
- test: |
|
||||
cd vdirsyncer
|
||||
make -e ci-test
|
||||
make -e ci-test-storage
|
||||
45
.builds/tests-pypi.yml
Normal file
45
.builds/tests-pypi.yml
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
# Run tests using latest dependencies from PyPI
|
||||
|
||||
image: archlinux
|
||||
packages:
|
||||
- docker
|
||||
- docker-compose
|
||||
- python-pip
|
||||
sources:
|
||||
- https://github.com/pimutils/vdirsyncer
|
||||
secrets:
|
||||
- 4d9a6dfe-5c8d-48bd-b864-a2f5d772c536
|
||||
environment:
|
||||
BUILD: test
|
||||
CI: true
|
||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
||||
DAV_SERVER: baikal radicale xandikos
|
||||
REQUIREMENTS: release
|
||||
# TODO: ETESYNC_TESTS
|
||||
tasks:
|
||||
- venv: |
|
||||
python -m venv $HOME/venv
|
||||
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
||||
- docker: |
|
||||
sudo systemctl start docker
|
||||
- setup: |
|
||||
cd vdirsyncer
|
||||
make -e install-dev
|
||||
- test: |
|
||||
cd vdirsyncer
|
||||
make -e ci-test
|
||||
make -e ci-test-storage
|
||||
- check: |
|
||||
cd vdirsyncer
|
||||
make check
|
||||
- check-secrets: |
|
||||
# Stop here if this is a PR. PRs can't run with the below secrets.
|
||||
[ -f ~/fastmail-secrets ] || complete-build
|
||||
- extra-storages: |
|
||||
set +x
|
||||
source ~/fastmail-secrets
|
||||
set -x
|
||||
|
||||
cd vdirsyncer
|
||||
export PATH=$PATH:~/.local/bin/
|
||||
DAV_SERVER=fastmail pytest tests/storage
|
||||
|
|
@ -2,10 +2,3 @@ comment: false
|
|||
coverage:
|
||||
status:
|
||||
patch: false
|
||||
project:
|
||||
unit:
|
||||
flags: unit
|
||||
system:
|
||||
flags: system
|
||||
storage:
|
||||
flags: storage
|
||||
|
|
|
|||
1
.envrc
Normal file
1
.envrc
Normal file
|
|
@ -0,0 +1 @@
|
|||
layout python3
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
|
|
@ -14,3 +14,4 @@ dist
|
|||
docs/_build/
|
||||
vdirsyncer/version.py
|
||||
.hypothesis
|
||||
coverage.xml
|
||||
|
|
|
|||
9
.gitmodules
vendored
9
.gitmodules
vendored
|
|
@ -1,9 +0,0 @@
|
|||
[submodule "tests/storage/servers/baikal"]
|
||||
path = tests/storage/servers/baikal
|
||||
url = https://github.com/vdirsyncer/baikal-testserver
|
||||
[submodule "tests/storage/servers/owncloud"]
|
||||
path = tests/storage/servers/owncloud
|
||||
url = https://github.com/vdirsyncer/owncloud-testserver
|
||||
[submodule "tests/storage/servers/nextcloud"]
|
||||
path = tests/storage/servers/nextcloud
|
||||
url = https://github.com/vdirsyncer/nextcloud-testserver
|
||||
39
.pre-commit-config.yaml
Normal file
39
.pre-commit-config.yaml
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
args: [--markdown-linebreak-ext=md]
|
||||
- id: end-of-file-fixer
|
||||
- id: check-toml
|
||||
- id: check-added-large-files
|
||||
- id: debug-statements
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: "v1.15.0"
|
||||
hooks:
|
||||
- id: mypy
|
||||
files: vdirsyncer/.*
|
||||
additional_dependencies:
|
||||
- types-setuptools
|
||||
- types-docutils
|
||||
- types-requests
|
||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||
rev: 'v0.11.4'
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
- id: ruff-format
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: typos-syncroniz
|
||||
name: typos-syncroniz
|
||||
language: system
|
||||
# Not how you spell "synchronise"
|
||||
entry: sh -c "git grep -i syncroniz"
|
||||
files: ".*/.*"
|
||||
- id: typos-text-icalendar
|
||||
name: typos-text-icalendar
|
||||
language: system
|
||||
# It's "text/calendar", no "i".
|
||||
entry: sh -c "git grep -i 'text/icalendar'"
|
||||
files: ".*/.*"
|
||||
16
.readthedocs.yaml
Normal file
16
.readthedocs.yaml
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
version: 2
|
||||
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
|
||||
build:
|
||||
os: "ubuntu-22.04"
|
||||
tools:
|
||||
python: "3.9"
|
||||
|
||||
python:
|
||||
install:
|
||||
- method: pip
|
||||
path: .
|
||||
extra_requirements:
|
||||
- docs
|
||||
120
.travis.yml
120
.travis.yml
|
|
@ -1,120 +0,0 @@
|
|||
{
|
||||
"branches": {
|
||||
"only": [
|
||||
"auto",
|
||||
"master",
|
||||
"/^.*-maintenance$/"
|
||||
]
|
||||
},
|
||||
"cache": "pip",
|
||||
"dist": "trusty",
|
||||
"git": {
|
||||
"submodules": false
|
||||
},
|
||||
"install": [
|
||||
". scripts/travis-install.sh",
|
||||
"pip install -U pip setuptools",
|
||||
"pip install wheel",
|
||||
"make -e install-dev",
|
||||
"make -e install-$BUILD"
|
||||
],
|
||||
"language": "python",
|
||||
"matrix": {
|
||||
"include": [
|
||||
{
|
||||
"env": "BUILD=style",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=devel ",
|
||||
"python": "3.4"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=devel ",
|
||||
"python": "3.4"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=release ",
|
||||
"python": "3.4"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=release ",
|
||||
"python": "3.4"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=minimal ",
|
||||
"python": "3.4"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=minimal ",
|
||||
"python": "3.4"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=devel ",
|
||||
"python": "3.5"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=devel ",
|
||||
"python": "3.5"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=release ",
|
||||
"python": "3.5"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=release ",
|
||||
"python": "3.5"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=minimal ",
|
||||
"python": "3.5"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=minimal ",
|
||||
"python": "3.5"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=devel ",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=devel ",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=release ",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=release ",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=fastmail REQUIREMENTS=release ",
|
||||
"if": "NOT (type IN (pull_request))",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=radicale REQUIREMENTS=minimal ",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test DAV_SERVER=xandikos REQUIREMENTS=minimal ",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test ETESYNC_TESTS=true REQUIREMENTS=latest",
|
||||
"python": "3.6"
|
||||
},
|
||||
{
|
||||
"env": "BUILD=test",
|
||||
"language": "generic",
|
||||
"os": "osx"
|
||||
}
|
||||
]
|
||||
},
|
||||
"script": [
|
||||
"make -e $BUILD"
|
||||
],
|
||||
"sudo": true
|
||||
}
|
||||
15
AUTHORS.rst
15
AUTHORS.rst
|
|
@ -4,15 +4,26 @@ Contributors
|
|||
In alphabetical order:
|
||||
|
||||
- Ben Boeckel
|
||||
- Bleala
|
||||
- Christian Geier
|
||||
- Clément Mondon
|
||||
- Corey Hinshaw
|
||||
- Kai Herlemann
|
||||
- Hugo Osvaldo Barrera
|
||||
- Jason Cox
|
||||
- Julian Mehne
|
||||
- Malte Kiefer
|
||||
- Marek Marczykowski-Górecki
|
||||
- Markus Unterwaditzer
|
||||
- Michael Adler
|
||||
- rEnr3n
|
||||
- Thomas Weißschuh
|
||||
- Witcher01
|
||||
- samm81
|
||||
|
||||
Additionally `FastMail sponsored a paid account for testing
|
||||
<https://github.com/pimutils/vdirsyncer/issues/571>`_. Thanks!
|
||||
Special thanks goes to:
|
||||
|
||||
* `FastMail <https://github.com/pimutils/vdirsyncer/issues/571>`_ sponsors a
|
||||
paid account for testing their servers.
|
||||
* `Packagecloud <https://packagecloud.io/>`_ provide repositories for
|
||||
vdirsyncer's Debian packages.
|
||||
|
|
|
|||
134
CHANGELOG.rst
134
CHANGELOG.rst
|
|
@ -9,10 +9,140 @@ Package maintainers and users who have to manually update their installation
|
|||
may want to subscribe to `GitHub's tag feed
|
||||
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
|
||||
|
||||
Version 0.21.0
|
||||
==============
|
||||
|
||||
- Implement retrying for ``google`` storage type when a rate limit is reached.
|
||||
- ``tenacity`` is now a required dependency.
|
||||
- Drop support for Python 3.8.
|
||||
- Retry transient network errors for nullipotent requests.
|
||||
|
||||
Version 0.20.0
|
||||
==============
|
||||
|
||||
- Remove dependency on abandoned ``atomicwrites`` library.
|
||||
- Implement ``filter_hook`` for the HTTP storage.
|
||||
- Drop support for Python 3.7.
|
||||
- Add support for Python 3.12 and Python 3.13.
|
||||
- Properly close the status database after using. This especially affects tests,
|
||||
where we were leaking a large amount of file descriptors.
|
||||
- Extend supported versions of ``aiostream`` to include 0.7.x.
|
||||
|
||||
Version 0.19.3
|
||||
==============
|
||||
|
||||
- Added a no_delete option to the storage configuration. :gh:`1090`
|
||||
- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
|
||||
- Add an option to request vCard v4.0. :gh:`1066`
|
||||
- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
|
||||
- A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
|
||||
- Implement digest auth. :gh:`1137`
|
||||
- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`
|
||||
|
||||
Version 0.19.2
|
||||
==============
|
||||
|
||||
- Improve the performance of ``SingleFileStorage``. :gh:`818`
|
||||
- Properly document some caveats of the Google Contacts storage.
|
||||
- Fix crash when using auth certs. :gh:`1033`
|
||||
- The ``filesystem`` storage can be specified with ``type =
|
||||
"filesystem/icalendar"`` or ``type = "filesystem/vcard"``. This has not
|
||||
functional impact, and is merely for forward compatibility with the Rust
|
||||
implementation of vdirsyncer.
|
||||
- Python 3.10 and 3.11 are officially supported.
|
||||
- Instructions for integrating with Google CalDav/CardDav have changed.
|
||||
Applications now need to be registered as "Desktop applications". Using "Web
|
||||
application" no longer works due to changes on Google's side. :gh:`1078`
|
||||
|
||||
Version 0.19.1
|
||||
==============
|
||||
|
||||
- Fixed crash when operating on Google Contacts. :gh:`994`
|
||||
- The ``HTTP_PROXY`` and ``HTTPS_PROXY`` are now respected. :gh:`1031`
|
||||
- Instructions for integrating with Google CalDav/CardDav have changed.
|
||||
Applications now need to be registered as "Web Application". :gh:`975`
|
||||
- Various documentation updates.
|
||||
|
||||
Version 0.19.0
|
||||
==============
|
||||
|
||||
- Add "shell" password fetch strategy to pass command string to a shell.
|
||||
- Add "description" and "order" as metadata. These fetch the CalDAV:
|
||||
calendar-description, ``CardDAV:addressbook-description`` and
|
||||
``apple-ns:calendar-order`` properties respectively.
|
||||
- Add a new ``showconfig`` status. This prints *some* configuration values as
|
||||
JSON. This is intended to be used by external tools and helpers that interact
|
||||
with ``vdirsyncer``, and considered experimental.
|
||||
- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
|
||||
"create", it implicitly creates missing collections during sync without user
|
||||
prompts. This simplifies workflows where collections should be automatically
|
||||
created on both sides.
|
||||
- Update TLS-related tests that were failing due to weak MDs. :gh:`903`
|
||||
- ``pytest-httpserver`` and ``trustme`` are now required for tests.
|
||||
- ``pytest-localserver`` is no longer required for tests.
|
||||
- Multithreaded support has been dropped. The ``"--max-workers`` has been removed.
|
||||
- A new ``asyncio`` backend is now used. So far, this shows substantial speed
|
||||
improvements in ``discovery`` and ``metasync``, but little change in `sync`.
|
||||
This will likely continue improving over time. :gh:`906`
|
||||
- The ``google`` storage types no longer require ``requests-oauthlib``, but
|
||||
require ``python-aiohttp-oauthlib`` instead.
|
||||
- Vdirsyncer no longer includes experimental support for `EteSync
|
||||
<https://www.etesync.com/>`_. The existing integration had not been supported
|
||||
for a long time and no longer worked. Support for external storages may be
|
||||
added if anyone is interested in maintaining an EteSync plugin. EteSync
|
||||
users should consider using `etesync-dav`_.
|
||||
- The ``plist`` for macOS has been dropped. It was broken and homebrew
|
||||
generates their own based on package metadata. macOS users are encouraged to
|
||||
use that as a reference.
|
||||
|
||||
.. _etesync-dav: https://github.com/etesync/etesync-dav
|
||||
|
||||
Changes to SSL configuration
|
||||
----------------------------
|
||||
|
||||
Support for ``md5`` and ``sha1`` certificate fingerprints has been dropped. If
|
||||
you're validating certificate fingerprints, use ``sha256`` instead.
|
||||
|
||||
When using a custom ``verify_fingerprint``, CA validation is always disabled.
|
||||
|
||||
If ``verify_fingerprint`` is unset, CA verification is always active. Disabling
|
||||
both features is insecure and no longer supported.
|
||||
|
||||
The ``verify`` parameter no longer takes boolean values, it is now optional and
|
||||
only takes a string to a custom CA for verification.
|
||||
|
||||
The ``verify`` and ``verify_fingerprint`` will likely be merged into a single
|
||||
parameter in future.
|
||||
|
||||
Version 0.18.0
|
||||
==============
|
||||
|
||||
Note: Version 0.17 has some alpha releases but ultimately was never finalised.
|
||||
0.18 actually continues where 0.16 left off.
|
||||
|
||||
- Support for Python 3.5 and 3.6 has been dropped. This release mostly focuses
|
||||
on keeping vdirsyncer compatible with newer environments.
|
||||
- click 8 and click-threading 0.5.0 are now required.
|
||||
- For those using ``pipsi``, we now recommend using ``pipx``, it's successor.
|
||||
- Python 3.9 is now supported.
|
||||
- Our Debian/Ubuntu build scripts have been updated. New versions should be
|
||||
pushed to those repositories soon.
|
||||
|
||||
Version 0.16.8
|
||||
==============
|
||||
|
||||
*released 09 June 2020*
|
||||
|
||||
- Support Python 3.7 and 3.8.
|
||||
|
||||
This release is functionally identical to 0.16.7.
|
||||
It's been tested with recent Python versions, and has been marked as supporting
|
||||
them. It will also be the final release supporting Python 3.5 and 3.6.
|
||||
|
||||
Version 0.16.7
|
||||
==============
|
||||
|
||||
*released on July 19*
|
||||
*released on 19 July 2018*
|
||||
|
||||
- Fixes for Python 3.7
|
||||
|
||||
|
|
@ -120,7 +250,7 @@ Version 0.14.0
|
|||
exit code in such situations is still non-zero.
|
||||
- Add ``partial_sync`` option to pair section. See :ref:`the config docs
|
||||
<partial_sync_def>`.
|
||||
- Vdirsyner will now warn if there's a string without quotes in your config.
|
||||
- Vdirsyncer will now warn if there's a string without quotes in your config.
|
||||
Please file issues if you find documentation that uses unquoted strings.
|
||||
- Fix an issue that would break khal's config setup wizard.
|
||||
|
||||
|
|
|
|||
2
LICENSE
2
LICENSE
|
|
@ -1,4 +1,4 @@
|
|||
Copyright (c) 2014-2016 by Markus Unterwaditzer & contributors. See
|
||||
Copyright (c) 2014-2020 by Markus Unterwaditzer & contributors. See
|
||||
AUTHORS.rst for more details.
|
||||
|
||||
Some rights reserved.
|
||||
|
|
|
|||
|
|
@ -1,8 +1,7 @@
|
|||
# setuptools-scm includes everything tracked by git
|
||||
prune contrib
|
||||
prune docker
|
||||
prune scripts
|
||||
prune tests/storage/servers
|
||||
prune tests/storage/etesync
|
||||
recursive-include tests/storage/servers/radicale *
|
||||
recursive-include tests/storage/servers/skip *
|
||||
|
||||
|
|
|
|||
117
Makefile
117
Makefile
|
|
@ -12,10 +12,7 @@ export REQUIREMENTS := release
|
|||
# Set this to true if you run vdirsyncer's test as part of e.g. packaging.
|
||||
export DETERMINISTIC_TESTS := false
|
||||
|
||||
# Run the etesync testsuite.
|
||||
export ETESYNC_TESTS := false
|
||||
|
||||
# Assume to run in Travis. Don't use this outside of a virtual machine. It will
|
||||
# Assume to run in CI. Don't use this outside of a virtual machine. It will
|
||||
# heavily "pollute" your system, such as attempting to install a new Python
|
||||
# systemwide.
|
||||
export CI := false
|
||||
|
|
@ -23,88 +20,30 @@ export CI := false
|
|||
# Whether to generate coverage data while running tests.
|
||||
export COVERAGE := $(CI)
|
||||
|
||||
# Additional arguments that should be passed to py.test.
|
||||
PYTEST_ARGS =
|
||||
|
||||
# Variables below this line are not very interesting for getting started.
|
||||
|
||||
TEST_EXTRA_PACKAGES =
|
||||
|
||||
ifeq ($(COVERAGE), true)
|
||||
TEST_EXTRA_PACKAGES += pytest-cov
|
||||
PYTEST_ARGS += --cov-config .coveragerc --cov vdirsyncer
|
||||
endif
|
||||
|
||||
ifeq ($(ETESYNC_TESTS), true)
|
||||
TEST_EXTRA_PACKAGES += git+https://github.com/etesync/journal-manager
|
||||
TEST_EXTRA_PACKAGES += django djangorestframework wsgi_intercept drf-nested-routers
|
||||
endif
|
||||
|
||||
PYTEST = py.test $(PYTEST_ARGS)
|
||||
|
||||
export TESTSERVER_BASE := ./tests/storage/servers/
|
||||
CODECOV_PATH = /tmp/codecov.sh
|
||||
|
||||
ifeq ($(CI), true)
|
||||
test:
|
||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
||||
$(PYTEST) tests/unit/
|
||||
bash $(CODECOV_PATH) -c -F unit
|
||||
$(PYTEST) tests/system/
|
||||
bash $(CODECOV_PATH) -c -F system
|
||||
$(PYTEST) tests/storage/
|
||||
bash $(CODECOV_PATH) -c -F storage
|
||||
else
|
||||
test:
|
||||
$(PYTEST)
|
||||
endif
|
||||
|
||||
all:
|
||||
$(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation)
|
||||
|
||||
install-servers:
|
||||
ci-test:
|
||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
||||
pytest --cov vdirsyncer --cov-append tests/unit/ tests/system/
|
||||
bash $(CODECOV_PATH) -c
|
||||
|
||||
ci-test-storage:
|
||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
||||
set -ex; \
|
||||
for server in $(DAV_SERVER); do \
|
||||
if [ ! "$$(ls $(TESTSERVER_BASE)$$server/)" ]; then \
|
||||
git submodule update --init -- "$(TESTSERVER_BASE)$$server"; \
|
||||
fi; \
|
||||
(cd $(TESTSERVER_BASE)$$server && sh install.sh); \
|
||||
DAV_SERVER=$$server pytest --cov vdirsyncer --cov-append tests/storage; \
|
||||
done
|
||||
bash $(CODECOV_PATH) -c
|
||||
|
||||
install-test: install-servers
|
||||
pip install -Ur test-requirements.txt
|
||||
set -xe && if [ "$$REQUIREMENTS" = "devel" ]; then \
|
||||
pip install -U --force-reinstall \
|
||||
git+https://github.com/DRMacIver/hypothesis \
|
||||
git+https://github.com/kennethreitz/requests \
|
||||
git+https://github.com/pytest-dev/pytest; \
|
||||
fi
|
||||
[ -z "$(TEST_EXTRA_PACKAGES)" ] || pip install $(TEST_EXTRA_PACKAGES)
|
||||
|
||||
install-style: install-docs
|
||||
pip install -U flake8 flake8-import-order 'flake8-bugbear>=17.3.0' autopep8
|
||||
|
||||
style:
|
||||
flake8
|
||||
! git grep -i syncroniz */*
|
||||
! git grep -i 'text/icalendar' */*
|
||||
sphinx-build -W -b html ./docs/ ./docs/_build/html/
|
||||
python3 scripts/make_travisconf.py | diff -b .travis.yml -
|
||||
|
||||
travis-conf:
|
||||
python3 scripts/make_travisconf.py > .travis.yml
|
||||
|
||||
install-docs:
|
||||
pip install -Ur docs-requirements.txt
|
||||
|
||||
docs:
|
||||
cd docs && make html
|
||||
|
||||
linkcheck:
|
||||
sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/
|
||||
|
||||
release:
|
||||
python setup.py sdist bdist_wheel upload
|
||||
check:
|
||||
ruff check
|
||||
ruff format --diff
|
||||
#mypy vdirsyncer
|
||||
|
||||
release-deb:
|
||||
sh scripts/release-deb.sh debian jessie
|
||||
|
|
@ -114,29 +53,11 @@ release-deb:
|
|||
sh scripts/release-deb.sh ubuntu zesty
|
||||
|
||||
install-dev:
|
||||
pip install -e .
|
||||
[ "$(ETESYNC_TESTS)" = "false" ] || pip install -Ue .[etesync]
|
||||
set -xe && if [ "$(REQUIREMENTS)" = "devel" ]; then \
|
||||
pip install -U --force-reinstall \
|
||||
git+https://github.com/mitsuhiko/click \
|
||||
git+https://github.com/kennethreitz/requests; \
|
||||
elif [ "$(REQUIREMENTS)" = "minimal" ]; then \
|
||||
pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \
|
||||
pip install -U pip setuptools wheel
|
||||
pip install -e '.[test,check,docs]'
|
||||
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
|
||||
pip install pyproject-dependencies && \
|
||||
pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
|
||||
fi
|
||||
|
||||
install-git-hooks: install-style
|
||||
echo "make style-autocorrect" > .git/hooks/pre-commit
|
||||
chmod +x .git/hooks/pre-commit
|
||||
|
||||
style-autocorrect:
|
||||
git diff --cached --name-only | egrep '\.py$$' | xargs --no-run-if-empty autopep8 -ri
|
||||
|
||||
ssh-submodule-urls:
|
||||
git submodule foreach "\
|
||||
echo -n 'Old: '; \
|
||||
git remote get-url origin; \
|
||||
git remote set-url origin \$$(git remote get-url origin | sed -e 's/https:\/\/github\.com\//git@github.com:/g'); \
|
||||
echo -n 'New URL: '; \
|
||||
git remote get-url origin"
|
||||
|
||||
.PHONY: docs
|
||||
|
|
|
|||
44
README.rst
44
README.rst
|
|
@ -2,6 +2,30 @@
|
|||
vdirsyncer
|
||||
==========
|
||||
|
||||
.. image:: https://builds.sr.ht/~whynothugo/vdirsyncer.svg
|
||||
:target: https://builds.sr.ht/~whynothugo/vdirsyncer
|
||||
:alt: CI status
|
||||
|
||||
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=main
|
||||
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=main
|
||||
:alt: Codecov coverage report
|
||||
|
||||
.. image:: https://readthedocs.org/projects/vdirsyncer/badge/
|
||||
:target: https://vdirsyncer.rtfd.org/
|
||||
:alt: documentation
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/vdirsyncer.svg
|
||||
:target: https://pypi.python.org/pypi/vdirsyncer
|
||||
:alt: version on pypi
|
||||
|
||||
.. image:: https://img.shields.io/badge/deb-packagecloud.io-844fec.svg
|
||||
:target: https://packagecloud.io/pimutils/vdirsyncer
|
||||
:alt: Debian packages
|
||||
|
||||
.. image:: https://img.shields.io/pypi/l/vdirsyncer.svg
|
||||
:target: https://github.com/pimutils/vdirsyncer/blob/main/LICENCE
|
||||
:alt: licence: BSD
|
||||
|
||||
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
||||
- `Source code <https://github.com/pimutils/vdirsyncer>`_
|
||||
|
||||
|
|
@ -16,19 +40,10 @@ servers. It can also be used to synchronize calendars and/or addressbooks
|
|||
between two servers directly.
|
||||
|
||||
It aims to be for calendars and contacts what `OfflineIMAP
|
||||
<http://offlineimap.org/>`_ is for emails.
|
||||
<https://www.offlineimap.org/>`_ is for emails.
|
||||
|
||||
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
|
||||
|
||||
.. image:: https://travis-ci.org/pimutils/vdirsyncer.svg?branch=master
|
||||
:target: https://travis-ci.org/pimutils/vdirsyncer
|
||||
|
||||
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=master
|
||||
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=master
|
||||
|
||||
.. image:: https://badge.waffle.io/pimutils/vdirsyncer.svg?label=ready&title=Ready
|
||||
:target: https://waffle.io/pimutils/vdirsyncer
|
||||
|
||||
Links of interest
|
||||
=================
|
||||
|
||||
|
|
@ -44,6 +59,15 @@ Links of interest
|
|||
|
||||
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
|
||||
|
||||
Dockerized
|
||||
=================
|
||||
If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
|
||||
Docker environment, you can check out the following GitHub Repository:
|
||||
|
||||
* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
|
||||
|
||||
Note: This is an unofficial Docker build, it is maintained by `Bleala <https://github.com/Bleala>`_.
|
||||
|
||||
License
|
||||
=======
|
||||
|
||||
|
|
|
|||
75
contrib/conflict_resolution/resolve_interactively.py
Executable file
75
contrib/conflict_resolution/resolve_interactively.py
Executable file
|
|
@ -0,0 +1,75 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Ask user to resolve a vdirsyncer sync conflict interactively.
|
||||
|
||||
Needs a way to ask the user.
|
||||
The use of https://apps.kde.org/kdialog/ for GNU/Linix is hardcoded.
|
||||
|
||||
Depends on python>3.5 and KDialog.
|
||||
|
||||
Usage:
|
||||
Ensure the file executable and use it in the vdirsyncer.conf file, e.g.
|
||||
|
||||
conflict_resolution = ["command", "/home/bern/vdirsyncer/resolve_interactively.py"]
|
||||
|
||||
This file is Free Software under the following license:
|
||||
SPDX-License-Identifier: BSD-3-Clause
|
||||
SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
|
||||
Author: <bernhard.reiter@intevation.de>
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
KDIALOG = "/usr/bin/kdialog"
|
||||
|
||||
SUMMARY_PATTERN = re.compile("^(SUMMARY:.*)$", re.MULTILINE)
|
||||
|
||||
|
||||
def get_summary(icalendar_text: str):
|
||||
"""Get the first SUMMARY: line from an iCalendar text.
|
||||
|
||||
Do not care about the line being continued.
|
||||
"""
|
||||
match = re.search(SUMMARY_PATTERN, icalendar_text)
|
||||
return match[1]
|
||||
|
||||
|
||||
def main(ical1_filename, ical2_filename):
|
||||
ical1 = ical1_filename.read_text()
|
||||
ical2 = ical2_filename.read_text()
|
||||
|
||||
additional_args = ["--yes-label", "take first"] # return code == 0
|
||||
additional_args += ["--no-label", "take second"] # return code == 1
|
||||
additional_args += ["--cancel-label", "do not resolve"] # return code == 2
|
||||
|
||||
r = subprocess.run(
|
||||
args=[
|
||||
KDIALOG,
|
||||
"--warningyesnocancel",
|
||||
"There was a sync conflict, do you prefer the first entry: \n"
|
||||
f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
|
||||
"or the second entry:\n"
|
||||
f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
|
||||
*additional_args,
|
||||
]
|
||||
)
|
||||
|
||||
if r.returncode == 2:
|
||||
# cancel was pressed
|
||||
return # shall lead to items not changed, because not copied
|
||||
|
||||
if r.returncode == 0:
|
||||
# we want to take the first item, so overwrite the second
|
||||
ical2_filename.write_text(ical1)
|
||||
else: # r.returncode == 1, we want the second item, so overwrite the first
|
||||
ical1_filename.write_text(ical2)
|
||||
|
||||
|
||||
if len(sys.argv) != 3:
|
||||
sys.stdout.write(__doc__)
|
||||
else:
|
||||
main(Path(sys.argv[1]), Path(sys.argv[2]))
|
||||
|
|
@ -1,43 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<!-- Blueprint for cron-like launchd plist -->
|
||||
<!-- Replace @@PLACEHOLDERS@@ with appropriate values for your system/settings! -->
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>EnvironmentVariables</key>
|
||||
<dict>
|
||||
<!-- Locale to use for vdirsyncer, e.g. en_US.UTF-8 -->
|
||||
<key>LANG</key>
|
||||
<string>@@LOCALE@@</string>
|
||||
<key>LC_ALL</key>
|
||||
<string>@@LOCALE@@</string>
|
||||
</dict>
|
||||
<key>Label</key>
|
||||
<string>vdirsyncer</string>
|
||||
<key>WorkingDirectory</key>
|
||||
<!-- working directory for vdirsyncer, usually the base directory where
|
||||
vdirsyncer is installed, e.g. /usr/local/ -->
|
||||
<string>@@WORKINGDIRECTORY@@</string>
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<!-- full path to vdirsyncer binary -->
|
||||
<string>@@VDIRSYNCER@@</string>
|
||||
<!-- only log errors -->
|
||||
<string>-v</string>
|
||||
<string>ERROR</string>
|
||||
<string>sync</string>
|
||||
</array>
|
||||
<key>RunAtLoad</key>
|
||||
<true/>
|
||||
<key>StartInterval</key>
|
||||
<!-- Sync intervall in seconds -->
|
||||
<integer>@@SYNCINTERVALL@@</integer>
|
||||
<!-- For logging, redirect stdout & stderr -->
|
||||
<!-- <key>StandardErrorPath</key> -->
|
||||
<!-- Full path to stderr logfile, e.g. /tmp/vdirsyncer_err.log -->
|
||||
<!-- <string>@@STDERRFILE@@</string> -->
|
||||
<!-- Full path to stdout logfile, e.g. /tmp/vdirsyncer_out.log -->
|
||||
<!-- <key>StandardOutPath</key> -->
|
||||
<!-- <string>@@STDOUTFILE@@</string> -->
|
||||
</dict>
|
||||
</plist>
|
||||
|
|
@ -1,7 +1,9 @@
|
|||
[Unit]
|
||||
Description=Synchronize calendars and contacts
|
||||
Documentation=https://vdirsyncer.readthedocs.org/
|
||||
StartLimitBurst=2
|
||||
|
||||
[Service]
|
||||
ExecStart=/usr/bin/vdirsyncer sync
|
||||
Type=oneshot
|
||||
RuntimeMaxSec=3m
|
||||
Restart=on-failure
|
||||
|
|
|
|||
|
|
@ -1,3 +0,0 @@
|
|||
sphinx != 1.4.7
|
||||
sphinx_rtd_theme
|
||||
setuptools_scm
|
||||
|
|
@ -1 +1 @@
|
|||
.. include:: ../CHANGELOG.rst
|
||||
.. include:: ../CHANGELOG.rst
|
||||
|
|
|
|||
107
docs/conf.py
107
docs/conf.py
|
|
@ -1,93 +1,106 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import os
|
||||
|
||||
import setuptools_scm
|
||||
from pkg_resources import get_distribution
|
||||
|
||||
extensions = ['sphinx.ext.autodoc']
|
||||
extensions = ["sphinx.ext.autodoc"]
|
||||
|
||||
templates_path = ['_templates']
|
||||
templates_path = ["_templates"]
|
||||
|
||||
source_suffix = '.rst'
|
||||
master_doc = 'index'
|
||||
source_suffix = ".rst"
|
||||
master_doc = "index"
|
||||
|
||||
project = u'vdirsyncer'
|
||||
copyright = (u'2014-{}, Markus Unterwaditzer & contributors'
|
||||
.format(datetime.date.today().strftime('%Y')))
|
||||
project = "vdirsyncer"
|
||||
copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
|
||||
datetime.date.today().strftime("%Y")
|
||||
)
|
||||
|
||||
release = setuptools_scm.get_version(root='..', relative_to=__file__)
|
||||
version = '.'.join(release.split('.')[:2]) # The short X.Y version.
|
||||
release = get_distribution("vdirsyncer").version
|
||||
version = ".".join(release.split(".")[:2]) # The short X.Y version.
|
||||
|
||||
rst_epilog = '.. |vdirsyncer_version| replace:: %s' % release
|
||||
rst_epilog = f".. |vdirsyncer_version| replace:: {release}"
|
||||
|
||||
exclude_patterns = ['_build']
|
||||
exclude_patterns = ["_build"]
|
||||
|
||||
pygments_style = 'sphinx'
|
||||
pygments_style = "sphinx"
|
||||
|
||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
|
||||
|
||||
try:
|
||||
import sphinx_rtd_theme
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
except ImportError:
|
||||
html_theme = 'default'
|
||||
html_theme = "default"
|
||||
if not on_rtd:
|
||||
print('-' * 74)
|
||||
print('Warning: sphinx-rtd-theme not installed, building with default '
|
||||
'theme.')
|
||||
print('-' * 74)
|
||||
print("-" * 74)
|
||||
print("Warning: sphinx-rtd-theme not installed, building with default theme.")
|
||||
print("-" * 74)
|
||||
|
||||
html_static_path = ['_static']
|
||||
htmlhelp_basename = 'vdirsyncerdoc'
|
||||
html_static_path = ["_static"]
|
||||
htmlhelp_basename = "vdirsyncerdoc"
|
||||
|
||||
latex_elements = {}
|
||||
latex_documents = [
|
||||
('index', 'vdirsyncer.tex', u'vdirsyncer Documentation',
|
||||
u'Markus Unterwaditzer', 'manual'),
|
||||
(
|
||||
"index",
|
||||
"vdirsyncer.tex",
|
||||
"vdirsyncer Documentation",
|
||||
"Markus Unterwaditzer",
|
||||
"manual",
|
||||
),
|
||||
]
|
||||
|
||||
man_pages = [
|
||||
('index', 'vdirsyncer', u'vdirsyncer Documentation',
|
||||
[u'Markus Unterwaditzer'], 1)
|
||||
("index", "vdirsyncer", "vdirsyncer Documentation", ["Markus Unterwaditzer"], 1)
|
||||
]
|
||||
|
||||
texinfo_documents = [
|
||||
('index', 'vdirsyncer', u'vdirsyncer Documentation',
|
||||
u'Markus Unterwaditzer', 'vdirsyncer',
|
||||
'Synchronize calendars and contacts.', 'Miscellaneous'),
|
||||
(
|
||||
"index",
|
||||
"vdirsyncer",
|
||||
"vdirsyncer Documentation",
|
||||
"Markus Unterwaditzer",
|
||||
"vdirsyncer",
|
||||
"Synchronize calendars and contacts.",
|
||||
"Miscellaneous",
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def github_issue_role(name, rawtext, text, lineno, inliner,
|
||||
options={}, content=()): # noqa: B006
|
||||
def github_issue_role(name, rawtext, text, lineno, inliner, options=None, content=()):
|
||||
options = options or {}
|
||||
try:
|
||||
issue_num = int(text)
|
||||
if issue_num <= 0:
|
||||
raise ValueError()
|
||||
raise ValueError
|
||||
except ValueError:
|
||||
msg = inliner.reporter.error('Invalid GitHub issue: {}'.format(text),
|
||||
line=lineno)
|
||||
msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
|
||||
prb = inliner.problematic(rawtext, rawtext, msg)
|
||||
return [prb], [msg]
|
||||
|
||||
from docutils import nodes
|
||||
|
||||
PROJECT_HOME = 'https://github.com/pimutils/vdirsyncer'
|
||||
link = '{}/{}/{}'.format(PROJECT_HOME,
|
||||
'issues' if name == 'gh' else 'pull',
|
||||
issue_num)
|
||||
linktext = ('issue #{}' if name == 'gh'
|
||||
else 'pull request #{}').format(issue_num)
|
||||
node = nodes.reference(rawtext, linktext, refuri=link,
|
||||
**options)
|
||||
PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
|
||||
link = "{}/{}/{}".format(
|
||||
PROJECT_HOME, "issues" if name == "gh" else "pull", issue_num
|
||||
)
|
||||
linktext = ("issue #{}" if name == "gh" else "pull request #{}").format(issue_num)
|
||||
node = nodes.reference(rawtext, linktext, refuri=link, **options)
|
||||
return [node], []
|
||||
|
||||
|
||||
def setup(app):
|
||||
from sphinx.domains.python import PyObject
|
||||
app.add_object_type('storage', 'storage', 'pair: %s; storage',
|
||||
doc_field_types=PyObject.doc_field_types)
|
||||
app.add_role('gh', github_issue_role)
|
||||
app.add_role('ghpr', github_issue_role)
|
||||
|
||||
app.add_object_type(
|
||||
"storage",
|
||||
"storage",
|
||||
"pair: %s; storage",
|
||||
doc_field_types=PyObject.doc_field_types,
|
||||
)
|
||||
app.add_role("gh", github_issue_role)
|
||||
app.add_role("ghpr", github_issue_role)
|
||||
|
|
|
|||
167
docs/config.rst
167
docs/config.rst
|
|
@ -61,7 +61,8 @@ Pair Section
|
|||
sync`` is executed. See also :ref:`collections_tutorial`.
|
||||
|
||||
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
|
||||
autodiscovery on a specific storage.
|
||||
autodiscovery on a specific storage. It means all the collections on side A /
|
||||
side B.
|
||||
|
||||
If the collection you want to sync doesn't have the same name on each side,
|
||||
you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
|
||||
|
|
@ -71,8 +72,8 @@ Pair Section
|
|||
|
||||
Examples:
|
||||
|
||||
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the
|
||||
collections from side B, and also the collections named "foo" and "bar".
|
||||
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all
|
||||
the collections from side B, and also the collections named "foo" and "bar".
|
||||
|
||||
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
|
||||
existing collections on either side.
|
||||
|
|
@ -116,10 +117,26 @@ Pair Section
|
|||
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
|
||||
metasync`` is executed. Example::
|
||||
|
||||
metadata = ["color", "displayname"]
|
||||
metadata = ["color", "displayname", "description", "order"]
|
||||
|
||||
This synchronizes the ``color`` and the ``displayname`` properties. The
|
||||
``conflict_resolution`` parameter applies here as well.
|
||||
This synchronizes the following properties:
|
||||
|
||||
- color: ``http://apple.com/ns/ical/:calendar-color``
|
||||
- displayname: ``DAV:displayname``
|
||||
- description: ``CalDAV:calendar-description`` and ``CardDAV:addressbook-description``
|
||||
- order: ``http://apple.com/ns/ical/:calendar-order``
|
||||
|
||||
The ``conflict_resolution`` parameter applies for these properties too.
|
||||
|
||||
.. _implicit_def:
|
||||
|
||||
- ``implicit``: Opt into implicitly creating collections. Example::
|
||||
|
||||
implicit = "create"
|
||||
|
||||
When set to "create", missing collections are automatically created on both
|
||||
sides during sync without prompting the user. This simplifies workflows where
|
||||
all collections should be synchronized bidirectionally.
|
||||
|
||||
.. _storage_config:
|
||||
|
||||
|
|
@ -169,7 +186,7 @@ CalDAV and CardDAV
|
|||
url = "..."
|
||||
#username = ""
|
||||
#password = ""
|
||||
#verify = true
|
||||
#verify = /path/to/custom_ca.pem
|
||||
#auth = null
|
||||
#useragent = "vdirsyncer/0.16.4"
|
||||
#verify_fingerprint = null
|
||||
|
|
@ -202,12 +219,10 @@ CalDAV and CardDAV
|
|||
:param url: Base URL or an URL to a calendar.
|
||||
:param username: Username for authentication.
|
||||
:param password: Password for authentication.
|
||||
:param verify: Verify SSL certificate, default True. This can also be a
|
||||
local path to a self-signed SSL certificate. See :ref:`ssl-tutorial`
|
||||
for more information.
|
||||
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
|
||||
expected server certificate. See :ref:`ssl-tutorial` for more
|
||||
information.
|
||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||
See :ref:`ssl-tutorial` for more information.
|
||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||
default is preemptive Basic auth, sending credentials even if server
|
||||
didn't request them. This saves from an additional roundtrip per
|
||||
|
|
@ -229,21 +244,20 @@ CalDAV and CardDAV
|
|||
url = "..."
|
||||
#username = ""
|
||||
#password = ""
|
||||
#verify = true
|
||||
#verify = /path/to/custom_ca.pem
|
||||
#auth = null
|
||||
#useragent = "vdirsyncer/0.16.4"
|
||||
#verify_fingerprint = null
|
||||
#auth_cert = null
|
||||
#use_vcard_4 = false
|
||||
|
||||
:param url: Base URL or an URL to an addressbook.
|
||||
:param username: Username for authentication.
|
||||
:param password: Password for authentication.
|
||||
:param verify: Verify SSL certificate, default True. This can also be a
|
||||
local path to a self-signed SSL certificate. See
|
||||
:ref:`ssl-tutorial` for more information.
|
||||
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the expected
|
||||
server certificate. See :ref:`ssl-tutorial` for
|
||||
more information.
|
||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||
See :ref:`ssl-tutorial` for more information.
|
||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||
default is preemptive Basic auth, sending credentials even if
|
||||
server didn't request them. This saves from an additional
|
||||
|
|
@ -253,6 +267,7 @@ CalDAV and CardDAV
|
|||
certificate and the key or a list of paths to the files
|
||||
with them.
|
||||
:param useragent: Default ``vdirsyncer``.
|
||||
:param use_vcard_4: Whether the server use vCard 4.0.
|
||||
|
||||
Google
|
||||
++++++
|
||||
|
|
@ -266,7 +281,15 @@ in terms of data safety**. See `this blog post
|
|||
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
|
||||
up your data.
|
||||
|
||||
At first run you will be asked to authorize application for google account
|
||||
Another caveat is that Google group labels are not synced with vCard's
|
||||
`CATEGORIES <https://www.rfc-editor.org/rfc/rfc6350#section-6.7.1>`_ property
|
||||
(also see :gh:`814` and
|
||||
`upstream issue #36761530 <https://issuetracker.google.com/issues/36761530>`_
|
||||
for reference) and the
|
||||
`BDAY <https://www.rfc-editor.org/rfc/rfc6350#section-6.2.5>`_ property is not
|
||||
synced when only partial date information is present (e.g. the year is missing).
|
||||
|
||||
At first run you will be asked to authorize application for Google account
|
||||
access.
|
||||
|
||||
To use this storage type, you need to install some additional dependencies::
|
||||
|
|
@ -277,25 +300,29 @@ Furthermore you need to register vdirsyncer as an application yourself to
|
|||
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
|
||||
Service to hardcode those into opensource software [googleterms]_:
|
||||
|
||||
1. Go to the `Google API Manager <https://console.developers.google.com>`_ and
|
||||
create a new project under any name.
|
||||
1. Go to the `Google API Manager <https://console.developers.google.com>`_
|
||||
|
||||
2. Create a new project under any name.
|
||||
|
||||
2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
|
||||
Calendar and Contacts APIs, those are different and won't work). There should
|
||||
be a searchbox where you can just enter those terms.
|
||||
be a search box where you can just enter those terms.
|
||||
|
||||
3. In the sidebar, select "Credentials", then "Create Credentials" and create a
|
||||
new "OAuth Client ID".
|
||||
|
||||
3. In the sidebar, select "Credentials" and create a new "OAuth Client ID". The
|
||||
application type is "Other".
|
||||
|
||||
You'll be prompted to create a OAuth consent screen first. Fill out that
|
||||
form however you like.
|
||||
|
||||
After setting up the consent screen, finish creating the new "OAuth Client
|
||||
ID'. The correct application type is "Desktop application".
|
||||
|
||||
4. Finally you should have a Client ID and a Client secret. Provide these in
|
||||
your storage config.
|
||||
|
||||
The ``token_file`` parameter should be a filepath where vdirsyncer can later
|
||||
store authentication-related data. You do not need to create the file itself
|
||||
or write anything to it.
|
||||
The ``token_file`` parameter should be a path to a file where vdirsyncer can
|
||||
later store authentication-related data. You do not need to create the file
|
||||
itself or write anything to it.
|
||||
|
||||
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
|
||||
section "Confidential Matters".
|
||||
|
|
@ -303,7 +330,7 @@ or write anything to it.
|
|||
.. note::
|
||||
|
||||
You need to configure which calendars Google should offer vdirsyncer using
|
||||
a rather hidden `settings page
|
||||
a secret `settings page
|
||||
<https://calendar.google.com/calendar/syncselect>`_.
|
||||
|
||||
.. storage:: google_calendar
|
||||
|
|
@ -343,55 +370,9 @@ or write anything to it.
|
|||
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
||||
API Manager.
|
||||
|
||||
EteSync
|
||||
+++++++
|
||||
|
||||
`EteSync <https://www.etesync.com/>`_ is a new cloud provider for end to end
|
||||
encrypted contacts and calendar storage. Vdirsyncer contains **experimental**
|
||||
support for it.
|
||||
|
||||
To use it, you need to install some optional dependencies::
|
||||
|
||||
pip install vdirsyncer[etesync]
|
||||
|
||||
On first usage you will be prompted for the service password and the encryption
|
||||
password. Neither are stored.
|
||||
|
||||
.. storage:: etesync_contacts
|
||||
|
||||
Contacts for etesync.
|
||||
|
||||
::
|
||||
|
||||
[storage example_for_etesync_contacts]
|
||||
email = ...
|
||||
secrets_dir = ...
|
||||
#server_path = ...
|
||||
#db_path = ...
|
||||
|
||||
:param email: The email address of your account.
|
||||
:param secrets_dir: A directory where vdirsyncer can store the encryption
|
||||
key and authentication token.
|
||||
:param server_url: Optional. URL to the root of your custom server.
|
||||
:param db_path: Optional. Use a different path for the database.
|
||||
|
||||
.. storage:: etesync_calendars
|
||||
|
||||
Calendars for etesync.
|
||||
|
||||
::
|
||||
|
||||
[storage example_for_etesync_calendars]
|
||||
email = ...
|
||||
secrets_dir = ...
|
||||
#server_path = ...
|
||||
#db_path = ...
|
||||
|
||||
:param email: The email address of your account.
|
||||
:param secrets_dir: A directory where vdirsyncer can store the encryption
|
||||
key and authentication token.
|
||||
:param server_url: Optional. URL to the root of your custom server.
|
||||
:param db_path: Optional. Use a different path for the database.
|
||||
The current flow is not ideal, but Google has deprecated the previous APIs used
|
||||
for this without providing a suitable replacement. See :gh:`975` for discussion
|
||||
on the topic.
|
||||
|
||||
Local
|
||||
+++++
|
||||
|
|
@ -408,6 +389,8 @@ Local
|
|||
fileext = "..."
|
||||
#encoding = "utf-8"
|
||||
#post_hook = null
|
||||
#pre_deletion_hook = null
|
||||
#fileignoreext = ".tmp"
|
||||
|
||||
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
|
||||
a more formal description of the format.
|
||||
|
|
@ -421,11 +404,17 @@ Local
|
|||
:param fileext: The file extension to use (e.g. ``.txt``). Contained in the
|
||||
href, so if you change the file extension after a sync, this will
|
||||
trigger a re-download of everything (but *should* not cause data-loss
|
||||
of any kind).
|
||||
of any kind). To be compatible with the ``vset`` format you have
|
||||
to either use ``.vcf`` or ``.ics``. Note that metasync won't work
|
||||
if you use an empty string here.
|
||||
:param encoding: File encoding for items, both content and filename.
|
||||
:param post_hook: A command to call for each item creation and
|
||||
modification. The command will be called with the path of the
|
||||
new/updated file.
|
||||
:param pre_deletion_hook: A command to call for each item deletion.
|
||||
The command will be called with the path of the deleted file.
|
||||
:param fileeignoreext: The file extention to ignore. It is only useful
|
||||
if fileext is set to the empty string. The default is ``.tmp``.
|
||||
|
||||
.. storage:: singlefile
|
||||
|
||||
|
|
@ -505,6 +494,7 @@ leads to an error.
|
|||
[storage holidays_remote]
|
||||
type = "http"
|
||||
url = https://example.com/holidays_from_hicksville.ics
|
||||
#filter_hook = null
|
||||
|
||||
Too many WebCAL providers generate UIDs of all ``VEVENT``-components
|
||||
on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
|
||||
|
|
@ -517,12 +507,10 @@ leads to an error.
|
|||
:param url: URL to the ``.ics`` file.
|
||||
:param username: Username for authentication.
|
||||
:param password: Password for authentication.
|
||||
:param verify: Verify SSL certificate, default True. This can also be a
|
||||
local path to a self-signed SSL certificate. See :ref:`ssl-tutorial`
|
||||
for more information.
|
||||
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
|
||||
expected server certificate. See :ref:`ssl-tutorial` for more
|
||||
information.
|
||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||
See :ref:`ssl-tutorial` for more information.
|
||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||
default is preemptive Basic auth, sending credentials even if server
|
||||
didn't request them. This saves from an additional roundtrip per
|
||||
|
|
@ -531,3 +519,8 @@ leads to an error.
|
|||
:param auth_cert: Optional. Either a path to a certificate with a client
|
||||
certificate and the key or a list of paths to the files with them.
|
||||
:param useragent: Default ``vdirsyncer``.
|
||||
:param filter_hook: Optional. A filter command to call for each fetched
|
||||
item, passed in raw form to stdin and returned via stdout.
|
||||
If nothing is returned by the filter command, the item is skipped.
|
||||
This can be used to alter fields as needed when dealing with providers
|
||||
generating malformed events.
|
||||
|
|
|
|||
|
|
@ -2,14 +2,11 @@
|
|||
Support and Contact
|
||||
===================
|
||||
|
||||
* The ``#pimutils`` `IRC channel on Freenode <https://pimutils.org/contact>`_
|
||||
* The ``#pimutils`` `IRC channel on Libera.Chat <https://pimutils.org/contact>`_
|
||||
might be active, depending on your timezone. Use it for support and general
|
||||
(including off-topic) discussion.
|
||||
|
||||
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
|
||||
concrete bug reports and feature requests.
|
||||
|
||||
* Lastly, you can also `contact the author directly
|
||||
<https://unterwaditzer.net/contact.html>`_. Do this for security issues. If
|
||||
that doesn't work out (i.e. if I don't respond within one week), use
|
||||
``contact@pimutils.org``.
|
||||
* For security issues, contact ``contact@pimutils.org``.
|
||||
|
|
|
|||
|
|
@ -75,36 +75,36 @@ Submitting patches, pull requests
|
|||
Running tests, how to set up your development environment
|
||||
---------------------------------------------------------
|
||||
|
||||
For many patches, it might suffice to just let Travis run the tests. However,
|
||||
Travis is slow, so you might want to run them locally too. For this, set up a
|
||||
For many patches, it might suffice to just let CI run the tests. However,
|
||||
CI is slow, so you might want to run them locally too. For this, set up a
|
||||
virtualenv_ and run this inside of it::
|
||||
|
||||
# install:
|
||||
# Install development dependencies, including:
|
||||
# - vdirsyncer from the repo into the virtualenv
|
||||
# - stylecheckers (flake8) and code formatters (autopep8)
|
||||
# - style checks and formatting (ruff)
|
||||
make install-dev
|
||||
|
||||
# Install git commit hook for the stylechecker
|
||||
make install-git-hooks
|
||||
|
||||
# install test dependencies
|
||||
make install-test
|
||||
# Install git commit hook for some extra linting and checking
|
||||
pre-commit install
|
||||
|
||||
Then you can run::
|
||||
|
||||
make test # The normal testsuite
|
||||
make style # Stylechecker
|
||||
make docs # Build the HTML docs, output is at docs/_build/html/
|
||||
pytest # The normal testsuite
|
||||
pre-commit run --all # Run all linters (which also run via pre-commit)
|
||||
make -C docs html # Build the HTML docs, output is at docs/_build/html/
|
||||
make -C docs linkcheck # Check docs for any broken links
|
||||
|
||||
The ``Makefile`` has a lot of options that allow you to control which tests are
|
||||
run, and which servers are tested. Take a look at its code where they are all
|
||||
initialized and documented.
|
||||
|
||||
For example, to test xandikos, run::
|
||||
To tests against a specific DAV server, use ``DAV_SERVER``::
|
||||
|
||||
make DAV_SERVER=xandikos install-test
|
||||
make DAV_SERVER=xandikos test
|
||||
|
||||
The server will be initialised in a docker container and terminated at the end
|
||||
of the test suite.
|
||||
|
||||
If you have any questions, feel free to open issues about it.
|
||||
|
||||
Structure of the testsuite
|
||||
|
|
|
|||
|
|
@ -2,23 +2,14 @@
|
|||
Donations
|
||||
=========
|
||||
|
||||
vdirsyncer is and will always be free and open source software. We appreciate
|
||||
sponsors willing to fund our continued work on it.
|
||||
|
||||
If you found my work useful, please consider donating. Thank you!
|
||||
|
||||
- Bitcoin: ``16sSHxZm263WHR9P9PJjCxp64jp9ooXKVt``
|
||||
- Bitcoin: ``13p42uWDL62bNRH3KWA6cSpSgvnHy1fs2E``.
|
||||
- Sponsor via one-time tips or recurring donations `via Ko-fi`_.
|
||||
- Sponsor via recurring donations `via liberapay`_.
|
||||
|
||||
- `PayPal.me <https://www.paypal.me/untitaker>`_
|
||||
|
||||
- `Bountysource <https://www.bountysource.com/teams/vdirsyncer>`_ is useful for
|
||||
funding work on a specific GitHub issue.
|
||||
|
||||
- There's also `Bountysource Salt
|
||||
<https://salt.bountysource.com/teams/vdirsyncer>`_, for one-time and
|
||||
recurring donations.
|
||||
|
||||
- Donations via Bountysource are publicly listed. Use PayPal if you dislike
|
||||
that.
|
||||
|
||||
- `Flattr
|
||||
<https://flattr.com/submit/auto?user_id=untitaker&url=https%3A%2F%2Fgithub.com%2Fpimutils%2Fvdirsyncer>`_
|
||||
or `Gratipay <https://gratipay.com/vdirsyncer/>`_ can be used for
|
||||
recurring donations.
|
||||
.. _via Ko-fi: https://ko-fi.com/whynothugo
|
||||
.. _via liberapay: https://liberapay.com/WhyNotHugo/
|
||||
|
|
|
|||
|
|
@ -7,17 +7,18 @@ Installation
|
|||
OS/distro packages
|
||||
------------------
|
||||
|
||||
The following packages are user-contributed and were up-to-date at the time of
|
||||
writing:
|
||||
The following packages are community-contributed and were up-to-date at the
|
||||
time of writing:
|
||||
|
||||
- `ArchLinux (AUR) <https://aur.archlinux.org/packages/vdirsyncer>`_
|
||||
- `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_
|
||||
- `Ubuntu and Debian, x86_64-only
|
||||
<https://packagecloud.io/pimutils/vdirsyncer/install>`_ (packages also exist
|
||||
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
|
||||
in the official repositories but may be out of date)
|
||||
- `GNU Guix <https://www.gnu.org/software/guix/package-list.html#vdirsyncer>`_
|
||||
- `OS X (homebrew) <http://braumeister.org/formula/vdirsyncer>`_
|
||||
- `BSD (pkgsrc) <http://pkgsrc.se/time/py-vdirsyncer>`_
|
||||
- `GNU Guix <https://packages.guix.gnu.org/packages/vdirsyncer/>`_
|
||||
- `macOS (homebrew) <https://formulae.brew.sh/formula/vdirsyncer>`_
|
||||
- `NetBSD <https://ftp.netbsd.org/pub/pkgsrc/current/pkgsrc/time/py-vdirsyncer/index.html>`_
|
||||
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
|
||||
- `Slackware (SlackBuild at Slackbuilds.org) <https://slackbuilds.org/repository/15.0/network/vdirsyncer/>`_
|
||||
|
||||
We only support the latest version of vdirsyncer, which is at the time of this
|
||||
writing |vdirsyncer_version|. Please **do not file bugs if you use an older
|
||||
|
|
@ -41,27 +42,53 @@ If your distribution doesn't provide a package for vdirsyncer, you still can
|
|||
use Python's package manager "pip". First, you'll have to check that the
|
||||
following things are installed:
|
||||
|
||||
- Python 3.4+ and pip.
|
||||
- Python 3.9 to 3.13 and pip.
|
||||
- ``libxml`` and ``libxslt``
|
||||
- ``zlib``
|
||||
- Linux or OS X. **Windows is not supported, see :gh:`535`.**
|
||||
- Linux or macOS. **Windows is not supported**, see :gh:`535`.
|
||||
|
||||
On Linux systems, using the distro's package manager is the best
|
||||
way to do this, for example, using Ubuntu::
|
||||
|
||||
sudo apt-get install libxml2 libxslt1.1 zlib1g python
|
||||
sudo apt-get install libxml2 libxslt1.1 zlib1g python3
|
||||
|
||||
Then you have several options. The following text applies for most Python
|
||||
software by the way.
|
||||
|
||||
pipx: The clean, easy way
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
pipx_ is a new package manager for Python-based software that automatically
|
||||
sets up a virtual environment for each program it installs. Please note that
|
||||
installing via pipx will not include manual pages nor systemd services.
|
||||
|
||||
pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``
|
||||
|
||||
Assuming that pipx is installed, vdirsyncer can be installed with::
|
||||
|
||||
pipx install vdirsyncer
|
||||
|
||||
It can later be updated to the latest version with::
|
||||
|
||||
pipx upgrade vdirsyncer
|
||||
|
||||
And can be uninstalled with::
|
||||
|
||||
pipx uninstall vdirsyncer
|
||||
|
||||
This last command will remove vdirsyncer and any dependencies installed into
|
||||
the above location.
|
||||
|
||||
.. _pipx: https://github.com/pipxproject/pipx
|
||||
|
||||
The dirty, easy way
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The easiest way to install vdirsyncer at this point would be to run::
|
||||
If pipx is not available on your distribution, the easiest way to install
|
||||
vdirsyncer at this point would be to run::
|
||||
|
||||
pip install --user --ignore-installed vdirsyncer
|
||||
pip install --ignore-installed vdirsyncer
|
||||
|
||||
- ``--user`` is to install without root rights (into your home directory)
|
||||
- ``--ignore-installed`` is to work around Debian's potentially broken packages
|
||||
(see :ref:`debian-urllib3`).
|
||||
|
||||
|
|
@ -81,7 +108,7 @@ the simplest possible way would look something like::
|
|||
|
||||
virtualenv ~/vdirsyncer_env
|
||||
~/vdirsyncer_env/bin/pip install vdirsyncer
|
||||
alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer
|
||||
alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer"
|
||||
|
||||
You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``.
|
||||
|
||||
|
|
@ -92,25 +119,4 @@ This method has two advantages:
|
|||
distro-specific issues.
|
||||
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
|
||||
|
||||
The clean, easy way
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
pipsi_ is a new package manager for Python-based software that automatically
|
||||
sets up a virtualenv for each program you install. Assuming you have it
|
||||
installed on your operating system, you can do::
|
||||
|
||||
pipsi install --python python3 vdirsyncer
|
||||
|
||||
and ``.local/bin/vdirsyncer`` will be your new vdirsyncer installation. To
|
||||
update vdirsyncer to the latest version::
|
||||
|
||||
pipsi upgrade vdirsyncer
|
||||
|
||||
If you're done with vdirsyncer, you can do::
|
||||
|
||||
pipsi uninstall vdirsyncer
|
||||
|
||||
and vdirsyncer will be uninstalled, including its dependencies.
|
||||
|
||||
.. _virtualenv: https://virtualenv.readthedocs.io/
|
||||
.. _pipsi: https://github.com/mitsuhiko/pipsi
|
||||
|
|
|
|||
|
|
@ -38,6 +38,12 @@ You can fetch the username as well::
|
|||
|
||||
Or really any kind of parameter in a storage section.
|
||||
|
||||
You can also pass the command as a string to be executed in a shell::
|
||||
|
||||
[storage foo]
|
||||
...
|
||||
password.fetch = ["shell", "~/.local/bin/get-my-password | head -n1"]
|
||||
|
||||
With pass_ for example, you might find yourself writing something like this in
|
||||
your configuration file::
|
||||
|
||||
|
|
@ -60,7 +66,7 @@ passwords from the OS's password store. Installation::
|
|||
Basic usage::
|
||||
|
||||
password.fetch = ["command", "keyring", "get", "example.com", "foouser"]
|
||||
|
||||
|
||||
.. _keyring: https://github.com/jaraco/keyring/
|
||||
|
||||
Password Prompt
|
||||
|
|
@ -72,3 +78,19 @@ You can also simply prompt for the password::
|
|||
type = "caldav"
|
||||
username = "myusername"
|
||||
password.fetch = ["prompt", "Password for CalDAV"]
|
||||
|
||||
Environment variable
|
||||
====================
|
||||
|
||||
To read the password from an environment variable::
|
||||
|
||||
[storage foo]
|
||||
type = "caldav"
|
||||
username = "myusername"
|
||||
password.fetch = ["command", "printenv", "DAV_PW"]
|
||||
|
||||
This is especially handy if you use the same password multiple times
|
||||
(say, for a CardDAV and a CalDAV storage).
|
||||
On bash, you can read and export the password without printing::
|
||||
|
||||
read -s -p "DAV Password: " DAV_PW && export DAV_PW
|
||||
|
|
|
|||
|
|
@ -5,23 +5,27 @@ Packaging guidelines
|
|||
Thank you very much for packaging vdirsyncer! The following guidelines should
|
||||
help you to avoid some common pitfalls.
|
||||
|
||||
While they are called guidelines and therefore theoretically not mandatory, if
|
||||
you consider going a different direction, please first open an issue or contact
|
||||
me otherwise instead of just going ahead. These guidelines exist for my own
|
||||
convenience too.
|
||||
If you find yourself needing to patch anything, or going in a different direction,
|
||||
please open an issue so we can also address it in a way that works for everyone. Otherwise
|
||||
we get bug reports for code or scenarios that don't exist in upstream vdirsyncer.
|
||||
|
||||
Obtaining the source code
|
||||
=========================
|
||||
|
||||
The main distribution channel is `PyPI
|
||||
<https://pypi.python.org/pypi/vdirsyncer>`_, and source tarballs can be
|
||||
obtained there. Do not use the ones from GitHub: Their tarballs contain useless
|
||||
junk and are more of a distraction than anything else.
|
||||
obtained there. We mirror the same package tarball and wheel as GitHub
|
||||
releases. Please do not confuse these with the auto-generated GitHub "Source
|
||||
Code" tarball. Those are missing some important metadata and your build will fail.
|
||||
|
||||
I give each release a tag in the git repo. If you want to get notified of new
|
||||
We give each release a tag in the git repo. If you want to get notified of new
|
||||
releases, `GitHub's feed
|
||||
<https://github.com/pimutils/vdirsyncer/releases.atom>`_ is a good way.
|
||||
|
||||
Tags will be signed by the maintainer who is doing the release (starting with
|
||||
0.16.8), and generation of the tarball and wheel is done by CI. Hence, only the
|
||||
tag itself is signed.
|
||||
|
||||
Dependency versions
|
||||
===================
|
||||
|
||||
|
|
@ -33,24 +37,25 @@ Testing
|
|||
=======
|
||||
|
||||
Everything testing-related goes through the ``Makefile`` in the root of the
|
||||
repository or PyPI package. Trying to e.g. run ``py.test`` directly will
|
||||
repository or PyPI package. Trying to e.g. run ``pytest`` directly will
|
||||
require a lot of environment variables to be set (for configuration) and you
|
||||
probably don't want to deal with that.
|
||||
|
||||
You can install the testing dependencies with::
|
||||
You can install all the development dependencies with::
|
||||
|
||||
make install-test
|
||||
make install-dev
|
||||
|
||||
You probably don't want this since it will use pip to download the
|
||||
dependencies. Alternatively you can find the testing dependencies in
|
||||
``test-requirements.txt``, again with lower-bound version requirements.
|
||||
dependencies. Alternatively test dependencies are listed as ``test`` optional
|
||||
dependencies in ``pyproject.toml``, again with lower-bound version
|
||||
requirements.
|
||||
|
||||
You also have to have vdirsyncer fully installed at this point. Merely
|
||||
``cd``-ing into the tarball will not be sufficient.
|
||||
|
||||
Running the tests happens with::
|
||||
|
||||
make test
|
||||
pytest
|
||||
|
||||
Hypothesis will randomly generate test input. If you care about deterministic
|
||||
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
|
||||
|
|
@ -69,10 +74,11 @@ Using Sphinx_ you can generate the documentation you're reading right now in a
|
|||
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
|
||||
take care of the HTML docs' formatting.
|
||||
|
||||
You can find a list of dependencies in ``docs-requirements.txt``. Again, you
|
||||
can install those using pip with::
|
||||
You can find a list of dependencies in ``pyproject.toml``, in the
|
||||
``project.optional-dependencies`` section as ``docs``. Again, you can install
|
||||
those using pip with::
|
||||
|
||||
make install-docs
|
||||
pip install '.[docs]'
|
||||
|
||||
Then change into the ``docs/`` directory and build whatever format you want
|
||||
using the ``Makefile`` in there (run ``make`` for the formats you can build).
|
||||
|
|
|
|||
|
|
@ -32,15 +32,15 @@ Paste this into your vdirsyncer config::
|
|||
[storage holidays_public]
|
||||
type = "http"
|
||||
# The URL to your iCalendar file.
|
||||
url = ...
|
||||
url = "..."
|
||||
|
||||
[storage holidays_private]
|
||||
type = "caldav"
|
||||
# The direct URL to your calendar.
|
||||
url = ...
|
||||
url = "..."
|
||||
# The credentials to your CalDAV server
|
||||
username = ...
|
||||
password = ...
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
Then run ``vdirsyncer discover holidays`` and ``vdirsyncer sync holidays``, and
|
||||
your previously created calendar should be filled with events.
|
||||
|
|
|
|||
|
|
@ -18,5 +18,5 @@ package that don't play well with packages assuming a normal ``requests``. This
|
|||
is due to stubbornness on both sides.
|
||||
|
||||
See :gh:`82` and :gh:`140` for past discussions. You have one option to work
|
||||
around this, that is, to install vdirsyncer in a virtualenv, see
|
||||
around this, that is, to install vdirsyncer in a virtual environment, see
|
||||
:ref:`manual-installation`.
|
||||
|
|
|
|||
|
|
@ -14,21 +14,14 @@ To pin the certificate by fingerprint::
|
|||
[storage foo]
|
||||
type = "caldav"
|
||||
...
|
||||
verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA"
|
||||
#verify = false # Optional: Disable CA validation, useful for self-signed certs
|
||||
verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E"
|
||||
|
||||
SHA1-, SHA256- or MD5-Fingerprints can be used. They're detected by their
|
||||
length.
|
||||
SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported.
|
||||
CA validation is disabled when pinning a fingerprint.
|
||||
|
||||
You can use the following command for obtaining a SHA-1 fingerprint::
|
||||
You can use the following command for obtaining a SHA256 fingerprint::
|
||||
|
||||
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint
|
||||
|
||||
Note that ``verify_fingerprint`` doesn't suffice for vdirsyncer to work with
|
||||
self-signed certificates (or certificates that are not in your trust store). You
|
||||
most likely need to set ``verify = false`` as well. This disables verification
|
||||
of the SSL certificate's expiration time and the existence of it in your trust
|
||||
store, all that's verified now is the fingerprint.
|
||||
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256
|
||||
|
||||
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
|
||||
that you can forget about all of that. It is easier to deploy a free
|
||||
|
|
@ -47,22 +40,16 @@ To point vdirsyncer to a custom set of root CAs::
|
|||
...
|
||||
verify = "/path/to/cert.pem"
|
||||
|
||||
Vdirsyncer uses the requests_ library, which, by default, `uses its own set of
|
||||
trusted CAs
|
||||
<http://www.python-requests.org/en/latest/user/advanced/#ca-certificates>`_.
|
||||
Vdirsyncer uses the aiohttp_ library, which uses the default `ssl.SSLContext
|
||||
<https://docs.python.org/3/library/ssl.html#ssl.SSLContext>`_ by default.
|
||||
|
||||
However, the actual behavior depends on how you have installed it. Many Linux
|
||||
distributions patch their ``python-requests`` package to use the system
|
||||
certificate CAs. Normally these two stores are similar enough for you to not
|
||||
care.
|
||||
There are cases where certificate validation fails even though you can access
|
||||
the server fine through e.g. your browser. This usually indicates that your
|
||||
installation of ``python`` or the ``aiohttp`` library is somehow broken. In
|
||||
such cases, it makes sense to explicitly set ``verify`` or
|
||||
``verify_fingerprint`` as shown above.
|
||||
|
||||
But there are cases where certificate validation fails even though you can
|
||||
access the server fine through e.g. your browser. This usually indicates that
|
||||
your installation of the ``requests`` library is somehow broken. In such cases,
|
||||
it makes sense to explicitly set ``verify`` or ``verify_fingerprint`` as shown
|
||||
above.
|
||||
|
||||
.. _requests: http://www.python-requests.org/
|
||||
.. _aiohttp: https://docs.aiohttp.org/en/stable/index.html
|
||||
|
||||
.. _ssl-client-certs:
|
||||
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ Configuration
|
|||
.. note::
|
||||
|
||||
- The `config.example from the repository
|
||||
<https://github.com/pimutils/vdirsyncer/blob/master/config.example>`_
|
||||
<https://github.com/pimutils/vdirsyncer/blob/main/config.example>`_
|
||||
contains a very terse version of this.
|
||||
|
||||
- In this example we set up contacts synchronization, but calendar sync
|
||||
|
|
@ -176,8 +176,11 @@ as a file called ``color`` within the calendar folder.
|
|||
More information about collections
|
||||
----------------------------------
|
||||
|
||||
"Collection" is a collective term for addressbooks and calendars. Each
|
||||
collection from a storage has a "collection name", a unique identifier for each
|
||||
"Collection" is a collective term for addressbooks and calendars. A CardDAV or
|
||||
CalDAV server can contain several "collections" which correspond to several
|
||||
addressbooks or calendars.
|
||||
|
||||
Each collection from a storage has a "collection name", a unique identifier for each
|
||||
collection. In the case of :storage:`filesystem`-storage, this is the name of the
|
||||
directory that represents the collection, in the case of the DAV-storages this
|
||||
is the last segment of the URL. We use this identifier in the ``collections``
|
||||
|
|
|
|||
|
|
@ -7,4 +7,4 @@ Vdirsyncer is continuously tested against the latest version of Baikal_.
|
|||
- Baikal up to ``0.2.7`` also uses an old version of SabreDAV, with the same
|
||||
issue as ownCloud, see :gh:`160`. This issue is fixed in later versions.
|
||||
|
||||
.. _Baikal: http://baikal-server.com/
|
||||
.. _Baikal: http://sabre.io/baikal/
|
||||
|
|
|
|||
|
|
@ -52,7 +52,7 @@ this:
|
|||
setup. We also set the storage to read-only such that no changes get
|
||||
synchronized back. Claws-Mail should not be able to do any changes anyway,
|
||||
but this is one extra safety step in case files get corrupted or vdirsyncer
|
||||
behaves eratically. You can leave that part out if you want to be able to
|
||||
behaves erratically. You can leave that part out if you want to be able to
|
||||
edit those files locally.
|
||||
- In the last section we configure that online contacts win in a conflict
|
||||
situation. Configure this part however you like. A correct value depends on
|
||||
|
|
@ -69,7 +69,7 @@ Now we discover and sync our contacts::
|
|||
Claws Mail
|
||||
----------
|
||||
|
||||
Open Claws-Mail. Got to **Tools** => **Addressbook**.
|
||||
Open Claws-Mail. Go to **Tools** => **Addressbook**.
|
||||
|
||||
Click on **Addressbook** => **New vCard**. Choose a name for the book.
|
||||
|
||||
|
|
@ -77,7 +77,7 @@ Then search for the for the vCard in the folder **~/.contacts/**. Click
|
|||
ok, and you we will see your contacts.
|
||||
|
||||
.. note::
|
||||
|
||||
|
||||
Claws-Mail shows only contacts that have a mail address.
|
||||
|
||||
Crontab
|
||||
|
|
@ -86,7 +86,7 @@ Crontab
|
|||
On the end we create a crontab, so that vdirsyncer syncs automatically
|
||||
every 30 minutes our contacts::
|
||||
|
||||
contab -e
|
||||
crontab -e
|
||||
|
||||
On the end of that file enter this line::
|
||||
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ Exchange server you might get confronted with weird errors of all sorts
|
|||
type = "caldav"
|
||||
url = "http://localhost:1080/users/user@example.com/calendar/"
|
||||
username = "user@example.com"
|
||||
password = ...
|
||||
password = "..."
|
||||
|
||||
- Older versions of DavMail handle URLs case-insensitively. See :gh:`144`.
|
||||
- DavMail is handling malformed data on the Exchange server very poorly. In
|
||||
|
|
|
|||
|
|
@ -10,14 +10,14 @@ the settings to use::
|
|||
|
||||
[storage cal]
|
||||
type = "caldav"
|
||||
url = "https://caldav.messagingengine.com/"
|
||||
username = ...
|
||||
password = ...
|
||||
url = "https://caldav.fastmail.com/"
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
[storage card]
|
||||
type = "carddav"
|
||||
url = "https://carddav.messagingengine.com/"
|
||||
username = ...
|
||||
password = ...
|
||||
url = "https://carddav.fastmail.com/"
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
.. _FastMail: https://www.fastmail.com/
|
||||
|
|
|
|||
|
|
@ -11,14 +11,14 @@ Vdirsyncer is regularly tested against iCloud_.
|
|||
[storage cal]
|
||||
type = "caldav"
|
||||
url = "https://caldav.icloud.com/"
|
||||
username = ...
|
||||
password = ...
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
[storage card]
|
||||
type = "carddav"
|
||||
url = "https://contacts.icloud.com/"
|
||||
username = ...
|
||||
password = ...
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
Problems:
|
||||
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ Further applications, with missing pages:
|
|||
|
||||
.. _khal: http://lostpackets.de/khal/
|
||||
.. _dayplanner: http://www.day-planner.org/
|
||||
.. _Orage: http://www.kolumbus.fi/~w408237/orage/
|
||||
.. _Orage: https://gitlab.xfce.org/apps/orage
|
||||
.. _rainlendar: http://www.rainlendar.net/
|
||||
.. _khard: https://github.com/scheibler/khard/
|
||||
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c
|
||||
|
|
|
|||
|
|
@ -7,8 +7,8 @@ Vdirsyncer is continuously tested against the latest version of nextCloud_::
|
|||
[storage cal]
|
||||
type = "caldav"
|
||||
url = "https://nextcloud.example.com/"
|
||||
username = ...
|
||||
password = ...
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
[storage card]
|
||||
type = "carddav"
|
||||
|
|
|
|||
|
|
@ -13,8 +13,8 @@ minutes).
|
|||
unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_
|
||||
into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``.
|
||||
|
||||
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.service
|
||||
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.timer
|
||||
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.service
|
||||
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.timer
|
||||
|
||||
Activation
|
||||
----------
|
||||
|
|
@ -29,7 +29,7 @@ It's quite possible that the default "every fifteen minutes" interval isn't to
|
|||
your liking. No default will suit everybody, but this is configurable by simply
|
||||
running::
|
||||
|
||||
systemctl --user edit vdirsyncer
|
||||
systemctl --user edit vdirsyncer.timer
|
||||
|
||||
This will open a blank editor, where you can override the timer by including::
|
||||
|
||||
|
|
|
|||
|
|
@ -10,4 +10,60 @@ todoman_ is a CLI task manager supporting :doc:`vdir </vdir>`. Its interface is
|
|||
similar to the ones of Taskwarrior or the todo.txt CLI app. You can use
|
||||
:storage:`filesystem` with it.
|
||||
|
||||
.. _todoman: https://hugo.barrera.io/journal/2015/03/30/introducing-todoman/
|
||||
.. _todoman: http://todoman.readthedocs.io/
|
||||
|
||||
Setting up vdirsyncer
|
||||
=====================
|
||||
|
||||
For this tutorial we will use NextCloud.
|
||||
|
||||
Assuming a config like this::
|
||||
|
||||
[general]
|
||||
status_path = "~/.vdirsyncer/status/"
|
||||
|
||||
[pair calendars]
|
||||
conflict_resolution = "b wins"
|
||||
a = "calendars_local"
|
||||
b = "calendars_dav"
|
||||
collections = ["from b"]
|
||||
metadata = ["color", "displayname"]
|
||||
|
||||
[storage calendars_local]
|
||||
type = "filesystem"
|
||||
path = "~/.calendars/"
|
||||
fileext = ".ics"
|
||||
|
||||
[storage calendars_dav]
|
||||
type = "caldav"
|
||||
url = "https://nextcloud.example.net/"
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
``vdirsyncer sync`` will then synchronize the calendars of your NextCloud_
|
||||
instance to subfolders of ``~/.calendar/``.
|
||||
|
||||
.. _NextCloud: https://nextcloud.com/
|
||||
|
||||
Setting up todoman
|
||||
==================
|
||||
|
||||
Write this to ``~/.config/todoman/config.py``::
|
||||
|
||||
path = "~/.calendars/*"
|
||||
|
||||
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
|
||||
which is exactly the tasklists we want. Now you can use ``todoman`` as
|
||||
described in its documentation_ and run ``vdirsyncer sync`` to synchronize the changes to NextCloud.
|
||||
|
||||
.. _glob: https://en.wikipedia.org/wiki/Glob_(programming)
|
||||
.. _documentation: http://todoman.readthedocs.io/
|
||||
|
||||
Other clients
|
||||
=============
|
||||
|
||||
The following client applications also synchronize over CalDAV:
|
||||
|
||||
- The Tasks-app found on iOS
|
||||
- `OpenTasks for Android <https://github.com/dmfs/opentasks>`_
|
||||
- The `Tasks <https://apps.nextcloud.com/apps/tasks>`_-app for NextCloud's web UI
|
||||
|
|
|
|||
|
|
@ -11,13 +11,13 @@ point vdirsyncer against the root of Xandikos like this::
|
|||
[storage cal]
|
||||
type = "caldav"
|
||||
url = "https://xandikos.example.com/"
|
||||
username = ...
|
||||
password = ...
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
[storage card]
|
||||
type = "carddav"
|
||||
url = "https://xandikos.example.com/"
|
||||
username = ...
|
||||
password = ...
|
||||
username = "..."
|
||||
password = "..."
|
||||
|
||||
.. _Xandikos: https://github.com/jelmer/xandikos
|
||||
|
|
|
|||
|
|
@ -56,8 +56,11 @@ have any file extensions.
|
|||
known from CSS, for example) are allowed. The prefixing ``#`` must be
|
||||
present.
|
||||
|
||||
- A file called ``displayname`` contains a UTF-8 encoded label that may be used
|
||||
to represent the vdir in UIs.
|
||||
- Files called ``displayname`` and ``description`` contain a UTF-8 encoded label/
|
||||
description, that may be used to represent the vdir in UIs.
|
||||
|
||||
- A file called ``order`` inside the vdir includes the relative order
|
||||
of the calendar, a property that is only relevant in UI design.
|
||||
|
||||
Writing to vdirs
|
||||
================
|
||||
|
|
@ -96,7 +99,7 @@ collections for faster search and lookup.
|
|||
|
||||
The reason items' filenames don't contain any extra information is simple: The
|
||||
solutions presented induced duplication of data, where one duplicate might
|
||||
become out of date because of bad implementations. As it stands right now, a
|
||||
become out of date because of bad implementations. As it stands right now, an
|
||||
index format could be formalized separately though.
|
||||
|
||||
vdirsyncer doesn't really have to bother about efficient item lookup, because
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@ program chosen:
|
|||
* Like with ``todo.txt``, Dropbox and friends are obviously agnostic/unaware of
|
||||
the files' contents. If a file has changed on both sides, Dropbox just copies
|
||||
both versions to both sides.
|
||||
|
||||
|
||||
This is a good idea if the user is directly interfacing with the file system
|
||||
and is able to resolve conflicts themselves. Here it might lead to
|
||||
erroneous behavior with e.g. ``khal``, since there are now two events with
|
||||
|
|
@ -50,7 +50,6 @@ program chosen:
|
|||
|
||||
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
|
||||
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
|
||||
e.g. DAVDroid_ or the apps by dmfs_.
|
||||
e.g. DAVx⁵_ or other apps bundled with smartphones.
|
||||
|
||||
.. _DAVDroid: http://davdroid.bitfire.at/
|
||||
.. _dmfs: https://dmfs.org/
|
||||
.. _DAVx⁵: https://www.davx5.com/
|
||||
|
|
|
|||
29
publish-release.yaml
Normal file
29
publish-release.yaml
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
# Push new version to PyPI.
|
||||
#
|
||||
# Usage: hut builds submit publish-release.yaml --follow
|
||||
|
||||
image: alpine/edge
|
||||
packages:
|
||||
- py3-build
|
||||
- py3-pip
|
||||
- py3-setuptools
|
||||
- py3-setuptools_scm
|
||||
- py3-wheel
|
||||
- twine
|
||||
sources:
|
||||
- https://github.com/pimutils/vdirsyncer
|
||||
secrets:
|
||||
- a36c8ba3-fba0-4338-b402-6aea0fbe771e # PyPI token.
|
||||
environment:
|
||||
CI: true
|
||||
tasks:
|
||||
- check-tag: |
|
||||
cd vdirsyncer
|
||||
git fetch --tags
|
||||
|
||||
# Stop here unless this is a tag.
|
||||
git describe --exact-match --tags || complete-build
|
||||
- publish: |
|
||||
cd vdirsyncer
|
||||
python -m build --no-isolation
|
||||
twine upload --non-interactive dist/*
|
||||
114
pyproject.toml
Normal file
114
pyproject.toml
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
# Vdirsyncer synchronizes calendars and contacts.
|
||||
#
|
||||
# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
|
||||
# how to package vdirsyncer.
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=64", "setuptools_scm>=8"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "vdirsyncer"
|
||||
authors = [
|
||||
{name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
|
||||
]
|
||||
description = "Synchronize calendars and contacts"
|
||||
readme = "README.rst"
|
||||
requires-python = ">=3.9"
|
||||
keywords = ["todo", "task", "icalendar", "cli"]
|
||||
license = "BSD-3-Clause"
|
||||
license-files = ["LICENSE"]
|
||||
classifiers = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"Environment :: Console",
|
||||
"Operating System :: POSIX",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Topic :: Internet",
|
||||
"Topic :: Office/Business :: Scheduling",
|
||||
"Topic :: Utilities",
|
||||
]
|
||||
dependencies = [
|
||||
"click>=5.0,<9.0",
|
||||
"click-log>=0.3.0,<0.5.0",
|
||||
"requests>=2.20.0",
|
||||
"aiohttp>=3.8.2,<4.0.0",
|
||||
"aiostream>=0.4.3,<0.8.0",
|
||||
"tenacity>=9.0.0",
|
||||
]
|
||||
dynamic = ["version"]
|
||||
|
||||
[project.optional-dependencies]
|
||||
google = ["aiohttp-oauthlib"]
|
||||
test = [
|
||||
"hypothesis>=6.72.0,<7.0.0",
|
||||
"pytest",
|
||||
"pytest-cov",
|
||||
"pytest-httpserver",
|
||||
"trustme",
|
||||
"pytest-asyncio",
|
||||
"aioresponses",
|
||||
]
|
||||
docs = [
|
||||
"sphinx!=1.4.7",
|
||||
"sphinx_rtd_theme",
|
||||
"setuptools_scm",
|
||||
]
|
||||
check = [
|
||||
"mypy",
|
||||
"ruff",
|
||||
"types-docutils",
|
||||
"types-requests",
|
||||
"types-setuptools",
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
vdirsyncer = "vdirsyncer.cli:app"
|
||||
|
||||
[tool.ruff.lint]
|
||||
extend-select = [
|
||||
"B0",
|
||||
"C4",
|
||||
"E",
|
||||
"I",
|
||||
"RSE",
|
||||
"SIM",
|
||||
"TID",
|
||||
"UP",
|
||||
"W",
|
||||
]
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
force-single-line = true
|
||||
required-imports = ["from __future__ import annotations"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = """
|
||||
--tb=short
|
||||
--cov-config .coveragerc
|
||||
--cov=vdirsyncer
|
||||
--cov-report=term-missing:skip-covered
|
||||
--no-cov-on-fail
|
||||
--color=yes
|
||||
"""
|
||||
# filterwarnings=error
|
||||
asyncio_default_fixture_loop_scope = "function"
|
||||
|
||||
[tool.mypy]
|
||||
ignore_missing_imports = true
|
||||
|
||||
[tool.coverage.report]
|
||||
exclude_lines = [
|
||||
"if TYPE_CHECKING:",
|
||||
]
|
||||
|
||||
[tool.setuptools.packages.find]
|
||||
include = ["vdirsyncer*"]
|
||||
|
||||
[tool.setuptools_scm]
|
||||
write_to = "vdirsyncer/version.py"
|
||||
version_scheme = "no-guess-dev"
|
||||
49
scripts/_build_deb_in_container.bash
Normal file
49
scripts/_build_deb_in_container.bash
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
#!/bin/bash
|
||||
#
|
||||
# This script is mean to be run inside a dedicated container,
|
||||
# and not interatively.
|
||||
|
||||
set -ex
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
apt-get update
|
||||
apt-get install -y build-essential fakeroot debhelper git
|
||||
apt-get install -y python3-all python3-pip python3-venv
|
||||
apt-get install -y ruby ruby-dev
|
||||
|
||||
pip3 install virtualenv virtualenv-tools3
|
||||
virtualenv -p python3 /vdirsyncer/env/
|
||||
|
||||
gem install fpm
|
||||
|
||||
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
|
||||
pip3 uninstall -y virtualenv
|
||||
echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
|
||||
chmod +x /usr/local/bin/virtualenv
|
||||
|
||||
cp -r /source/ /vdirsyncer/vdirsyncer/
|
||||
cd /vdirsyncer/vdirsyncer/ || exit 2
|
||||
mkdir /vdirsyncer/pkgs/
|
||||
|
||||
basename -- *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
|
||||
# XXX: Do I really not want google support included?
|
||||
(echo -n *.tar.gz; echo '[google]') | tee requirements.txt
|
||||
fpm --verbose \
|
||||
--input-type virtualenv \
|
||||
--output-type deb \
|
||||
--name "vdirsyncer-latest" \
|
||||
--version "$(cat version)" \
|
||||
--prefix /opt/venvs/vdirsyncer-latest \
|
||||
--depends python3 \
|
||||
requirements.txt
|
||||
|
||||
mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
|
||||
|
||||
cd /vdirsyncer/pkgs/
|
||||
dpkg -i -- *.deb
|
||||
|
||||
# Check that it works:
|
||||
LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
|
||||
|
||||
cp -- *.deb /source/
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
ARG distro
|
||||
ARG distrover
|
||||
|
||||
FROM $distro:$distrover
|
||||
|
||||
ARG distro
|
||||
ARG distrover
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y build-essential fakeroot debhelper git
|
||||
RUN apt-get install -y python3-all python3-pip
|
||||
RUN apt-get install -y ruby ruby-dev
|
||||
RUN apt-get install -y python-all python-pip
|
||||
|
||||
RUN gem install fpm
|
||||
|
||||
RUN pip2 install virtualenv-tools
|
||||
RUN pip3 install virtualenv
|
||||
RUN virtualenv -p python3 /vdirsyncer/env/
|
||||
|
||||
COPY . /vdirsyncer/vdirsyncer/
|
||||
WORKDIR /vdirsyncer/vdirsyncer/
|
||||
RUN mkdir /vdirsyncer/pkgs/
|
||||
|
||||
RUN basename *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
|
||||
RUN (echo -n *.tar.gz; echo '[google]') | tee requirements.txt
|
||||
RUN . /vdirsyncer/env/bin/activate; fpm -s virtualenv -t deb \
|
||||
-n "vdirsyncer-latest" \
|
||||
-v "$(cat version)" \
|
||||
--prefix /opt/venvs/vdirsyncer-latest \
|
||||
requirements.txt
|
||||
|
||||
RUN mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
|
||||
|
||||
WORKDIR /vdirsyncer/pkgs/
|
||||
RUN dpkg -i *.deb
|
||||
RUN LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
import itertools
|
||||
import json
|
||||
import sys
|
||||
|
||||
python_versions = ("3.4", "3.5", "3.6")
|
||||
latest_python = "3.6"
|
||||
|
||||
cfg = {}
|
||||
|
||||
cfg['sudo'] = True
|
||||
cfg['dist'] = 'trusty'
|
||||
cfg['language'] = 'python'
|
||||
cfg['cache'] = 'pip'
|
||||
|
||||
cfg['git'] = {
|
||||
'submodules': False
|
||||
}
|
||||
|
||||
cfg['branches'] = {
|
||||
'only': ['auto', 'master', '/^.*-maintenance$/']
|
||||
}
|
||||
|
||||
cfg['install'] = """
|
||||
. scripts/travis-install.sh
|
||||
pip install -U pip setuptools
|
||||
pip install wheel
|
||||
make -e install-dev
|
||||
make -e install-$BUILD
|
||||
""".strip().splitlines()
|
||||
|
||||
cfg['script'] = ["make -e $BUILD"]
|
||||
|
||||
matrix = []
|
||||
cfg['matrix'] = {'include': matrix}
|
||||
|
||||
matrix.append({
|
||||
'python': latest_python,
|
||||
'env': 'BUILD=style'
|
||||
})
|
||||
|
||||
|
||||
for python, requirements in itertools.product(python_versions,
|
||||
("devel", "release", "minimal")):
|
||||
dav_servers = ("radicale", "xandikos")
|
||||
|
||||
if python == latest_python and requirements == "release":
|
||||
dav_servers += ("fastmail",)
|
||||
|
||||
for dav_server in dav_servers:
|
||||
job = {
|
||||
'python': python,
|
||||
'env': ("BUILD=test "
|
||||
"DAV_SERVER={dav_server} "
|
||||
"REQUIREMENTS={requirements} "
|
||||
.format(dav_server=dav_server,
|
||||
requirements=requirements))
|
||||
}
|
||||
|
||||
build_prs = dav_server not in ("fastmail", "davical", "icloud")
|
||||
if not build_prs:
|
||||
job['if'] = 'NOT (type IN (pull_request))'
|
||||
|
||||
matrix.append(job)
|
||||
|
||||
matrix.append({
|
||||
'python': latest_python,
|
||||
'env': ("BUILD=test "
|
||||
"ETESYNC_TESTS=true "
|
||||
"REQUIREMENTS=latest")
|
||||
})
|
||||
|
||||
matrix.append({
|
||||
'language': 'generic',
|
||||
'os': 'osx',
|
||||
'env': 'BUILD=test'
|
||||
})
|
||||
|
||||
json.dump(cfg, sys.stdout, sort_keys=True, indent=2)
|
||||
|
|
@ -1,19 +1,56 @@
|
|||
#!/bin/sh
|
||||
set -xe
|
||||
distro=$1
|
||||
distrover=$2
|
||||
name=vdirsyncer-$distro-$distrover:latest
|
||||
context="$(mktemp -d)"
|
||||
|
||||
python setup.py sdist -d "$context"
|
||||
cp scripts/dpkg.Dockerfile "$context/Dockerfile"
|
||||
set -xeu
|
||||
|
||||
docker build \
|
||||
--build-arg distro=$distro \
|
||||
--build-arg distrover=$distrover \
|
||||
-t $name \
|
||||
"$context"
|
||||
SCRIPT_PATH=$(realpath "$0")
|
||||
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
|
||||
|
||||
docker run $name tar -c -C /vdirsyncer pkgs | tar x -C "$context"
|
||||
package_cloud push pimutils/vdirsyncer/$distro/$distrover $context/pkgs/*.deb
|
||||
rm -rf "$context"
|
||||
# E.g.: debian, ubuntu
|
||||
DISTRO=${DISTRO:1}
|
||||
# E.g.: bullseye, bookwork
|
||||
DISTROVER=${DISTROVER:2}
|
||||
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
|
||||
CONTEXT="$(mktemp -d)"
|
||||
|
||||
DEST_DIR="$SCRIPT_DIR/../$DISTRO-$DISTROVER"
|
||||
|
||||
cleanup() {
|
||||
rm -rf "$CONTEXT"
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
# Prepare files.
|
||||
cp scripts/_build_deb_in_container.bash "$CONTEXT"
|
||||
python setup.py sdist -d "$CONTEXT"
|
||||
|
||||
docker run -it \
|
||||
--name "$CONTAINER_NAME" \
|
||||
--volume "$CONTEXT:/source" \
|
||||
"$DISTRO:$DISTROVER" \
|
||||
bash /source/_build_deb_in_container.bash
|
||||
|
||||
# Keep around the package filename.
|
||||
PACKAGE=$(ls "$CONTEXT"/*.deb)
|
||||
PACKAGE=$(basename "$PACKAGE")
|
||||
|
||||
# Save the build deb files.
|
||||
mkdir -p "$DEST_DIR"
|
||||
cp "$CONTEXT"/*.deb "$DEST_DIR"
|
||||
|
||||
echo Build complete! 🤖
|
||||
|
||||
# Packagecloud uses some internal IDs for each distro.
|
||||
# Extract the one for the distro we're publishing.
|
||||
DISTRO_ID=$(
|
||||
curl -s \
|
||||
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/distributions.json | \
|
||||
jq '.deb | .[] | select(.index_name=="'"$DISTRO"'") | .versions | .[] | select(.index_name=="'"$DISTROVER"'") | .id'
|
||||
)
|
||||
|
||||
# Actually push the package.
|
||||
curl \
|
||||
-F "package[distro_version_id]=$DISTRO_ID" \
|
||||
-F "package[package_file]=@$DEST_DIR/$PACKAGE" \
|
||||
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/repos/pimutils/vdirsyncer/packages.json
|
||||
|
||||
echo Done! ✨
|
||||
|
|
|
|||
|
|
@ -1,10 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
||||
# The OS X VM doesn't have any Python support at all
|
||||
# See https://github.com/travis-ci/travis-ci/issues/2312
|
||||
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
|
||||
brew update
|
||||
brew install python3
|
||||
virtualenv -p python3 $HOME/osx-py3
|
||||
. $HOME/osx-py3/bin/activate
|
||||
fi
|
||||
14
setup.cfg
14
setup.cfg
|
|
@ -1,14 +0,0 @@
|
|||
[wheel]
|
||||
universal = 1
|
||||
|
||||
[tool:pytest]
|
||||
norecursedirs = tests/storage/servers/*
|
||||
addopts = --tb=short
|
||||
|
||||
[flake8]
|
||||
# E731: Use a def instead of lambda expr
|
||||
# E743: Ambiguous function definition
|
||||
ignore = E731, E743
|
||||
select = C,E,F,W,B,B9
|
||||
exclude = .eggs, tests/storage/servers/owncloud/, tests/storage/servers/nextcloud/, tests/storage/servers/baikal/, build/
|
||||
application-package-names = tests,vdirsyncer
|
||||
104
setup.py
104
setup.py
|
|
@ -1,104 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Vdirsyncer synchronizes calendars and contacts.
|
||||
|
||||
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
|
||||
how to package vdirsyncer.
|
||||
'''
|
||||
|
||||
|
||||
from setuptools import Command, find_packages, setup
|
||||
|
||||
|
||||
requirements = [
|
||||
# https://github.com/mitsuhiko/click/issues/200
|
||||
'click>=5.0',
|
||||
'click-log>=0.3.0, <0.4.0',
|
||||
|
||||
# https://github.com/pimutils/vdirsyncer/issues/478
|
||||
'click-threading>=0.2',
|
||||
|
||||
# !=2.9.0: https://github.com/kennethreitz/requests/issues/2930
|
||||
#
|
||||
# >=2.4.1: https://github.com/shazow/urllib3/pull/444
|
||||
# Without the above pull request, `verify=False` also disables fingerprint
|
||||
# validation. This is *not* what we want, and it's not possible to
|
||||
# replicate vdirsyncer's current behavior (verifying fingerprints without
|
||||
# verifying against CAs) with older versions of urllib3.
|
||||
'requests >=2.4.1, !=2.9.0',
|
||||
|
||||
# https://github.com/sigmavirus24/requests-toolbelt/pull/28
|
||||
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54
|
||||
'requests_toolbelt >=0.4.0',
|
||||
|
||||
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
|
||||
'atomicwrites>=0.1.7'
|
||||
]
|
||||
|
||||
|
||||
class PrintRequirements(Command):
|
||||
description = 'Prints minimal requirements'
|
||||
user_options = []
|
||||
|
||||
def initialize_options(self):
|
||||
pass
|
||||
|
||||
def finalize_options(self):
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
for requirement in requirements:
|
||||
print(requirement.replace(">", "=").replace(" ", ""))
|
||||
|
||||
|
||||
with open('README.rst') as f:
|
||||
long_description = f.read()
|
||||
|
||||
|
||||
setup(
|
||||
# General metadata
|
||||
name='vdirsyncer',
|
||||
author='Markus Unterwaditzer',
|
||||
author_email='markus@unterwaditzer.net',
|
||||
url='https://github.com/pimutils/vdirsyncer',
|
||||
description='Synchronize calendars and contacts',
|
||||
license='BSD',
|
||||
long_description=long_description,
|
||||
|
||||
# Runtime dependencies
|
||||
install_requires=requirements,
|
||||
|
||||
# Optional dependencies
|
||||
extras_require={
|
||||
'google': ['requests-oauthlib'],
|
||||
'etesync': ['etesync']
|
||||
},
|
||||
|
||||
# Build dependencies
|
||||
setup_requires=['setuptools_scm != 1.12.0'],
|
||||
|
||||
# Other
|
||||
packages=find_packages(exclude=['tests.*', 'tests']),
|
||||
include_package_data=True,
|
||||
cmdclass={
|
||||
'minimal_requirements': PrintRequirements
|
||||
},
|
||||
use_scm_version={
|
||||
'write_to': 'vdirsyncer/version.py'
|
||||
},
|
||||
entry_points={
|
||||
'console_scripts': ['vdirsyncer = vdirsyncer.cli:main']
|
||||
},
|
||||
classifiers=[
|
||||
'Development Status :: 4 - Beta',
|
||||
'Environment :: Console',
|
||||
'License :: OSI Approved :: BSD License',
|
||||
'Operating System :: POSIX',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.4',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Topic :: Internet',
|
||||
'Topic :: Utilities',
|
||||
],
|
||||
)
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
hypothesis>=3.1
|
||||
pytest
|
||||
pytest-localserver
|
||||
pytest-subtesthack
|
||||
|
|
@ -1,27 +1,26 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
Test suite for vdirsyncer.
|
||||
'''
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hypothesis.strategies as st
|
||||
import urllib3.exceptions
|
||||
|
||||
from vdirsyncer.vobject import normalize_item
|
||||
|
||||
import urllib3
|
||||
import urllib3.exceptions
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
|
||||
def blow_up(*a, **kw):
|
||||
raise AssertionError('Did not expect to be called.')
|
||||
raise AssertionError("Did not expect to be called.")
|
||||
|
||||
|
||||
def assert_item_equals(a, b):
|
||||
assert normalize_item(a) == normalize_item(b)
|
||||
|
||||
|
||||
VCARD_TEMPLATE = u'''BEGIN:VCARD
|
||||
VCARD_TEMPLATE = """BEGIN:VCARD
|
||||
VERSION:3.0
|
||||
FN:Cyrus Daboo
|
||||
N:Daboo;Cyrus;;;
|
||||
|
|
@ -35,9 +34,9 @@ TEL;TYPE=FAX:412 605 0705
|
|||
URL;VALUE=URI:http://www.example.com
|
||||
X-SOMETHING:{r}
|
||||
UID:{uid}
|
||||
END:VCARD'''
|
||||
END:VCARD"""
|
||||
|
||||
TASK_TEMPLATE = u'''BEGIN:VCALENDAR
|
||||
TASK_TEMPLATE = """BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//dmfs.org//mimedir.icalendar//EN
|
||||
BEGIN:VTODO
|
||||
|
|
@ -49,25 +48,30 @@ SUMMARY:Book: Kowlani - Tödlicher Staub
|
|||
X-SOMETHING:{r}
|
||||
UID:{uid}
|
||||
END:VTODO
|
||||
END:VCALENDAR'''
|
||||
END:VCALENDAR"""
|
||||
|
||||
|
||||
BARE_EVENT_TEMPLATE = u'''BEGIN:VEVENT
|
||||
BARE_EVENT_TEMPLATE = """BEGIN:VEVENT
|
||||
DTSTART:19970714T170000Z
|
||||
DTEND:19970715T035959Z
|
||||
SUMMARY:Bastille Day Party
|
||||
X-SOMETHING:{r}
|
||||
UID:{uid}
|
||||
END:VEVENT'''
|
||||
END:VEVENT"""
|
||||
|
||||
|
||||
EVENT_TEMPLATE = u'''BEGIN:VCALENDAR
|
||||
EVENT_TEMPLATE = (
|
||||
"""BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||
''' + BARE_EVENT_TEMPLATE + u'''
|
||||
END:VCALENDAR'''
|
||||
"""
|
||||
+ BARE_EVENT_TEMPLATE
|
||||
+ """
|
||||
END:VCALENDAR"""
|
||||
)
|
||||
|
||||
EVENT_WITH_TIMEZONE_TEMPLATE = '''BEGIN:VCALENDAR
|
||||
EVENT_WITH_TIMEZONE_TEMPLATE = (
|
||||
"""BEGIN:VCALENDAR
|
||||
BEGIN:VTIMEZONE
|
||||
TZID:Europe/Rome
|
||||
X-LIC-LOCATION:Europe/Rome
|
||||
|
|
@ -86,26 +90,21 @@ DTSTART:19701025T030000
|
|||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
||||
END:STANDARD
|
||||
END:VTIMEZONE
|
||||
''' + BARE_EVENT_TEMPLATE + '''
|
||||
END:VCALENDAR'''
|
||||
"""
|
||||
+ BARE_EVENT_TEMPLATE
|
||||
+ """
|
||||
END:VCALENDAR"""
|
||||
)
|
||||
|
||||
|
||||
SIMPLE_TEMPLATE = u'''BEGIN:FOO
|
||||
SIMPLE_TEMPLATE = """BEGIN:FOO
|
||||
UID:{uid}
|
||||
X-SOMETHING:{r}
|
||||
HAHA:YES
|
||||
END:FOO'''
|
||||
END:FOO"""
|
||||
|
||||
printable_characters_strategy = st.text(
|
||||
st.characters(blacklist_categories=(
|
||||
'Cc', 'Cs'
|
||||
))
|
||||
)
|
||||
printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs")))
|
||||
|
||||
uid_strategy = st.text(
|
||||
st.characters(blacklist_categories=(
|
||||
'Zs', 'Zl', 'Zp',
|
||||
'Cc', 'Cs'
|
||||
)),
|
||||
min_size=1
|
||||
st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
|
||||
).filter(lambda x: x.strip() == x)
|
||||
|
|
|
|||
|
|
@ -1,44 +1,70 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
"""
|
||||
General-purpose fixtures for vdirsyncer's testsuite.
|
||||
'''
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import aiohttp
|
||||
import click_log
|
||||
|
||||
from hypothesis import HealthCheck, Verbosity, settings
|
||||
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
from hypothesis import HealthCheck
|
||||
from hypothesis import Verbosity
|
||||
from hypothesis import settings
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def setup_logging():
|
||||
click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)
|
||||
click_log.basic_config("vdirsyncer").setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
try:
|
||||
import pytest_benchmark
|
||||
except ImportError:
|
||||
|
||||
@pytest.fixture
|
||||
def benchmark():
|
||||
return lambda x: x()
|
||||
|
||||
else:
|
||||
del pytest_benchmark
|
||||
|
||||
|
||||
settings.suppress_health_check = [HealthCheck.too_slow]
|
||||
settings.register_profile(
|
||||
"ci",
|
||||
settings(
|
||||
max_examples=1000,
|
||||
verbosity=Verbosity.verbose,
|
||||
suppress_health_check=[HealthCheck.too_slow],
|
||||
),
|
||||
)
|
||||
settings.register_profile(
|
||||
"deterministic",
|
||||
settings(
|
||||
derandomize=True,
|
||||
suppress_health_check=list(HealthCheck),
|
||||
),
|
||||
)
|
||||
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
|
||||
|
||||
settings.register_profile("ci", settings(
|
||||
max_examples=1000,
|
||||
verbosity=Verbosity.verbose,
|
||||
))
|
||||
settings.register_profile("deterministic", settings(
|
||||
derandomize=True,
|
||||
perform_health_check=False
|
||||
))
|
||||
|
||||
if os.environ.get('DETERMINISTIC_TESTS', 'false').lower() == 'true':
|
||||
if os.environ.get("DETERMINISTIC_TESTS", "false").lower() == "true":
|
||||
settings.load_profile("deterministic")
|
||||
elif os.environ.get('CI', 'false').lower() == 'true':
|
||||
elif os.environ.get("CI", "false").lower() == "true":
|
||||
settings.load_profile("ci")
|
||||
else:
|
||||
settings.load_profile("dev")
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def aio_session():
|
||||
async with aiohttp.ClientSession() as session:
|
||||
yield session
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def aio_connector():
|
||||
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
||||
yield conn
|
||||
|
|
|
|||
|
|
@ -1,27 +1,29 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import random
|
||||
import uuid
|
||||
|
||||
import textwrap
|
||||
from urllib.parse import quote as urlquote, unquote as urlunquote
|
||||
|
||||
import hypothesis.strategies as st
|
||||
from hypothesis import given
|
||||
import uuid
|
||||
from urllib.parse import quote as urlquote
|
||||
from urllib.parse import unquote as urlunquote
|
||||
|
||||
import aiostream
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
|
||||
from tests import EVENT_TEMPLATE
|
||||
from tests import TASK_TEMPLATE
|
||||
from tests import VCARD_TEMPLATE
|
||||
from tests import assert_item_equals
|
||||
from tests import normalize_item
|
||||
from vdirsyncer import exceptions
|
||||
from vdirsyncer.storage.base import normalize_meta_value
|
||||
from vdirsyncer.vobject import Item
|
||||
|
||||
from .. import EVENT_TEMPLATE, TASK_TEMPLATE, VCARD_TEMPLATE, \
|
||||
assert_item_equals, normalize_item, printable_characters_strategy
|
||||
|
||||
|
||||
def get_server_mixin(server_name):
|
||||
from . import __name__ as base
|
||||
x = __import__('{}.servers.{}'.format(base, server_name), fromlist=[''])
|
||||
|
||||
x = __import__(f"{base}.servers.{server_name}", fromlist=[""])
|
||||
return x.ServerMixin
|
||||
|
||||
|
||||
|
|
@ -31,35 +33,36 @@ def format_item(item_template, uid=None):
|
|||
return Item(item_template.format(r=r, uid=uid or r))
|
||||
|
||||
|
||||
class StorageTests(object):
|
||||
class StorageTests:
|
||||
storage_class = None
|
||||
supports_collections = True
|
||||
supports_metadata = True
|
||||
|
||||
@pytest.fixture(params=['VEVENT', 'VTODO', 'VCARD'])
|
||||
@pytest.fixture(params=["VEVENT", "VTODO", "VCARD"])
|
||||
def item_type(self, request):
|
||||
'''Parametrize with all supported item types.'''
|
||||
"""Parametrize with all supported item types."""
|
||||
return request.param
|
||||
|
||||
@pytest.fixture
|
||||
def get_storage_args(self):
|
||||
'''
|
||||
"""
|
||||
Return a function with the following properties:
|
||||
|
||||
:param collection: The name of the collection to create and use.
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
@pytest.fixture
|
||||
def s(self, get_storage_args):
|
||||
return self.storage_class(**get_storage_args())
|
||||
@pytest_asyncio.fixture
|
||||
async def s(self, get_storage_args):
|
||||
rv = self.storage_class(**await get_storage_args())
|
||||
return rv
|
||||
|
||||
@pytest.fixture
|
||||
def get_item(self, item_type):
|
||||
template = {
|
||||
'VEVENT': EVENT_TEMPLATE,
|
||||
'VTODO': TASK_TEMPLATE,
|
||||
'VCARD': VCARD_TEMPLATE,
|
||||
"VEVENT": EVENT_TEMPLATE,
|
||||
"VTODO": TASK_TEMPLATE,
|
||||
"VCARD": VCARD_TEMPLATE,
|
||||
}[item_type]
|
||||
|
||||
return lambda **kw: format_item(template, **kw)
|
||||
|
|
@ -67,255 +70,321 @@ class StorageTests(object):
|
|||
@pytest.fixture
|
||||
def requires_collections(self):
|
||||
if not self.supports_collections:
|
||||
pytest.skip('This storage does not support collections.')
|
||||
pytest.skip("This storage does not support collections.")
|
||||
|
||||
@pytest.fixture
|
||||
def requires_metadata(self):
|
||||
if not self.supports_metadata:
|
||||
pytest.skip('This storage does not support metadata.')
|
||||
pytest.skip("This storage does not support metadata.")
|
||||
|
||||
def test_generic(self, s, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_generic(self, s, get_item):
|
||||
items = [get_item() for i in range(1, 10)]
|
||||
hrefs = []
|
||||
for item in items:
|
||||
href, etag = s.upload(item)
|
||||
href, etag = await s.upload(item)
|
||||
if etag is None:
|
||||
_, etag = s.get(href)
|
||||
_, etag = await s.get(href)
|
||||
hrefs.append((href, etag))
|
||||
hrefs.sort()
|
||||
assert hrefs == sorted(s.list())
|
||||
assert hrefs == sorted(await aiostream.stream.list(s.list()))
|
||||
for href, etag in hrefs:
|
||||
assert isinstance(href, (str, bytes))
|
||||
assert isinstance(etag, (str, bytes))
|
||||
assert s.has(href)
|
||||
item, etag2 = s.get(href)
|
||||
assert await s.has(href)
|
||||
item, etag2 = await s.get(href)
|
||||
assert etag == etag2
|
||||
|
||||
def test_empty_get_multi(self, s):
|
||||
assert list(s.get_multi([])) == []
|
||||
@pytest.mark.asyncio
|
||||
async def test_empty_get_multi(self, s):
|
||||
assert await aiostream.stream.list(s.get_multi([])) == []
|
||||
|
||||
def test_get_multi_duplicates(self, s, get_item):
|
||||
href, etag = s.upload(get_item())
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_multi_duplicates(self, s, get_item):
|
||||
href, etag = await s.upload(get_item())
|
||||
if etag is None:
|
||||
_, etag = s.get(href)
|
||||
(href2, item, etag2), = s.get_multi([href] * 2)
|
||||
_, etag = await s.get(href)
|
||||
((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
|
||||
assert href2 == href
|
||||
assert etag2 == etag
|
||||
|
||||
def test_upload_already_existing(self, s, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_upload_already_existing(self, s, get_item):
|
||||
item = get_item()
|
||||
s.upload(item)
|
||||
await s.upload(item)
|
||||
with pytest.raises(exceptions.PreconditionFailed):
|
||||
s.upload(item)
|
||||
await s.upload(item)
|
||||
|
||||
def test_upload(self, s, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_upload(self, s, get_item):
|
||||
item = get_item()
|
||||
href, etag = s.upload(item)
|
||||
assert_item_equals(s.get(href)[0], item)
|
||||
href, _etag = await s.upload(item)
|
||||
assert_item_equals((await s.get(href))[0], item)
|
||||
|
||||
def test_update(self, s, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_update(self, s, get_item):
|
||||
item = get_item()
|
||||
href, etag = s.upload(item)
|
||||
href, etag = await s.upload(item)
|
||||
if etag is None:
|
||||
_, etag = s.get(href)
|
||||
assert_item_equals(s.get(href)[0], item)
|
||||
_, etag = await s.get(href)
|
||||
assert_item_equals((await s.get(href))[0], item)
|
||||
|
||||
new_item = get_item(uid=item.uid)
|
||||
new_etag = s.update(href, new_item, etag)
|
||||
new_etag = await s.update(href, new_item, etag)
|
||||
if new_etag is None:
|
||||
_, new_etag = s.get(href)
|
||||
_, new_etag = await s.get(href)
|
||||
# See https://github.com/pimutils/vdirsyncer/issues/48
|
||||
assert isinstance(new_etag, (bytes, str))
|
||||
assert_item_equals(s.get(href)[0], new_item)
|
||||
assert_item_equals((await s.get(href))[0], new_item)
|
||||
|
||||
def test_update_nonexisting(self, s, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_nonexisting(self, s, get_item):
|
||||
item = get_item()
|
||||
with pytest.raises(exceptions.PreconditionFailed):
|
||||
s.update('huehue', item, '"123"')
|
||||
await s.update("huehue", item, '"123"')
|
||||
|
||||
def test_wrong_etag(self, s, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_wrong_etag(self, s, get_item):
|
||||
item = get_item()
|
||||
href, etag = s.upload(item)
|
||||
href, _etag = await s.upload(item)
|
||||
with pytest.raises(exceptions.PreconditionFailed):
|
||||
s.update(href, item, '"lolnope"')
|
||||
await s.update(href, item, '"lolnope"')
|
||||
with pytest.raises(exceptions.PreconditionFailed):
|
||||
s.delete(href, '"lolnope"')
|
||||
await s.delete(href, '"lolnope"')
|
||||
|
||||
def test_delete(self, s, get_item):
|
||||
href, etag = s.upload(get_item())
|
||||
s.delete(href, etag)
|
||||
assert not list(s.list())
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete(self, s, get_item):
|
||||
href, etag = await s.upload(get_item())
|
||||
await s.delete(href, etag)
|
||||
assert not await aiostream.stream.list(s.list())
|
||||
|
||||
def test_delete_nonexisting(self, s, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_delete_nonexisting(self, s, get_item):
|
||||
with pytest.raises(exceptions.PreconditionFailed):
|
||||
s.delete('1', '"123"')
|
||||
await s.delete("1", '"123"')
|
||||
|
||||
def test_list(self, s, get_item):
|
||||
assert not list(s.list())
|
||||
href, etag = s.upload(get_item())
|
||||
@pytest.mark.asyncio
|
||||
async def test_list(self, s, get_item):
|
||||
assert not await aiostream.stream.list(s.list())
|
||||
href, etag = await s.upload(get_item())
|
||||
if etag is None:
|
||||
_, etag = s.get(href)
|
||||
assert list(s.list()) == [(href, etag)]
|
||||
_, etag = await s.get(href)
|
||||
assert await aiostream.stream.list(s.list()) == [(href, etag)]
|
||||
|
||||
def test_has(self, s, get_item):
|
||||
assert not s.has('asd')
|
||||
href, etag = s.upload(get_item())
|
||||
assert s.has(href)
|
||||
assert not s.has('asd')
|
||||
s.delete(href, etag)
|
||||
assert not s.has(href)
|
||||
@pytest.mark.asyncio
|
||||
async def test_has(self, s, get_item):
|
||||
assert not await s.has("asd")
|
||||
href, etag = await s.upload(get_item())
|
||||
assert await s.has(href)
|
||||
assert not await s.has("asd")
|
||||
await s.delete(href, etag)
|
||||
assert not await s.has(href)
|
||||
|
||||
def test_update_others_stay_the_same(self, s, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_update_others_stay_the_same(self, s, get_item):
|
||||
info = {}
|
||||
for _ in range(4):
|
||||
href, etag = s.upload(get_item())
|
||||
href, etag = await s.upload(get_item())
|
||||
if etag is None:
|
||||
_, etag = s.get(href)
|
||||
_, etag = await s.get(href)
|
||||
info[href] = etag
|
||||
|
||||
assert dict(
|
||||
(href, etag) for href, item, etag
|
||||
in s.get_multi(href for href, etag in info.items())
|
||||
) == info
|
||||
items = await aiostream.stream.list(
|
||||
s.get_multi(href for href, etag in info.items())
|
||||
)
|
||||
assert {href: etag for href, item, etag in items} == info
|
||||
|
||||
def test_repr(self, s, get_storage_args):
|
||||
def test_repr(self, s):
|
||||
assert self.storage_class.__name__ in repr(s)
|
||||
assert s.instance_name is None
|
||||
|
||||
def test_discover(self, requires_collections, get_storage_args, get_item):
|
||||
@pytest.mark.asyncio
|
||||
async def test_discover(
|
||||
self,
|
||||
requires_collections,
|
||||
get_storage_args,
|
||||
get_item,
|
||||
aio_connector,
|
||||
):
|
||||
collections = set()
|
||||
for i in range(1, 5):
|
||||
collection = 'test{}'.format(i)
|
||||
s = self.storage_class(**get_storage_args(collection=collection))
|
||||
assert not list(s.list())
|
||||
s.upload(get_item())
|
||||
collection = f"test{i}"
|
||||
s = self.storage_class(**await get_storage_args(collection=collection))
|
||||
assert not await aiostream.stream.list(s.list())
|
||||
await s.upload(get_item())
|
||||
collections.add(s.collection)
|
||||
|
||||
actual = set(
|
||||
c['collection'] for c in
|
||||
self.storage_class.discover(**get_storage_args(collection=None))
|
||||
discovered = await aiostream.stream.list(
|
||||
self.storage_class.discover(**await get_storage_args(collection=None))
|
||||
)
|
||||
actual = {c["collection"] for c in discovered}
|
||||
|
||||
assert actual >= collections
|
||||
|
||||
def test_create_collection(self, requires_collections, get_storage_args,
|
||||
get_item):
|
||||
if getattr(self, 'dav_server', '') in \
|
||||
('icloud', 'fastmail', 'davical'):
|
||||
pytest.skip('Manual cleanup would be necessary.')
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_collection(
|
||||
self,
|
||||
requires_collections,
|
||||
get_storage_args,
|
||||
get_item,
|
||||
):
|
||||
if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
|
||||
pytest.skip("Manual cleanup would be necessary.")
|
||||
if getattr(self, "dav_server", "") == "radicale":
|
||||
pytest.skip("Radicale does not support collection creation")
|
||||
|
||||
args = get_storage_args(collection=None)
|
||||
args['collection'] = 'test'
|
||||
args = await get_storage_args(collection=None)
|
||||
args["collection"] = "test"
|
||||
|
||||
s = self.storage_class(
|
||||
**self.storage_class.create_collection(**args)
|
||||
s = self.storage_class(**await self.storage_class.create_collection(**args))
|
||||
|
||||
href = (await s.upload(get_item()))[0]
|
||||
assert href in await aiostream.stream.list(
|
||||
(href async for href, etag in s.list())
|
||||
)
|
||||
|
||||
href = s.upload(get_item())[0]
|
||||
assert href in set(href for href, etag in s.list())
|
||||
|
||||
def test_discover_collection_arg(self, requires_collections,
|
||||
get_storage_args):
|
||||
args = get_storage_args(collection='test2')
|
||||
@pytest.mark.asyncio
|
||||
async def test_discover_collection_arg(
|
||||
self, requires_collections, get_storage_args
|
||||
):
|
||||
args = await get_storage_args(collection="test2")
|
||||
with pytest.raises(TypeError) as excinfo:
|
||||
list(self.storage_class.discover(**args))
|
||||
await aiostream.stream.list(self.storage_class.discover(**args))
|
||||
|
||||
assert 'collection argument must not be given' in str(excinfo.value)
|
||||
|
||||
def test_collection_arg(self, get_storage_args):
|
||||
if self.storage_class.storage_name.startswith('etesync'):
|
||||
pytest.skip('etesync uses UUIDs.')
|
||||
assert "collection argument must not be given" in str(excinfo.value)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_collection_arg(self, get_storage_args):
|
||||
if self.supports_collections:
|
||||
s = self.storage_class(**get_storage_args(collection='test2'))
|
||||
s = self.storage_class(**await get_storage_args(collection="test2"))
|
||||
# Can't do stronger assertion because of radicale, which needs a
|
||||
# fileextension to guess the collection type.
|
||||
assert 'test2' in s.collection
|
||||
assert "test2" in s.collection
|
||||
else:
|
||||
with pytest.raises(ValueError):
|
||||
self.storage_class(collection='ayy', **get_storage_args())
|
||||
self.storage_class(collection="ayy", **await get_storage_args())
|
||||
|
||||
def test_case_sensitive_uids(self, s, get_item):
|
||||
if s.storage_name == 'filesystem':
|
||||
pytest.skip('Behavior depends on the filesystem.')
|
||||
@pytest.mark.asyncio
|
||||
async def test_case_sensitive_uids(self, s, get_item):
|
||||
if s.storage_name == "filesystem":
|
||||
pytest.skip("Behavior depends on the filesystem.")
|
||||
|
||||
uid = str(uuid.uuid4())
|
||||
s.upload(get_item(uid=uid.upper()))
|
||||
s.upload(get_item(uid=uid.lower()))
|
||||
items = list(href for href, etag in s.list())
|
||||
await s.upload(get_item(uid=uid.upper()))
|
||||
await s.upload(get_item(uid=uid.lower()))
|
||||
items = [href async for href, etag in s.list()]
|
||||
assert len(items) == 2
|
||||
assert len(set(items)) == 2
|
||||
|
||||
def test_specialchars(self, monkeypatch, requires_collections,
|
||||
get_storage_args, get_item):
|
||||
if getattr(self, 'dav_server', '') == 'radicale':
|
||||
pytest.skip('Radicale is fundamentally broken.')
|
||||
if getattr(self, 'dav_server', '') in ('icloud', 'fastmail'):
|
||||
pytest.skip('iCloud and FastMail reject this name.')
|
||||
@pytest.mark.asyncio
|
||||
async def test_specialchars(
|
||||
self, monkeypatch, requires_collections, get_storage_args, get_item
|
||||
):
|
||||
if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
|
||||
pytest.skip("iCloud and FastMail reject this name.")
|
||||
|
||||
monkeypatch.setattr('vdirsyncer.utils.generate_href', lambda x: x)
|
||||
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
||||
|
||||
uid = u'test @ foo ät bar град сатану'
|
||||
collection = 'test @ foo ät bar'
|
||||
uid = "test @ foo ät bar град сатану"
|
||||
collection = "test @ foo ät bar"
|
||||
|
||||
s = self.storage_class(**get_storage_args(collection=collection))
|
||||
s = self.storage_class(**await get_storage_args(collection=collection))
|
||||
item = get_item(uid=uid)
|
||||
|
||||
href, etag = s.upload(item)
|
||||
item2, etag2 = s.get(href)
|
||||
href, etag = await s.upload(item)
|
||||
item2, etag2 = await s.get(href)
|
||||
if etag is not None:
|
||||
assert etag2 == etag
|
||||
assert_item_equals(item2, item)
|
||||
|
||||
(_, etag3), = s.list()
|
||||
((_, etag3),) = await aiostream.stream.list(s.list())
|
||||
assert etag2 == etag3
|
||||
|
||||
# etesync uses UUIDs for collection names
|
||||
if self.storage_class.storage_name.startswith('etesync'):
|
||||
return
|
||||
|
||||
assert collection in urlunquote(s.collection)
|
||||
if self.storage_class.storage_name.endswith('dav'):
|
||||
assert urlquote(uid, '/@:') in href
|
||||
if self.storage_class.storage_name.endswith("dav"):
|
||||
assert urlquote(uid, "/@:") in href
|
||||
|
||||
def test_metadata(self, requires_metadata, s):
|
||||
if not getattr(self, 'dav_server', ''):
|
||||
assert not s.get_meta('color')
|
||||
assert not s.get_meta('displayname')
|
||||
@pytest.mark.asyncio
|
||||
async def test_newline_in_uid(
|
||||
self, monkeypatch, requires_collections, get_storage_args, get_item
|
||||
):
|
||||
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
||||
|
||||
uid = "UID:20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%0A.ics"
|
||||
|
||||
s = self.storage_class(**await get_storage_args())
|
||||
item = get_item(uid=uid)
|
||||
|
||||
href, etag = await s.upload(item)
|
||||
item2, etag2 = await s.get(href)
|
||||
if etag is not None:
|
||||
assert etag2 == etag
|
||||
assert_item_equals(item2, item)
|
||||
|
||||
((_, etag3),) = await aiostream.stream.list(s.list())
|
||||
assert etag2 == etag3
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_empty_metadata(self, requires_metadata, s):
|
||||
if getattr(self, "dav_server", ""):
|
||||
pytest.skip()
|
||||
|
||||
assert await s.get_meta("color") is None
|
||||
assert await s.get_meta("displayname") is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_metadata(self, requires_metadata, s):
|
||||
if getattr(self, "dav_server", "") == "xandikos":
|
||||
pytest.skip("xandikos does not support removing metadata.")
|
||||
|
||||
try:
|
||||
s.set_meta('color', None)
|
||||
assert not s.get_meta('color')
|
||||
s.set_meta('color', u'#ff0000')
|
||||
assert s.get_meta('color') == u'#ff0000'
|
||||
await s.set_meta("color", None)
|
||||
assert await s.get_meta("color") is None
|
||||
await s.set_meta("color", "#ff0000")
|
||||
assert await s.get_meta("color") == "#ff0000"
|
||||
except exceptions.UnsupportedMetadataError:
|
||||
pass
|
||||
|
||||
for x in (u'hello world', u'hello wörld'):
|
||||
s.set_meta('displayname', x)
|
||||
rv = s.get_meta('displayname')
|
||||
@pytest.mark.asyncio
|
||||
async def test_encoding_metadata(self, requires_metadata, s):
|
||||
for x in ("hello world", "hello wörld"):
|
||||
await s.set_meta("displayname", x)
|
||||
rv = await s.get_meta("displayname")
|
||||
assert rv == x
|
||||
assert isinstance(rv, str)
|
||||
|
||||
@given(value=st.one_of(
|
||||
st.none(),
|
||||
printable_characters_strategy
|
||||
))
|
||||
def test_metadata_normalization(self, requires_metadata, s, value):
|
||||
x = s.get_meta('displayname')
|
||||
@pytest.mark.parametrize(
|
||||
"value",
|
||||
[
|
||||
None,
|
||||
"",
|
||||
"Hello there!",
|
||||
"Österreich",
|
||||
"中国",
|
||||
"한글",
|
||||
"42a4ec99-b1c2-4859-b142-759112f2ca50",
|
||||
"فلسطين",
|
||||
],
|
||||
)
|
||||
@pytest.mark.asyncio
|
||||
async def test_metadata_normalization(self, requires_metadata, s, value):
|
||||
x = await s.get_meta("displayname")
|
||||
assert x == normalize_meta_value(x)
|
||||
|
||||
if not getattr(self, 'dav_server', None):
|
||||
if not getattr(self, "dav_server", None):
|
||||
# ownCloud replaces "" with "unnamed"
|
||||
s.set_meta('displayname', value)
|
||||
assert s.get_meta('displayname') == normalize_meta_value(value)
|
||||
await s.set_meta("displayname", value)
|
||||
assert await s.get_meta("displayname") == normalize_meta_value(value)
|
||||
|
||||
def test_recurring_events(self, s, item_type):
|
||||
if item_type != 'VEVENT':
|
||||
pytest.skip('This storage instance doesn\'t support iCalendar.')
|
||||
@pytest.mark.asyncio
|
||||
async def test_recurring_events(self, s, item_type):
|
||||
if item_type != "VEVENT":
|
||||
pytest.skip("This storage instance doesn't support iCalendar.")
|
||||
|
||||
uid = str(uuid.uuid4())
|
||||
item = Item(textwrap.dedent(u'''
|
||||
item = Item(
|
||||
textwrap.dedent(
|
||||
f"""
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
BEGIN:VEVENT
|
||||
|
|
@ -336,7 +405,7 @@ class StorageTests(object):
|
|||
BEGIN:VEVENT
|
||||
DTSTART;TZID=UTC:20140128T083000Z
|
||||
DTEND;TZID=UTC:20140128T100000Z
|
||||
RRULE:FREQ=WEEKLY;UNTIL=20141208T213000Z;BYDAY=TU
|
||||
RRULE:FREQ=WEEKLY;BYDAY=TU;UNTIL=20141208T213000Z
|
||||
DTSTAMP:20140327T060506Z
|
||||
UID:{uid}
|
||||
CREATED:20131216T033331Z
|
||||
|
|
@ -349,9 +418,11 @@ class StorageTests(object):
|
|||
TRANSP:OPAQUE
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
'''.format(uid=uid)).strip())
|
||||
"""
|
||||
).strip()
|
||||
)
|
||||
|
||||
href, etag = s.upload(item)
|
||||
href, _etag = await s.upload(item)
|
||||
|
||||
item2, etag2 = s.get(href)
|
||||
item2, _etag2 = await s.get(href)
|
||||
assert normalize_item(item) == normalize_item(item2)
|
||||
|
|
|
|||
|
|
@ -1,36 +1,116 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pytest
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import contextlib
|
||||
import subprocess
|
||||
import time
|
||||
import uuid
|
||||
|
||||
import aiostream
|
||||
import pytest
|
||||
import pytest_asyncio
|
||||
import requests
|
||||
|
||||
@pytest.fixture
|
||||
def slow_create_collection(request):
|
||||
|
||||
def wait_for_container(url):
|
||||
"""Wait for a container to initialise.
|
||||
|
||||
Polls a URL every 100ms until the server responds.
|
||||
"""
|
||||
# give the server 5 seconds to settle
|
||||
for _ in range(50):
|
||||
print(_)
|
||||
|
||||
try:
|
||||
response = requests.get(url)
|
||||
response.raise_for_status()
|
||||
except requests.ConnectionError:
|
||||
pass
|
||||
else:
|
||||
return
|
||||
|
||||
time.sleep(0.1)
|
||||
|
||||
pytest.exit(
|
||||
"Server did not initialise in 5 seconds.\n"
|
||||
"WARNING: There may be a stale docker container still running."
|
||||
)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def dockerised_server(name, container_port, exposed_port):
|
||||
"""Run a dockerised DAV server as a contenxt manager."""
|
||||
container_id = None
|
||||
url = f"http://127.0.0.1:{exposed_port}/"
|
||||
|
||||
try:
|
||||
# Hint: This will block while the pull happends, and only return once
|
||||
# the container has actually started.
|
||||
output = subprocess.check_output(
|
||||
[
|
||||
"docker",
|
||||
"run",
|
||||
"--rm",
|
||||
"--detach",
|
||||
"--publish",
|
||||
f"{exposed_port}:{container_port}",
|
||||
f"whynothugo/vdirsyncer-devkit-{name}",
|
||||
]
|
||||
)
|
||||
|
||||
container_id = output.decode().strip()
|
||||
wait_for_container(url)
|
||||
|
||||
yield url
|
||||
finally:
|
||||
if container_id:
|
||||
subprocess.check_output(["docker", "kill", container_id])
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def baikal_server():
|
||||
with dockerised_server("baikal", "80", "8002"):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def radicale_server():
|
||||
with dockerised_server("radicale", "8001", "8001"):
|
||||
yield
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def xandikos_server():
|
||||
with dockerised_server("xandikos", "8000", "8000"):
|
||||
yield
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
async def slow_create_collection(request, aio_connector):
|
||||
# We need to properly clean up because otherwise we might run into
|
||||
# storage limits.
|
||||
to_delete = []
|
||||
|
||||
def delete_collections():
|
||||
for s in to_delete:
|
||||
s.session.request('DELETE', '')
|
||||
async def inner(cls: type, args: dict, collection_name: str) -> dict:
|
||||
"""Create a collection
|
||||
|
||||
request.addfinalizer(delete_collections)
|
||||
Returns args necessary to create a Storage instance pointing to it.
|
||||
"""
|
||||
assert collection_name.startswith("test")
|
||||
|
||||
def inner(cls, args, collection):
|
||||
assert collection.startswith('test')
|
||||
collection += '-vdirsyncer-ci-' + str(uuid.uuid4())
|
||||
# Make each name unique
|
||||
collection_name = f"{collection_name}-vdirsyncer-ci-{uuid.uuid4()}"
|
||||
|
||||
args = cls.create_collection(collection, **args)
|
||||
s = cls(**args)
|
||||
_clear_collection(s)
|
||||
assert not list(s.list())
|
||||
to_delete.append(s)
|
||||
# Create the collection:
|
||||
args = await cls.create_collection(collection_name, **args)
|
||||
collection = cls(**args)
|
||||
|
||||
# Keep collection in a list to be deleted once tests end:
|
||||
to_delete.append(collection)
|
||||
|
||||
assert not await aiostream.stream.list(collection.list())
|
||||
return args
|
||||
|
||||
return inner
|
||||
yield inner
|
||||
|
||||
|
||||
def _clear_collection(s):
|
||||
for href, etag in s.list():
|
||||
s.delete(href, etag)
|
||||
await asyncio.gather(*(c.session.request("DELETE", "") for c in to_delete))
|
||||
|
|
|
|||
|
|
@ -1,56 +1,53 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import uuid
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import uuid
|
||||
|
||||
import aiohttp
|
||||
import aiostream
|
||||
import pytest
|
||||
|
||||
import requests
|
||||
import requests.exceptions
|
||||
|
||||
from tests import assert_item_equals
|
||||
|
||||
from tests.storage import StorageTests
|
||||
from tests.storage import get_server_mixin
|
||||
from vdirsyncer import exceptions
|
||||
from vdirsyncer.vobject import Item
|
||||
|
||||
from .. import StorageTests, get_server_mixin
|
||||
|
||||
|
||||
dav_server = os.environ.get('DAV_SERVER', 'skip')
|
||||
dav_server = os.environ.get("DAV_SERVER", "skip")
|
||||
ServerMixin = get_server_mixin(dav_server)
|
||||
|
||||
|
||||
class DAVStorageTests(ServerMixin, StorageTests):
|
||||
dav_server = dav_server
|
||||
|
||||
@pytest.mark.skipif(dav_server == 'radicale',
|
||||
reason='Radicale is very tolerant.')
|
||||
def test_dav_broken_item(self, s):
|
||||
item = Item(u'HAHA:YES')
|
||||
with pytest.raises((exceptions.Error, requests.exceptions.HTTPError)):
|
||||
s.upload(item)
|
||||
assert not list(s.list())
|
||||
@pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
|
||||
@pytest.mark.asyncio
|
||||
async def test_dav_broken_item(self, s):
|
||||
item = Item("HAHA:YES")
|
||||
with pytest.raises((exceptions.Error, aiohttp.ClientResponseError)):
|
||||
await s.upload(item)
|
||||
assert not await aiostream.stream.list(s.list())
|
||||
|
||||
def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
||||
@pytest.mark.asyncio
|
||||
async def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
||||
def breakdown(*a, **kw):
|
||||
raise AssertionError('Expected not to be called.')
|
||||
raise AssertionError("Expected not to be called.")
|
||||
|
||||
monkeypatch.setattr('requests.sessions.Session.request', breakdown)
|
||||
monkeypatch.setattr("requests.sessions.Session.request", breakdown)
|
||||
|
||||
try:
|
||||
assert list(s.get_multi([])) == []
|
||||
assert list(await aiostream.stream.list(s.get_multi([]))) == []
|
||||
finally:
|
||||
# Make sure monkeypatch doesn't interfere with DAV server teardown
|
||||
monkeypatch.undo()
|
||||
|
||||
def test_dav_unicode_href(self, s, get_item, monkeypatch):
|
||||
if self.dav_server == 'radicale':
|
||||
pytest.skip('Radicale is unable to deal with unicode hrefs')
|
||||
@pytest.mark.asyncio
|
||||
async def test_dav_unicode_href(self, s, get_item, monkeypatch):
|
||||
if self.dav_server == "radicale":
|
||||
pytest.skip("Radicale is unable to deal with unicode hrefs")
|
||||
|
||||
monkeypatch.setattr(s, '_get_href',
|
||||
lambda item: item.ident + s.fileext)
|
||||
item = get_item(uid=u'град сатану' + str(uuid.uuid4()))
|
||||
href, etag = s.upload(item)
|
||||
item2, etag2 = s.get(href)
|
||||
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
|
||||
item = get_item(uid="град сатану" + str(uuid.uuid4()))
|
||||
href, _etag = await s.upload(item)
|
||||
item2, _etag2 = await s.get(href)
|
||||
assert_item_equals(item, item2)
|
||||
|
|
|
|||
|
|
@ -1,50 +1,60 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import datetime
|
||||
from textwrap import dedent
|
||||
|
||||
import aiohttp
|
||||
import aiostream
|
||||
import pytest
|
||||
from aioresponses import aioresponses
|
||||
|
||||
import requests
|
||||
import requests.exceptions
|
||||
|
||||
from tests import EVENT_TEMPLATE, TASK_TEMPLATE, VCARD_TEMPLATE
|
||||
|
||||
from tests import EVENT_TEMPLATE
|
||||
from tests import TASK_TEMPLATE
|
||||
from tests import VCARD_TEMPLATE
|
||||
from tests.storage import format_item
|
||||
from vdirsyncer import exceptions
|
||||
from vdirsyncer.storage.dav import CalDAVStorage
|
||||
|
||||
from . import DAVStorageTests, dav_server
|
||||
from .. import format_item
|
||||
from . import DAVStorageTests
|
||||
from . import dav_server
|
||||
|
||||
|
||||
class TestCalDAVStorage(DAVStorageTests):
|
||||
storage_class = CalDAVStorage
|
||||
|
||||
@pytest.fixture(params=['VTODO', 'VEVENT'])
|
||||
@pytest.fixture(params=["VTODO", "VEVENT"])
|
||||
def item_type(self, request):
|
||||
return request.param
|
||||
|
||||
def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
||||
s = self.storage_class(item_types=(item_type,), **get_storage_args())
|
||||
@pytest.mark.asyncio
|
||||
async def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
||||
s = self.storage_class(item_types=(item_type,), **await get_storage_args())
|
||||
|
||||
try:
|
||||
s.upload(format_item(VCARD_TEMPLATE))
|
||||
except (exceptions.Error, requests.exceptions.HTTPError):
|
||||
pass
|
||||
assert not list(s.list())
|
||||
# Most storages hard-fail, but xandikos doesn't.
|
||||
with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError):
|
||||
await s.upload(format_item(VCARD_TEMPLATE))
|
||||
|
||||
assert not await aiostream.stream.list(s.list())
|
||||
|
||||
# The `arg` param is not named `item_types` because that would hit
|
||||
# https://bitbucket.org/pytest-dev/pytest/issue/745/
|
||||
@pytest.mark.parametrize('arg,calls_num', [
|
||||
(('VTODO',), 1),
|
||||
(('VEVENT',), 1),
|
||||
(('VTODO', 'VEVENT'), 2),
|
||||
(('VTODO', 'VEVENT', 'VJOURNAL'), 3),
|
||||
((), 1)
|
||||
])
|
||||
def test_item_types_performance(self, get_storage_args, arg, calls_num,
|
||||
monkeypatch):
|
||||
s = self.storage_class(item_types=arg, **get_storage_args())
|
||||
@pytest.mark.parametrize(
|
||||
("arg", "calls_num"),
|
||||
[
|
||||
(("VTODO",), 1),
|
||||
(("VEVENT",), 1),
|
||||
(("VTODO", "VEVENT"), 2),
|
||||
(("VTODO", "VEVENT", "VJOURNAL"), 3),
|
||||
((), 1),
|
||||
],
|
||||
)
|
||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
||||
@pytest.mark.asyncio
|
||||
async def test_item_types_performance(
|
||||
self, get_storage_args, arg, calls_num, monkeypatch
|
||||
):
|
||||
s = self.storage_class(item_types=arg, **await get_storage_args())
|
||||
old_parse = s._parse_prop_responses
|
||||
calls = []
|
||||
|
||||
|
|
@ -52,19 +62,24 @@ class TestCalDAVStorage(DAVStorageTests):
|
|||
calls.append(None)
|
||||
return old_parse(*a, **kw)
|
||||
|
||||
monkeypatch.setattr(s, '_parse_prop_responses', new_parse)
|
||||
list(s.list())
|
||||
monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
|
||||
await aiostream.stream.list(s.list())
|
||||
assert len(calls) == calls_num
|
||||
|
||||
@pytest.mark.xfail(dav_server == 'radicale',
|
||||
reason='Radicale doesn\'t support timeranges.')
|
||||
def test_timerange_correctness(self, get_storage_args):
|
||||
@pytest.mark.xfail(
|
||||
dav_server == "radicale", reason="Radicale doesn't support timeranges."
|
||||
)
|
||||
@pytest.mark.asyncio
|
||||
async def test_timerange_correctness(self, get_storage_args):
|
||||
start_date = datetime.datetime(2013, 9, 10)
|
||||
end_date = datetime.datetime(2013, 9, 13)
|
||||
s = self.storage_class(start_date=start_date, end_date=end_date,
|
||||
**get_storage_args())
|
||||
s = self.storage_class(
|
||||
start_date=start_date, end_date=end_date, **await get_storage_args()
|
||||
)
|
||||
|
||||
too_old_item = format_item(dedent(u'''
|
||||
too_old_item = format_item(
|
||||
dedent(
|
||||
"""
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||
|
|
@ -76,9 +91,13 @@ class TestCalDAVStorage(DAVStorageTests):
|
|||
UID:{r}
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
''').strip())
|
||||
"""
|
||||
).strip()
|
||||
)
|
||||
|
||||
too_new_item = format_item(dedent(u'''
|
||||
too_new_item = format_item(
|
||||
dedent(
|
||||
"""
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||
|
|
@ -90,9 +109,13 @@ class TestCalDAVStorage(DAVStorageTests):
|
|||
UID:{r}
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
''').strip())
|
||||
"""
|
||||
).strip()
|
||||
)
|
||||
|
||||
good_item = format_item(dedent(u'''
|
||||
good_item = format_item(
|
||||
dedent(
|
||||
"""
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||
|
|
@ -104,49 +127,48 @@ class TestCalDAVStorage(DAVStorageTests):
|
|||
UID:{r}
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
''').strip())
|
||||
"""
|
||||
).strip()
|
||||
)
|
||||
|
||||
s.upload(too_old_item)
|
||||
s.upload(too_new_item)
|
||||
expected_href, _ = s.upload(good_item)
|
||||
await s.upload(too_old_item)
|
||||
await s.upload(too_new_item)
|
||||
expected_href, _ = await s.upload(good_item)
|
||||
|
||||
(actual_href, _), = s.list()
|
||||
((actual_href, _),) = await aiostream.stream.list(s.list())
|
||||
assert actual_href == expected_href
|
||||
|
||||
def test_invalid_resource(self, monkeypatch, get_storage_args):
|
||||
calls = []
|
||||
args = get_storage_args(collection=None)
|
||||
@pytest.mark.asyncio
|
||||
async def test_invalid_resource(self, monkeypatch, get_storage_args):
|
||||
args = await get_storage_args(collection=None)
|
||||
|
||||
def request(session, method, url, **kwargs):
|
||||
assert url == args['url']
|
||||
calls.append(None)
|
||||
with aioresponses() as m:
|
||||
m.add(args["url"], method="PROPFIND", status=200, body="Hello world")
|
||||
|
||||
r = requests.Response()
|
||||
r.status_code = 200
|
||||
r._content = b'Hello World.'
|
||||
return r
|
||||
with pytest.raises(ValueError):
|
||||
s = self.storage_class(**args)
|
||||
await aiostream.stream.list(s.list())
|
||||
|
||||
monkeypatch.setattr('requests.sessions.Session.request', request)
|
||||
assert len(m.requests) == 1
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
s = self.storage_class(**args)
|
||||
list(s.list())
|
||||
assert len(calls) == 1
|
||||
@pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
|
||||
@pytest.mark.skipif(
|
||||
dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs."
|
||||
)
|
||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
||||
@pytest.mark.asyncio
|
||||
async def test_item_types_general(self, s):
|
||||
event = (await s.upload(format_item(EVENT_TEMPLATE)))[0]
|
||||
task = (await s.upload(format_item(TASK_TEMPLATE)))[0]
|
||||
s.item_types = ("VTODO", "VEVENT")
|
||||
|
||||
@pytest.mark.skipif(dav_server == 'icloud',
|
||||
reason='iCloud only accepts VEVENT')
|
||||
def test_item_types_general(self, s):
|
||||
event = s.upload(format_item(EVENT_TEMPLATE))[0]
|
||||
task = s.upload(format_item(TASK_TEMPLATE))[0]
|
||||
s.item_types = ('VTODO', 'VEVENT')
|
||||
async def hrefs():
|
||||
return {href async for href, etag in s.list()}
|
||||
|
||||
def l():
|
||||
return set(href for href, etag in s.list())
|
||||
|
||||
assert l() == {event, task}
|
||||
s.item_types = ('VTODO',)
|
||||
assert l() == {task}
|
||||
s.item_types = ('VEVENT',)
|
||||
assert l() == {event}
|
||||
assert await hrefs() == {event, task}
|
||||
s.item_types = ("VTODO",)
|
||||
assert await hrefs() == {task}
|
||||
s.item_types = ("VEVENT",)
|
||||
assert await hrefs() == {event}
|
||||
s.item_types = ()
|
||||
assert l() == {event, task}
|
||||
assert await hrefs() == {event, task}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
|
|
@ -10,6 +10,6 @@ from . import DAVStorageTests
|
|||
class TestCardDAVStorage(DAVStorageTests):
|
||||
storage_class = CardDAVStorage
|
||||
|
||||
@pytest.fixture(params=['VCARD'])
|
||||
@pytest.fixture(params=["VCARD"])
|
||||
def item_type(self, request):
|
||||
return request.param
|
||||
|
|
|
|||
|
|
@ -1,40 +1,59 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from vdirsyncer.storage.dav import _BAD_XML_CHARS, _merge_xml, _parse_xml
|
||||
from vdirsyncer.storage.dav import _BAD_XML_CHARS
|
||||
from vdirsyncer.storage.dav import _merge_xml
|
||||
from vdirsyncer.storage.dav import _normalize_href
|
||||
from vdirsyncer.storage.dav import _parse_xml
|
||||
|
||||
|
||||
def test_xml_utilities():
|
||||
x = _parse_xml(b'''<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<D:multistatus xmlns:D="DAV:">
|
||||
<D:response>
|
||||
<D:propstat>
|
||||
<D:status>HTTP/1.1 404 Not Found</D:status>
|
||||
<D:prop>
|
||||
<D:getcontenttype/>
|
||||
</D:prop>
|
||||
</D:propstat>
|
||||
<D:propstat>
|
||||
<D:prop>
|
||||
<D:resourcetype>
|
||||
<D:collection/>
|
||||
</D:resourcetype>
|
||||
</D:prop>
|
||||
</D:propstat>
|
||||
</D:response>
|
||||
</D:multistatus>
|
||||
''')
|
||||
x = _parse_xml(
|
||||
b"""<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<multistatus xmlns="DAV:">
|
||||
<response>
|
||||
<propstat>
|
||||
<status>HTTP/1.1 404 Not Found</status>
|
||||
<prop>
|
||||
<getcontenttype/>
|
||||
</prop>
|
||||
</propstat>
|
||||
<propstat>
|
||||
<prop>
|
||||
<resourcetype>
|
||||
<collection/>
|
||||
</resourcetype>
|
||||
</prop>
|
||||
</propstat>
|
||||
</response>
|
||||
</multistatus>
|
||||
"""
|
||||
)
|
||||
|
||||
response = x.find('{DAV:}response')
|
||||
props = _merge_xml(response.findall('{DAV:}propstat/{DAV:}prop'))
|
||||
assert props.find('{DAV:}resourcetype/{DAV:}collection') is not None
|
||||
assert props.find('{DAV:}getcontenttype') is not None
|
||||
response = x.find("{DAV:}response")
|
||||
props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
|
||||
assert props.find("{DAV:}resourcetype/{DAV:}collection") is not None
|
||||
assert props.find("{DAV:}getcontenttype") is not None
|
||||
|
||||
|
||||
@pytest.mark.parametrize('char', range(32))
|
||||
@pytest.mark.parametrize("char", range(32))
|
||||
def test_xml_specialchars(char):
|
||||
x = _parse_xml('<?xml version="1.0" encoding="UTF-8" ?>'
|
||||
'<foo>ye{}s\r\n'
|
||||
'hello</foo>'.format(chr(char)).encode('ascii'))
|
||||
x = _parse_xml(
|
||||
'<?xml version="1.0" encoding="UTF-8" ?>'
|
||||
f"<foo>ye{chr(char)}s\r\n"
|
||||
"hello</foo>".encode("ascii")
|
||||
)
|
||||
|
||||
if char in _BAD_XML_CHARS:
|
||||
assert x.text == 'yes\nhello'
|
||||
assert x.text == "yes\nhello"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"href",
|
||||
[
|
||||
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
|
||||
],
|
||||
)
|
||||
def test_normalize_href(href):
|
||||
assert href == _normalize_href("https://example.com", href)
|
||||
|
|
|
|||
Binary file not shown.
|
|
@ -1,124 +0,0 @@
|
|||
"""
|
||||
Django settings for etesync_server project.
|
||||
|
||||
Generated by 'django-admin startproject' using Django 1.10.6.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/1.10/topics/settings/
|
||||
|
||||
For the full list of settings and their values, see
|
||||
https://docs.djangoproject.com/en/1.10/ref/settings/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
|
||||
# Quick-start development settings - unsuitable for production
|
||||
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
|
||||
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
SECRET_KEY = 'd7r(p-9=$3a@bbt%*+$p@4)cej13nzd0gmnt8+m0bitb=-umj#'
|
||||
|
||||
# SECURITY WARNING: don't run with debug turned on in production!
|
||||
DEBUG = True
|
||||
|
||||
ALLOWED_HOSTS = []
|
||||
|
||||
|
||||
# Application definition
|
||||
|
||||
INSTALLED_APPS = [
|
||||
'django.contrib.admin',
|
||||
'django.contrib.auth',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.sessions',
|
||||
'django.contrib.messages',
|
||||
'django.contrib.staticfiles',
|
||||
'rest_framework',
|
||||
'rest_framework.authtoken',
|
||||
'journal.apps.JournalConfig',
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'django.contrib.sessions.middleware.SessionMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
'django.middleware.csrf.CsrfViewMiddleware',
|
||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||
'django.contrib.messages.middleware.MessageMiddleware',
|
||||
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||
]
|
||||
|
||||
ROOT_URLCONF = 'etesync_server.urls'
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [],
|
||||
'APP_DIRS': True,
|
||||
'OPTIONS': {
|
||||
'context_processors': [
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.request',
|
||||
'django.contrib.auth.context_processors.auth',
|
||||
'django.contrib.messages.context_processors.messages',
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
WSGI_APPLICATION = 'etesync_server.wsgi.application'
|
||||
|
||||
|
||||
# Database
|
||||
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
|
||||
|
||||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.sqlite3',
|
||||
'NAME': os.environ.get('ETESYNC_DB_PATH',
|
||||
os.path.join(BASE_DIR, 'db.sqlite3')),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# Password validation
|
||||
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
|
||||
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/1.10/topics/i18n/
|
||||
|
||||
LANGUAGE_CODE = 'en-us'
|
||||
|
||||
TIME_ZONE = 'UTC'
|
||||
|
||||
USE_I18N = True
|
||||
|
||||
USE_L10N = True
|
||||
|
||||
USE_TZ = True
|
||||
|
||||
|
||||
# Static files (CSS, JavaScript, Images)
|
||||
# https://docs.djangoproject.com/en/1.10/howto/static-files/
|
||||
|
||||
STATIC_URL = '/static/'
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
"""etesync_server URL Configuration
|
||||
|
||||
The `urlpatterns` list routes URLs to views. For more information please see:
|
||||
https://docs.djangoproject.com/en/1.10/topics/http/urls/
|
||||
Examples:
|
||||
Function views
|
||||
1. Add an import: from my_app import views
|
||||
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
|
||||
Class-based views
|
||||
1. Add an import: from other_app.views import Home
|
||||
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
|
||||
Including another URLconf
|
||||
1. Import the include() function: from django.conf.urls import url, include
|
||||
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
|
||||
"""
|
||||
from django.conf.urls import include, url
|
||||
|
||||
from rest_framework_nested import routers
|
||||
|
||||
from journal import views
|
||||
|
||||
router = routers.DefaultRouter()
|
||||
router.register(r'journals', views.JournalViewSet)
|
||||
router.register(r'journal/(?P<journal_uid>[^/]+)', views.EntryViewSet)
|
||||
router.register(r'user', views.UserInfoViewSet)
|
||||
|
||||
journals_router = routers.NestedSimpleRouter(router, r'journals',
|
||||
lookup='journal')
|
||||
journals_router.register(r'members', views.MembersViewSet,
|
||||
base_name='journal-members')
|
||||
journals_router.register(r'entries', views.EntryViewSet,
|
||||
base_name='journal-entries')
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^api/v1/', include(router.urls)),
|
||||
url(r'^api/v1/', include(journals_router.urls)),
|
||||
]
|
||||
|
||||
# Adding this just for testing, this shouldn't be here normally
|
||||
urlpatterns += url(r'^reset/$', views.reset, name='reset_debug'),
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
"""
|
||||
WSGI config for etesync_server project.
|
||||
|
||||
It exposes the WSGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from django.core.wsgi import get_wsgi_application
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
|
||||
|
||||
application = get_wsgi_application()
|
||||
|
|
@ -1,22 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == "__main__":
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
|
||||
try:
|
||||
from django.core.management import execute_from_command_line
|
||||
except ImportError:
|
||||
# The above import may fail for some other reason. Ensure that the
|
||||
# issue is really that Django is missing to avoid masking other
|
||||
# exceptions on Python 2.
|
||||
try:
|
||||
import django # noqa
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"Couldn't import Django. Are you sure it's installed and "
|
||||
"available on your PYTHONPATH environment variable? Did you "
|
||||
"forget to activate a virtual environment?"
|
||||
)
|
||||
raise
|
||||
execute_from_command_line(sys.argv)
|
||||
|
|
@ -1 +0,0 @@
|
|||
63ae6eec45b592d5c511f79b7b0c312d2c5f7d6a
|
||||
Binary file not shown.
|
|
@ -1,92 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import shutil
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from vdirsyncer.storage.etesync import EtesyncContacts, EtesyncCalendars
|
||||
|
||||
from .. import StorageTests
|
||||
|
||||
|
||||
pytestmark = pytest.mark.skipif(os.getenv('ETESYNC_TESTS', '') != 'true',
|
||||
reason='etesync tests disabled')
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def etesync_app(tmpdir_factory):
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__),
|
||||
'etesync_server'))
|
||||
|
||||
db = tmpdir_factory.mktemp('etesync').join('etesync.sqlite')
|
||||
shutil.copy(
|
||||
os.path.join(os.path.dirname(__file__), 'etesync_server',
|
||||
'db.sqlite3'),
|
||||
str(db)
|
||||
)
|
||||
|
||||
os.environ['ETESYNC_DB_PATH'] = str(db)
|
||||
from etesync_server.wsgi import application
|
||||
return application
|
||||
|
||||
|
||||
class EtesyncTests(StorageTests):
|
||||
|
||||
supports_metadata = False
|
||||
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, request, get_item, tmpdir, etesync_app):
|
||||
import wsgi_intercept
|
||||
import wsgi_intercept.requests_intercept
|
||||
wsgi_intercept.requests_intercept.install()
|
||||
wsgi_intercept.add_wsgi_intercept('127.0.0.1', 8000,
|
||||
lambda: etesync_app)
|
||||
|
||||
def teardown():
|
||||
wsgi_intercept.remove_wsgi_intercept('127.0.0.1', 8000)
|
||||
wsgi_intercept.requests_intercept.uninstall()
|
||||
|
||||
request.addfinalizer(teardown)
|
||||
|
||||
with open(os.path.join(os.path.dirname(__file__),
|
||||
'test@localhost/auth_token')) as f:
|
||||
token = f.read().strip()
|
||||
headers = {'Authorization': 'Token ' + token}
|
||||
r = requests.post('http://127.0.0.1:8000/reset/', headers=headers,
|
||||
allow_redirects=False)
|
||||
assert r.status_code == 200
|
||||
|
||||
def inner(collection='test'):
|
||||
rv = {
|
||||
'email': 'test@localhost',
|
||||
'db_path': str(tmpdir.join('etesync.db')),
|
||||
'secrets_dir': os.path.dirname(__file__),
|
||||
'server_url': 'http://127.0.0.1:8000/'
|
||||
}
|
||||
if collection is not None:
|
||||
rv = self.storage_class.create_collection(
|
||||
collection=collection,
|
||||
**rv
|
||||
)
|
||||
return rv
|
||||
return inner
|
||||
|
||||
|
||||
class TestContacts(EtesyncTests):
|
||||
storage_class = EtesyncContacts
|
||||
|
||||
@pytest.fixture(params=['VCARD'])
|
||||
def item_type(self, request):
|
||||
return request.param
|
||||
|
||||
|
||||
class TestCalendars(EtesyncTests):
|
||||
storage_class = EtesyncCalendars
|
||||
|
||||
@pytest.fixture(params=['VEVENT'])
|
||||
def item_type(self, request):
|
||||
return request.param
|
||||
|
|
@ -1 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
|
@ -1 +0,0 @@
|
|||
Subproject commit 6c8c379f1ee8bf4ab0ac54fc4eec3e4a6349c237
|
||||
38
tests/storage/servers/baikal/__init__.py
Normal file
38
tests/storage/servers/baikal/__init__.py
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
class ServerMixin:
|
||||
@pytest.fixture
|
||||
def get_storage_args(
|
||||
self,
|
||||
request,
|
||||
tmpdir,
|
||||
slow_create_collection,
|
||||
baikal_server,
|
||||
aio_connector,
|
||||
):
|
||||
async def inner(collection="test"):
|
||||
base_url = "http://127.0.0.1:8002/"
|
||||
args = {
|
||||
"url": base_url,
|
||||
"username": "baikal",
|
||||
"password": "baikal",
|
||||
"connector": aio_connector,
|
||||
}
|
||||
|
||||
if self.storage_class.fileext == ".vcf":
|
||||
args["url"] = base_url + "card.php/"
|
||||
else:
|
||||
args["url"] = base_url + "cal.php/"
|
||||
|
||||
if collection is not None:
|
||||
args = await slow_create_collection(
|
||||
self.storage_class,
|
||||
args,
|
||||
collection,
|
||||
)
|
||||
return args
|
||||
|
||||
return inner
|
||||
|
|
@ -1,47 +1,50 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import pytest
|
||||
import uuid
|
||||
|
||||
import pytest
|
||||
|
||||
try:
|
||||
caldav_args = {
|
||||
# Those credentials are configured through the Travis UI
|
||||
'username': os.environ['DAVICAL_USERNAME'].strip(),
|
||||
'password': os.environ['DAVICAL_PASSWORD'].strip(),
|
||||
'url': 'https://brutus.lostpackets.de/davical-test/caldav.php/',
|
||||
"username": os.environ["DAVICAL_USERNAME"].strip(),
|
||||
"password": os.environ["DAVICAL_PASSWORD"].strip(),
|
||||
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
|
||||
}
|
||||
except KeyError as e:
|
||||
pytestmark = pytest.mark.skip('Missing envkey: {}'.format(str(e)))
|
||||
pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")
|
||||
|
||||
|
||||
@pytest.mark.flaky(reruns=5)
|
||||
class ServerMixin(object):
|
||||
class ServerMixin:
|
||||
@pytest.fixture
|
||||
def davical_args(self):
|
||||
if self.storage_class.fileext == '.ics':
|
||||
if self.storage_class.fileext == ".ics":
|
||||
return dict(caldav_args)
|
||||
elif self.storage_class.fileext == '.vcf':
|
||||
pytest.skip('No carddav')
|
||||
elif self.storage_class.fileext == ".vcf":
|
||||
pytest.skip("No carddav")
|
||||
else:
|
||||
raise RuntimeError()
|
||||
raise RuntimeError
|
||||
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, davical_args, request):
|
||||
def inner(collection='test'):
|
||||
async def inner(collection="test"):
|
||||
if collection is None:
|
||||
return davical_args
|
||||
|
||||
assert collection.startswith('test')
|
||||
assert collection.startswith("test")
|
||||
|
||||
for _ in range(4):
|
||||
args = self.storage_class.create_collection(
|
||||
collection + str(uuid.uuid4()),
|
||||
**davical_args
|
||||
collection + str(uuid.uuid4()), **davical_args
|
||||
)
|
||||
s = self.storage_class(**args)
|
||||
if not list(s.list()):
|
||||
request.addfinalizer(
|
||||
lambda: s.session.request('DELETE', ''))
|
||||
# See: https://stackoverflow.com/a/33984811
|
||||
request.addfinalizer(lambda x=s: x.session.request("DELETE", ""))
|
||||
return args
|
||||
|
||||
raise RuntimeError('Failed to find free collection.')
|
||||
raise RuntimeError("Failed to find free collection.")
|
||||
|
||||
return inner
|
||||
|
|
|
|||
|
|
@ -1,27 +1,42 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
class ServerMixin(object):
|
||||
|
||||
class ServerMixin:
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, slow_create_collection):
|
||||
def inner(collection='test'):
|
||||
def get_storage_args(self, slow_create_collection, aio_connector, request):
|
||||
if (
|
||||
"item_type" in request.fixturenames
|
||||
and request.getfixturevalue("item_type") == "VTODO"
|
||||
):
|
||||
# Fastmail has non-standard support for TODOs
|
||||
# See https://github.com/pimutils/vdirsyncer/issues/824
|
||||
pytest.skip("Fastmail has non-standard VTODO support.")
|
||||
|
||||
async def inner(collection="test"):
|
||||
args = {
|
||||
'username': os.environ['FASTMAIL_USERNAME'],
|
||||
'password': os.environ['FASTMAIL_PASSWORD']
|
||||
"username": os.environ["FASTMAIL_USERNAME"],
|
||||
"password": os.environ["FASTMAIL_PASSWORD"],
|
||||
"connector": aio_connector,
|
||||
}
|
||||
|
||||
if self.storage_class.fileext == '.ics':
|
||||
args['url'] = 'https://caldav.messagingengine.com/'
|
||||
elif self.storage_class.fileext == '.vcf':
|
||||
args['url'] = 'https://carddav.messagingengine.com/'
|
||||
if self.storage_class.fileext == ".ics":
|
||||
args["url"] = "https://caldav.fastmail.com/"
|
||||
elif self.storage_class.fileext == ".vcf":
|
||||
args["url"] = "https://carddav.fastmail.com/"
|
||||
else:
|
||||
raise RuntimeError()
|
||||
raise RuntimeError
|
||||
|
||||
if collection is not None:
|
||||
args = slow_create_collection(self.storage_class, args,
|
||||
collection)
|
||||
args = await slow_create_collection(
|
||||
self.storage_class,
|
||||
args,
|
||||
collection,
|
||||
)
|
||||
|
||||
return args
|
||||
|
||||
return inner
|
||||
|
|
|
|||
|
|
@ -1,32 +1,33 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
class ServerMixin(object):
|
||||
|
||||
class ServerMixin:
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, item_type, slow_create_collection):
|
||||
if item_type != 'VEVENT':
|
||||
if item_type != "VEVENT":
|
||||
# iCloud collections can either be calendars or task lists.
|
||||
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa
|
||||
pytest.skip('iCloud doesn\'t support anything else than VEVENT')
|
||||
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
|
||||
pytest.skip("iCloud doesn't support anything else than VEVENT")
|
||||
|
||||
def inner(collection='test'):
|
||||
async def inner(collection="test"):
|
||||
args = {
|
||||
'username': os.environ['ICLOUD_USERNAME'],
|
||||
'password': os.environ['ICLOUD_PASSWORD']
|
||||
"username": os.environ["ICLOUD_USERNAME"],
|
||||
"password": os.environ["ICLOUD_PASSWORD"],
|
||||
}
|
||||
|
||||
if self.storage_class.fileext == '.ics':
|
||||
args['url'] = 'https://caldav.icloud.com/'
|
||||
elif self.storage_class.fileext == '.vcf':
|
||||
args['url'] = 'https://contacts.icloud.com/'
|
||||
if self.storage_class.fileext == ".ics":
|
||||
args["url"] = "https://caldav.icloud.com/"
|
||||
elif self.storage_class.fileext == ".vcf":
|
||||
args["url"] = "https://contacts.icloud.com/"
|
||||
else:
|
||||
raise RuntimeError()
|
||||
raise RuntimeError
|
||||
|
||||
if collection is not None:
|
||||
args = slow_create_collection(self.storage_class, args,
|
||||
collection)
|
||||
args = slow_create_collection(self.storage_class, args, collection)
|
||||
return args
|
||||
|
||||
return inner
|
||||
|
|
|
|||
|
|
@ -1 +0,0 @@
|
|||
mysteryshack
|
||||
|
|
@ -1,75 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import time
|
||||
import shutil
|
||||
|
||||
import pytest
|
||||
|
||||
import requests
|
||||
|
||||
testserver_repo = os.path.dirname(__file__)
|
||||
make_sh = os.path.abspath(os.path.join(testserver_repo, 'make.sh'))
|
||||
|
||||
|
||||
def wait():
|
||||
for i in range(100):
|
||||
try:
|
||||
requests.get('http://127.0.0.1:6767/', verify=False)
|
||||
except Exception as e:
|
||||
# Don't know exact exception class, don't care.
|
||||
# Also, https://github.com/kennethreitz/requests/issues/2192
|
||||
if 'connection refused' not in str(e).lower():
|
||||
raise
|
||||
time.sleep(2 ** i)
|
||||
else:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class ServerMixin(object):
|
||||
@pytest.fixture(scope='session')
|
||||
def setup_mysteryshack_server(self, xprocess):
|
||||
def preparefunc(cwd):
|
||||
return wait, ['sh', make_sh, 'testserver']
|
||||
|
||||
subprocess.check_call(['sh', make_sh, 'testserver-config'])
|
||||
xprocess.ensure('mysteryshack_server', preparefunc)
|
||||
|
||||
return subprocess.check_output([
|
||||
os.path.join(
|
||||
testserver_repo,
|
||||
'mysteryshack/target/debug/mysteryshack'
|
||||
),
|
||||
'-c', '/tmp/mysteryshack/config',
|
||||
'user',
|
||||
'authorize',
|
||||
'testuser',
|
||||
'https://example.com',
|
||||
self.storage_class.scope + ':rw'
|
||||
]).strip().decode()
|
||||
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, monkeypatch, setup_mysteryshack_server):
|
||||
from requests import Session
|
||||
|
||||
monkeypatch.setitem(os.environ, 'OAUTHLIB_INSECURE_TRANSPORT', 'true')
|
||||
|
||||
old_request = Session.request
|
||||
|
||||
def request(self, method, url, **kw):
|
||||
url = url.replace('https://', 'http://')
|
||||
return old_request(self, method, url, **kw)
|
||||
|
||||
monkeypatch.setattr(Session, 'request', request)
|
||||
shutil.rmtree('/tmp/mysteryshack/testuser/data', ignore_errors=True)
|
||||
shutil.rmtree('/tmp/mysteryshack/testuser/meta', ignore_errors=True)
|
||||
|
||||
def inner(**kw):
|
||||
kw['account'] = 'testuser@127.0.0.1:6767'
|
||||
kw['access_token'] = setup_mysteryshack_server
|
||||
if self.storage_class.fileext == '.ics':
|
||||
kw.setdefault('collection', 'test')
|
||||
return kw
|
||||
return inner
|
||||
|
|
@ -1,18 +0,0 @@
|
|||
#!/bin/sh
|
||||
set -ex
|
||||
cd "$(dirname "$0")"
|
||||
. ./variables.sh
|
||||
|
||||
if [ "$CI" = "true" ]; then
|
||||
curl -sL https://static.rust-lang.org/rustup.sh -o ~/rust-installer/rustup.sh
|
||||
sh ~/rust-installer/rustup.sh --prefix=~/rust --spec=stable -y --disable-sudo 2> /dev/null
|
||||
fi
|
||||
|
||||
if [ ! -d mysteryshack ]; then
|
||||
git clone https://github.com/untitaker/mysteryshack
|
||||
fi
|
||||
|
||||
pip install pytest-xprocess
|
||||
|
||||
cd mysteryshack
|
||||
make debug-build # such that first test doesn't hang too long w/o output
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
# pytest-xprocess doesn't allow us to CD into a particular directory before
|
||||
# launching a command, so we do it here.
|
||||
cd "$(dirname "$0")"
|
||||
. ./variables.sh
|
||||
cd mysteryshack
|
||||
exec make "$@"
|
||||
|
|
@ -1 +0,0 @@
|
|||
export PATH="$PATH:$HOME/.cargo/bin/"
|
||||
|
|
@ -1 +0,0 @@
|
|||
Subproject commit a27144ddcf39a3283179a4f7ce1ab22b2e810205
|
||||
|
|
@ -1 +0,0 @@
|
|||
Subproject commit bb4fcc6f524467d58c95f1dcec8470fdfcd65adf
|
||||
|
|
@ -1,59 +1,33 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import pytest
|
||||
|
||||
import radicale
|
||||
import radicale.config
|
||||
|
||||
from pkg_resources import parse_version as ver
|
||||
|
||||
import wsgi_intercept
|
||||
import wsgi_intercept.requests_intercept
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ServerMixin(object):
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def setup(self, request, tmpdir):
|
||||
if ver(radicale.VERSION) < ver('2.0.0-pre'):
|
||||
raise RuntimeError('Testing against Radicale only works with '
|
||||
'Radicale >= 2.0.0')
|
||||
|
||||
def get_app():
|
||||
config = radicale.config.load(())
|
||||
config.set('storage', 'filesystem_folder', str(tmpdir))
|
||||
config.set('rights', 'type', 'owner_only')
|
||||
|
||||
app = radicale.Application(config, logger)
|
||||
|
||||
def is_authenticated(user, password):
|
||||
return user == 'bob' and password == 'bob'
|
||||
|
||||
app.is_authenticated = is_authenticated
|
||||
return app
|
||||
|
||||
wsgi_intercept.requests_intercept.install()
|
||||
wsgi_intercept.add_wsgi_intercept('127.0.0.1', 80, get_app)
|
||||
|
||||
def teardown():
|
||||
wsgi_intercept.remove_wsgi_intercept('127.0.0.1', 80)
|
||||
wsgi_intercept.requests_intercept.uninstall()
|
||||
request.addfinalizer(teardown)
|
||||
|
||||
class ServerMixin:
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, get_item):
|
||||
def inner(collection='test'):
|
||||
url = 'http://127.0.0.1/'
|
||||
rv = {'url': url, 'username': 'bob', 'password': 'bob'}
|
||||
def get_storage_args(
|
||||
self,
|
||||
request,
|
||||
tmpdir,
|
||||
slow_create_collection,
|
||||
radicale_server,
|
||||
aio_connector,
|
||||
):
|
||||
async def inner(collection="test"):
|
||||
url = "http://127.0.0.1:8001/"
|
||||
args = {
|
||||
"url": url,
|
||||
"username": "radicale",
|
||||
"password": "radicale",
|
||||
"connector": aio_connector,
|
||||
}
|
||||
|
||||
if collection is not None:
|
||||
collection = collection + self.storage_class.fileext
|
||||
rv = self.storage_class.create_collection(collection, **rv)
|
||||
s = self.storage_class(**rv)
|
||||
assert not list(s.list())
|
||||
args = await slow_create_collection(
|
||||
self.storage_class,
|
||||
args,
|
||||
collection,
|
||||
)
|
||||
return args
|
||||
|
||||
return rv
|
||||
return inner
|
||||
|
|
|
|||
|
|
@ -1,12 +0,0 @@
|
|||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
if [ "$REQUIREMENTS" = "release" ] || [ "$REQUIREMENTS" = "minimal" ]; then
|
||||
radicale_pkg="radicale"
|
||||
elif [ "$REQUIREMENTS" = "devel" ]; then
|
||||
radicale_pkg="git+https://github.com/Kozea/Radicale.git"
|
||||
else
|
||||
echo "Invalid requirements envvar"
|
||||
false
|
||||
fi
|
||||
pip install wsgi_intercept $radicale_pkg
|
||||
|
|
@ -1,8 +1,9 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
class ServerMixin(object):
|
||||
|
||||
class ServerMixin:
|
||||
@pytest.fixture
|
||||
def get_storage_args(self):
|
||||
pytest.skip('DAV tests disabled.')
|
||||
pytest.skip("DAV tests disabled.")
|
||||
|
|
|
|||
|
|
@ -1 +0,0 @@
|
|||
#!/bin/sh
|
||||
|
|
@ -1,35 +1,29 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from xandikos.web import XandikosApp, XandikosBackend, WellknownRedirector
|
||||
|
||||
import wsgi_intercept
|
||||
import wsgi_intercept.requests_intercept
|
||||
|
||||
|
||||
class ServerMixin(object):
|
||||
class ServerMixin:
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, request, tmpdir, slow_create_collection):
|
||||
tmpdir.mkdir('xandikos')
|
||||
backend = XandikosBackend(path=str(tmpdir))
|
||||
cup = '/user/'
|
||||
backend.create_principal(cup, create_defaults=True)
|
||||
app = XandikosApp(backend, cup)
|
||||
|
||||
app = WellknownRedirector(app, '/')
|
||||
|
||||
wsgi_intercept.requests_intercept.install()
|
||||
wsgi_intercept.add_wsgi_intercept('127.0.0.1', 8080, lambda: app)
|
||||
|
||||
def teardown():
|
||||
wsgi_intercept.remove_wsgi_intercept('127.0.0.1', 8080)
|
||||
wsgi_intercept.requests_intercept.uninstall()
|
||||
request.addfinalizer(teardown)
|
||||
|
||||
def inner(collection='test'):
|
||||
url = 'http://127.0.0.1:8080/'
|
||||
args = {'url': url, 'collection': collection}
|
||||
def get_storage_args(
|
||||
self,
|
||||
request,
|
||||
tmpdir,
|
||||
slow_create_collection,
|
||||
xandikos_server,
|
||||
aio_connector,
|
||||
):
|
||||
async def inner(collection="test"):
|
||||
url = "http://127.0.0.1:8000/"
|
||||
args = {"url": url, "connector": aio_connector}
|
||||
|
||||
if collection is not None:
|
||||
args = self.storage_class.create_collection(**args)
|
||||
args = await slow_create_collection(
|
||||
self.storage_class,
|
||||
args,
|
||||
collection,
|
||||
)
|
||||
|
||||
return args
|
||||
|
||||
return inner
|
||||
|
|
|
|||
|
|
@ -1,13 +0,0 @@
|
|||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
pip install wsgi_intercept
|
||||
|
||||
if [ "$REQUIREMENTS" = "release" ] || [ "$REQUIREMENTS" = "minimal" ]; then
|
||||
pip install -U xandikos
|
||||
elif [ "$REQUIREMENTS" = "devel" ]; then
|
||||
pip install -U git+https://github.com/jelmer/xandikos
|
||||
else
|
||||
echo "Invalid REQUIREMENTS value"
|
||||
false
|
||||
fi
|
||||
|
|
@ -1,7 +1,8 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
|
||||
import aiostream
|
||||
import pytest
|
||||
|
||||
from vdirsyncer.storage.filesystem import FilesystemStorage
|
||||
|
|
@ -15,72 +16,117 @@ class TestFilesystemStorage(StorageTests):
|
|||
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, tmpdir):
|
||||
def inner(collection='test'):
|
||||
rv = {'path': str(tmpdir), 'fileext': '.txt', 'collection':
|
||||
collection}
|
||||
async def inner(collection="test"):
|
||||
rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection}
|
||||
if collection is not None:
|
||||
rv = self.storage_class.create_collection(**rv)
|
||||
rv = await self.storage_class.create_collection(**rv)
|
||||
return rv
|
||||
|
||||
return inner
|
||||
|
||||
def test_is_not_directory(self, tmpdir):
|
||||
with pytest.raises(IOError):
|
||||
f = tmpdir.join('hue')
|
||||
f.write('stub')
|
||||
self.storage_class(str(tmpdir) + '/hue', '.txt')
|
||||
with pytest.raises(OSError):
|
||||
f = tmpdir.join("hue")
|
||||
f.write("stub")
|
||||
self.storage_class(str(tmpdir) + "/hue", ".txt")
|
||||
|
||||
def test_broken_data(self, tmpdir):
|
||||
s = self.storage_class(str(tmpdir), '.txt')
|
||||
@pytest.mark.asyncio
|
||||
async def test_broken_data(self, tmpdir):
|
||||
s = self.storage_class(str(tmpdir), ".txt")
|
||||
|
||||
class BrokenItem(object):
|
||||
raw = u'Ц, Ш, Л, ж, Д, З, Ю'.encode('utf-8')
|
||||
uid = 'jeezus'
|
||||
class BrokenItem:
|
||||
raw = "Ц, Ш, Л, ж, Д, З, Ю".encode()
|
||||
uid = "jeezus"
|
||||
ident = uid
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
s.upload(BrokenItem)
|
||||
await s.upload(BrokenItem)
|
||||
assert not tmpdir.listdir()
|
||||
|
||||
def test_ident_with_slash(self, tmpdir):
|
||||
s = self.storage_class(str(tmpdir), '.txt')
|
||||
s.upload(Item(u'UID:a/b/c'))
|
||||
item_file, = tmpdir.listdir()
|
||||
assert '/' not in item_file.basename and item_file.isfile()
|
||||
@pytest.mark.asyncio
|
||||
async def test_ident_with_slash(self, tmpdir):
|
||||
s = self.storage_class(str(tmpdir), ".txt")
|
||||
await s.upload(Item("UID:a/b/c"))
|
||||
(item_file,) = tmpdir.listdir()
|
||||
assert "/" not in item_file.basename
|
||||
assert item_file.isfile()
|
||||
|
||||
def test_too_long_uid(self, tmpdir):
|
||||
s = self.storage_class(str(tmpdir), '.txt')
|
||||
item = Item(u'UID:' + u'hue' * 600)
|
||||
href, etag = s.upload(item)
|
||||
@pytest.mark.asyncio
|
||||
async def test_ignore_tmp_files(self, tmpdir):
|
||||
"""Test that files with .tmp suffix beside .ics files are ignored."""
|
||||
s = self.storage_class(str(tmpdir), ".ics")
|
||||
await s.upload(Item("UID:xyzxyz"))
|
||||
(item_file,) = tmpdir.listdir()
|
||||
item_file.copy(item_file.new(ext="tmp"))
|
||||
assert len(tmpdir.listdir()) == 2
|
||||
assert len(await aiostream.stream.list(s.list())) == 1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ignore_tmp_files_empty_fileext(self, tmpdir):
|
||||
"""Test that files with .tmp suffix are ignored with empty fileext."""
|
||||
s = self.storage_class(str(tmpdir), "")
|
||||
await s.upload(Item("UID:xyzxyz"))
|
||||
(item_file,) = tmpdir.listdir()
|
||||
item_file.copy(item_file.new(ext="tmp"))
|
||||
assert len(tmpdir.listdir()) == 2
|
||||
# assert False, tmpdir.listdir() # enable to see the created filename
|
||||
assert len(await aiostream.stream.list(s.list())) == 1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_ignore_files_typical_backup(self, tmpdir):
|
||||
"""Test file-name ignorance with typical backup ending ~."""
|
||||
ignorext = "~" # without dot
|
||||
|
||||
storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext)
|
||||
await storage.upload(Item("UID:xyzxyz"))
|
||||
(item_file,) = tmpdir.listdir()
|
||||
item_file.copy(item_file.new(basename=item_file.basename + ignorext))
|
||||
|
||||
assert len(tmpdir.listdir()) == 2
|
||||
assert len(await aiostream.stream.list(storage.list())) == 1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_too_long_uid(self, tmpdir):
|
||||
storage = self.storage_class(str(tmpdir), ".txt")
|
||||
item = Item("UID:" + "hue" * 600)
|
||||
|
||||
href, _etag = await storage.upload(item)
|
||||
assert item.uid not in href
|
||||
|
||||
def test_post_hook_inactive(self, tmpdir, monkeypatch):
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_post_hook_inactive(self, tmpdir, monkeypatch):
|
||||
def check_call_mock(*args, **kwargs):
|
||||
assert False
|
||||
raise AssertionError
|
||||
|
||||
monkeypatch.setattr(subprocess, 'call', check_call_mock)
|
||||
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
||||
|
||||
s = self.storage_class(str(tmpdir), '.txt', post_hook=None)
|
||||
s.upload(Item(u'UID:a/b/c'))
|
||||
|
||||
def test_post_hook_active(self, tmpdir, monkeypatch):
|
||||
s = self.storage_class(str(tmpdir), ".txt", post_hook=None)
|
||||
await s.upload(Item("UID:a/b/c"))
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_post_hook_active(self, tmpdir, monkeypatch):
|
||||
calls = []
|
||||
exe = 'foo'
|
||||
exe = "foo"
|
||||
|
||||
def check_call_mock(l, *args, **kwargs):
|
||||
def check_call_mock(call, *args, **kwargs):
|
||||
calls.append(True)
|
||||
assert len(l) == 2
|
||||
assert l[0] == exe
|
||||
assert len(call) == 2
|
||||
assert call[0] == exe
|
||||
|
||||
monkeypatch.setattr(subprocess, 'call', check_call_mock)
|
||||
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
||||
|
||||
s = self.storage_class(str(tmpdir), '.txt', post_hook=exe)
|
||||
s.upload(Item(u'UID:a/b/c'))
|
||||
s = self.storage_class(str(tmpdir), ".txt", post_hook=exe)
|
||||
await s.upload(Item("UID:a/b/c"))
|
||||
assert calls
|
||||
|
||||
def test_ignore_git_dirs(self, tmpdir):
|
||||
tmpdir.mkdir('.git').mkdir('foo')
|
||||
tmpdir.mkdir('a')
|
||||
tmpdir.mkdir('b')
|
||||
assert set(c['collection'] for c
|
||||
in self.storage_class.discover(str(tmpdir))) == {'a', 'b'}
|
||||
@pytest.mark.asyncio
|
||||
async def test_ignore_git_dirs(self, tmpdir):
|
||||
tmpdir.mkdir(".git").mkdir("foo")
|
||||
tmpdir.mkdir("a")
|
||||
tmpdir.mkdir("b")
|
||||
|
||||
expected = {"a", "b"}
|
||||
actual = {
|
||||
c["collection"] async for c in self.storage_class.discover(str(tmpdir))
|
||||
}
|
||||
assert actual == expected
|
||||
|
|
|
|||
|
|
@ -1,123 +1,163 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import aiohttp
|
||||
import pytest
|
||||
|
||||
from requests import Response
|
||||
from aioresponses import CallbackResult
|
||||
from aioresponses import aioresponses
|
||||
|
||||
from tests import normalize_item
|
||||
|
||||
from vdirsyncer.exceptions import UserError
|
||||
from vdirsyncer.storage.http import HttpStorage, prepare_auth
|
||||
from vdirsyncer.http import BasicAuthMethod
|
||||
from vdirsyncer.http import DigestAuthMethod
|
||||
from vdirsyncer.http import UsageLimitReached
|
||||
from vdirsyncer.http import request
|
||||
from vdirsyncer.storage.http import HttpStorage
|
||||
from vdirsyncer.storage.http import prepare_auth
|
||||
|
||||
|
||||
def test_list(monkeypatch):
|
||||
collection_url = 'http://127.0.0.1/calendar/collection.ics'
|
||||
@pytest.mark.asyncio
|
||||
async def test_list(aio_connector):
|
||||
collection_url = "http://127.0.0.1/calendar/collection.ics"
|
||||
|
||||
items = [
|
||||
(u'BEGIN:VEVENT\n'
|
||||
u'SUMMARY:Eine Kurzinfo\n'
|
||||
u'DESCRIPTION:Beschreibung des Termines\n'
|
||||
u'END:VEVENT'),
|
||||
(u'BEGIN:VEVENT\n'
|
||||
u'SUMMARY:Eine zweite Küèrzinfo\n'
|
||||
u'DESCRIPTION:Beschreibung des anderen Termines\n'
|
||||
u'BEGIN:VALARM\n'
|
||||
u'ACTION:AUDIO\n'
|
||||
u'TRIGGER:19980403T120000\n'
|
||||
u'ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n'
|
||||
u'REPEAT:4\n'
|
||||
u'DURATION:PT1H\n'
|
||||
u'END:VALARM\n'
|
||||
u'END:VEVENT')
|
||||
(
|
||||
"BEGIN:VEVENT\n"
|
||||
"SUMMARY:Eine Kurzinfo\n"
|
||||
"DESCRIPTION:Beschreibung des Termines\n"
|
||||
"END:VEVENT"
|
||||
),
|
||||
(
|
||||
"BEGIN:VEVENT\n"
|
||||
"SUMMARY:Eine zweite Küèrzinfo\n"
|
||||
"DESCRIPTION:Beschreibung des anderen Termines\n"
|
||||
"BEGIN:VALARM\n"
|
||||
"ACTION:AUDIO\n"
|
||||
"TRIGGER:19980403T120000\n"
|
||||
"ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n"
|
||||
"REPEAT:4\n"
|
||||
"DURATION:PT1H\n"
|
||||
"END:VALARM\n"
|
||||
"END:VEVENT"
|
||||
),
|
||||
]
|
||||
|
||||
responses = [
|
||||
u'\n'.join([u'BEGIN:VCALENDAR'] + items + [u'END:VCALENDAR'])
|
||||
] * 2
|
||||
responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2
|
||||
|
||||
def get(self, method, url, *a, **kw):
|
||||
assert method == 'GET'
|
||||
assert url == collection_url
|
||||
r = Response()
|
||||
r.status_code = 200
|
||||
def callback(url, headers, **kwargs):
|
||||
assert headers["User-Agent"].startswith("vdirsyncer/")
|
||||
assert responses
|
||||
r._content = responses.pop().encode('utf-8')
|
||||
r.headers['Content-Type'] = 'text/calendar'
|
||||
r.encoding = 'ISO-8859-1'
|
||||
return r
|
||||
|
||||
monkeypatch.setattr('requests.sessions.Session.request', get)
|
||||
return CallbackResult(
|
||||
status=200,
|
||||
body=responses.pop().encode("utf-8"),
|
||||
headers={"Content-Type": "text/calendar; charset=iso-8859-1"},
|
||||
)
|
||||
|
||||
s = HttpStorage(url=collection_url)
|
||||
with aioresponses() as m:
|
||||
m.get(collection_url, callback=callback, repeat=True)
|
||||
|
||||
found_items = {}
|
||||
s = HttpStorage(url=collection_url, connector=aio_connector)
|
||||
|
||||
for href, etag in s.list():
|
||||
item, etag2 = s.get(href)
|
||||
assert item.uid is not None
|
||||
assert etag2 == etag
|
||||
found_items[normalize_item(item)] = href
|
||||
found_items = {}
|
||||
|
||||
expected = set(normalize_item(u'BEGIN:VCALENDAR\n' + x + '\nEND:VCALENDAR')
|
||||
for x in items)
|
||||
async for href, etag in s.list():
|
||||
item, etag2 = await s.get(href)
|
||||
assert item.uid is not None
|
||||
assert etag2 == etag
|
||||
found_items[normalize_item(item)] = href
|
||||
|
||||
assert set(found_items) == expected
|
||||
expected = {
|
||||
normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
|
||||
}
|
||||
|
||||
for href, etag in s.list():
|
||||
item, etag2 = s.get(href)
|
||||
assert item.uid is not None
|
||||
assert etag2 == etag
|
||||
assert found_items[normalize_item(item)] == href
|
||||
assert set(found_items) == expected
|
||||
|
||||
async for href, etag in s.list():
|
||||
item, etag2 = await s.get(href)
|
||||
assert item.uid is not None
|
||||
assert etag2 == etag
|
||||
assert found_items[normalize_item(item)] == href
|
||||
|
||||
|
||||
def test_readonly_param():
|
||||
url = 'http://example.com/'
|
||||
def test_readonly_param(aio_connector):
|
||||
"""The ``readonly`` param cannot be ``False``."""
|
||||
|
||||
url = "http://example.com/"
|
||||
with pytest.raises(ValueError):
|
||||
HttpStorage(url=url, read_only=False)
|
||||
HttpStorage(url=url, read_only=False, connector=aio_connector)
|
||||
|
||||
a = HttpStorage(url=url, read_only=True).read_only
|
||||
b = HttpStorage(url=url, read_only=None).read_only
|
||||
assert a is b is True
|
||||
a = HttpStorage(url=url, read_only=True, connector=aio_connector)
|
||||
b = HttpStorage(url=url, read_only=None, connector=aio_connector)
|
||||
|
||||
assert a.read_only is b.read_only is True
|
||||
|
||||
|
||||
def test_prepare_auth():
|
||||
assert prepare_auth(None, '', '') is None
|
||||
assert prepare_auth(None, "", "") is None
|
||||
|
||||
assert prepare_auth(None, 'user', 'pwd') == ('user', 'pwd')
|
||||
assert prepare_auth('basic', 'user', 'pwd') == ('user', 'pwd')
|
||||
assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd")
|
||||
assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd")
|
||||
|
||||
with pytest.raises(ValueError) as excinfo:
|
||||
assert prepare_auth('basic', '', 'pwd')
|
||||
assert 'you need to specify username and password' in \
|
||||
str(excinfo.value).lower()
|
||||
assert prepare_auth("basic", "", "pwd")
|
||||
assert "you need to specify username and password" in str(excinfo.value).lower()
|
||||
|
||||
from requests.auth import HTTPDigestAuth
|
||||
assert isinstance(prepare_auth('digest', 'user', 'pwd'),
|
||||
HTTPDigestAuth)
|
||||
assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod)
|
||||
|
||||
with pytest.raises(ValueError) as excinfo:
|
||||
prepare_auth('ladida', 'user', 'pwd')
|
||||
prepare_auth("ladida", "user", "pwd")
|
||||
|
||||
assert 'unknown authentication method' in str(excinfo.value).lower()
|
||||
assert "unknown authentication method" in str(excinfo.value).lower()
|
||||
|
||||
|
||||
def test_prepare_auth_guess(monkeypatch):
|
||||
import requests_toolbelt.auth.guess
|
||||
|
||||
assert isinstance(prepare_auth('guess', 'user', 'pwd'),
|
||||
requests_toolbelt.auth.guess.GuessAuth)
|
||||
|
||||
monkeypatch.delattr(requests_toolbelt.auth.guess, 'GuessAuth')
|
||||
|
||||
def test_prepare_auth_guess():
|
||||
# guess auth is currently not supported
|
||||
with pytest.raises(UserError) as excinfo:
|
||||
prepare_auth('guess', 'user', 'pwd')
|
||||
prepare_auth("guess", "usr", "pwd")
|
||||
|
||||
assert 'requests_toolbelt is too old' in str(excinfo.value).lower()
|
||||
assert "not supported" in str(excinfo.value).lower()
|
||||
|
||||
|
||||
def test_verify_false_disallowed():
|
||||
def test_verify_false_disallowed(aio_connector):
|
||||
with pytest.raises(ValueError) as excinfo:
|
||||
HttpStorage(url='http://example.com', verify=False)
|
||||
HttpStorage(url="http://example.com", verify=False, connector=aio_connector)
|
||||
|
||||
assert 'forbidden' in str(excinfo.value).lower()
|
||||
assert 'consider setting verify_fingerprint' in str(excinfo.value).lower()
|
||||
assert "must be a path to a pem-file." in str(excinfo.value).lower()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_403_usage_limit_exceeded(aio_connector):
|
||||
url = "http://127.0.0.1/test_403"
|
||||
error_body = {
|
||||
"error": {
|
||||
"errors": [
|
||||
{
|
||||
"domain": "usageLimits",
|
||||
"message": "Calendar usage limits exceeded.",
|
||||
"reason": "quotaExceeded",
|
||||
}
|
||||
],
|
||||
"code": 403,
|
||||
"message": "Calendar usage limits exceeded.",
|
||||
}
|
||||
}
|
||||
|
||||
async with aiohttp.ClientSession(connector=aio_connector) as session:
|
||||
with aioresponses() as m:
|
||||
m.get(url, status=403, payload=error_body, repeat=True)
|
||||
with pytest.raises(UsageLimitReached):
|
||||
await request("GET", url, session)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_403_without_usage_limits_domain(aio_connector):
|
||||
"""A 403 JSON error without the Google 'usageLimits' domain should not be
|
||||
treated as UsageLimitReached and should surface as ClientResponseError.
|
||||
"""
|
||||
url = "http://127.0.0.1/test_403_no_usage_limits"
|
||||
|
||||
async with aiohttp.ClientSession(connector=aio_connector) as session:
|
||||
with aioresponses() as m:
|
||||
m.get(url, status=403, repeat=True)
|
||||
with pytest.raises(aiohttp.ClientResponseError):
|
||||
await request("GET", url, session)
|
||||
|
|
|
|||
|
|
@ -1,8 +1,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import aiostream
|
||||
import pytest
|
||||
|
||||
from requests import Response
|
||||
from aioresponses import CallbackResult
|
||||
from aioresponses import aioresponses
|
||||
|
||||
import vdirsyncer.storage.http
|
||||
from vdirsyncer.storage.base import Storage
|
||||
|
|
@ -12,37 +13,39 @@ from . import StorageTests
|
|||
|
||||
|
||||
class CombinedStorage(Storage):
|
||||
'''A subclass of HttpStorage to make testing easier. It supports writes via
|
||||
SingleFileStorage.'''
|
||||
_repr_attributes = ('url', 'path')
|
||||
storage_name = 'http_and_singlefile'
|
||||
"""A subclass of HttpStorage to make testing easier. It supports writes via
|
||||
SingleFileStorage."""
|
||||
|
||||
def __init__(self, url, path, **kwargs):
|
||||
if kwargs.get('collection', None) is not None:
|
||||
raise ValueError()
|
||||
_repr_attributes = ("url", "path")
|
||||
storage_name = "http_and_singlefile"
|
||||
|
||||
super(CombinedStorage, self).__init__(**kwargs)
|
||||
def __init__(self, url, path, *, connector, **kwargs):
|
||||
if kwargs.get("collection") is not None:
|
||||
raise ValueError
|
||||
|
||||
super().__init__(**kwargs)
|
||||
self.url = url
|
||||
self.path = path
|
||||
self._reader = vdirsyncer.storage.http.HttpStorage(url=url)
|
||||
self._reader = vdirsyncer.storage.http.HttpStorage(url=url, connector=connector)
|
||||
self._reader._ignore_uids = False
|
||||
self._writer = SingleFileStorage(path=path)
|
||||
|
||||
def list(self, *a, **kw):
|
||||
return self._reader.list(*a, **kw)
|
||||
async def list(self, *a, **kw):
|
||||
async for item in self._reader.list(*a, **kw):
|
||||
yield item
|
||||
|
||||
def get(self, *a, **kw):
|
||||
self.list()
|
||||
return self._reader.get(*a, **kw)
|
||||
async def get(self, *a, **kw):
|
||||
await aiostream.stream.list(self.list())
|
||||
return await self._reader.get(*a, **kw)
|
||||
|
||||
def upload(self, *a, **kw):
|
||||
return self._writer.upload(*a, **kw)
|
||||
async def upload(self, *a, **kw):
|
||||
return await self._writer.upload(*a, **kw)
|
||||
|
||||
def update(self, *a, **kw):
|
||||
return self._writer.update(*a, **kw)
|
||||
async def update(self, *a, **kw):
|
||||
return await self._writer.update(*a, **kw)
|
||||
|
||||
def delete(self, *a, **kw):
|
||||
return self._writer.delete(*a, **kw)
|
||||
async def delete(self, *a, **kw):
|
||||
return await self._writer.delete(*a, **kw)
|
||||
|
||||
|
||||
class TestHttpStorage(StorageTests):
|
||||
|
|
@ -52,30 +55,39 @@ class TestHttpStorage(StorageTests):
|
|||
|
||||
@pytest.fixture(autouse=True)
|
||||
def setup_tmpdir(self, tmpdir, monkeypatch):
|
||||
self.tmpfile = str(tmpdir.ensure('collection.txt'))
|
||||
self.tmpfile = str(tmpdir.ensure("collection.txt"))
|
||||
|
||||
def _request(method, url, *args, **kwargs):
|
||||
assert method == 'GET'
|
||||
assert url == 'http://localhost:123/collection.txt'
|
||||
assert 'vdirsyncer' in kwargs['headers']['User-Agent']
|
||||
r = Response()
|
||||
r.status_code = 200
|
||||
try:
|
||||
with open(self.tmpfile, 'rb') as f:
|
||||
r._content = f.read()
|
||||
except IOError:
|
||||
r._content = b''
|
||||
def callback(url, headers, **kwargs):
|
||||
"""Read our tmpfile at request time.
|
||||
|
||||
r.headers['Content-Type'] = 'text/calendar'
|
||||
r.encoding = 'utf-8'
|
||||
return r
|
||||
We can't just read this during test setup since the file get written to
|
||||
during test execution.
|
||||
|
||||
monkeypatch.setattr(vdirsyncer.storage.http, 'request', _request)
|
||||
It might make sense to actually run a server serving the local file.
|
||||
"""
|
||||
assert headers["User-Agent"].startswith("vdirsyncer/")
|
||||
|
||||
with open(self.tmpfile) as f:
|
||||
body = f.read()
|
||||
|
||||
return CallbackResult(
|
||||
status=200,
|
||||
body=body,
|
||||
headers={"Content-Type": "text/calendar; charset=utf-8"},
|
||||
)
|
||||
|
||||
with aioresponses() as m:
|
||||
m.get("http://localhost:123/collection.txt", callback=callback, repeat=True)
|
||||
yield
|
||||
|
||||
@pytest.fixture
|
||||
def get_storage_args(self):
|
||||
def inner(collection=None):
|
||||
def get_storage_args(self, aio_connector):
|
||||
async def inner(collection=None):
|
||||
assert collection is None
|
||||
return {'url': 'http://localhost:123/collection.txt',
|
||||
'path': self.tmpfile}
|
||||
return {
|
||||
"url": "http://localhost:123/collection.txt",
|
||||
"path": self.tmpfile,
|
||||
"connector": aio_connector,
|
||||
}
|
||||
|
||||
return inner
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
|
|
@ -8,10 +8,12 @@ from . import StorageTests
|
|||
|
||||
|
||||
class TestMemoryStorage(StorageTests):
|
||||
|
||||
storage_class = MemoryStorage
|
||||
supports_collections = False
|
||||
|
||||
@pytest.fixture
|
||||
def get_storage_args(self):
|
||||
return lambda **kw: kw
|
||||
async def inner(**args):
|
||||
return args
|
||||
|
||||
return inner
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
|
|
@ -8,16 +8,15 @@ from . import StorageTests
|
|||
|
||||
|
||||
class TestSingleFileStorage(StorageTests):
|
||||
|
||||
storage_class = SingleFileStorage
|
||||
supports_metadata = False
|
||||
|
||||
@pytest.fixture
|
||||
def get_storage_args(self, tmpdir):
|
||||
def inner(collection='test'):
|
||||
rv = {'path': str(tmpdir.join('%s.txt')),
|
||||
'collection': collection}
|
||||
async def inner(collection="test"):
|
||||
rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection}
|
||||
if collection is not None:
|
||||
rv = self.storage_class.create_collection(**rv)
|
||||
rv = await self.storage_class.create_collection(**rv)
|
||||
return rv
|
||||
|
||||
return inner
|
||||
|
|
|
|||
|
|
@ -1,31 +1,34 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import annotations
|
||||
|
||||
from textwrap import dedent
|
||||
|
||||
from click.testing import CliRunner
|
||||
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
import vdirsyncer.cli as cli
|
||||
|
||||
|
||||
class _CustomRunner(object):
|
||||
class _CustomRunner:
|
||||
def __init__(self, tmpdir):
|
||||
self.tmpdir = tmpdir
|
||||
self.cfg = tmpdir.join('config')
|
||||
self.cfg = tmpdir.join("config")
|
||||
self.runner = CliRunner()
|
||||
|
||||
def invoke(self, args, env=None, **kwargs):
|
||||
env = env or {}
|
||||
env.setdefault('VDIRSYNCER_CONFIG', str(self.cfg))
|
||||
env.setdefault("VDIRSYNCER_CONFIG", str(self.cfg))
|
||||
return self.runner.invoke(cli.app, args, env=env, **kwargs)
|
||||
|
||||
def write_with_general(self, data):
|
||||
self.cfg.write(dedent('''
|
||||
self.cfg.write(
|
||||
dedent(
|
||||
"""
|
||||
[general]
|
||||
status_path = "{}/status/"
|
||||
''').format(str(self.tmpdir)))
|
||||
self.cfg.write(data, mode='a')
|
||||
"""
|
||||
).format(str(self.tmpdir))
|
||||
)
|
||||
self.cfg.write(data, mode="a")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue