mirror of
https://github.com/samsonjs/vdirsyncer.git
synced 2026-03-26 09:05:50 +00:00
Compare commits
1926 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c3262d88cc | ||
| cbb4e314f6 | |||
|
|
ac9919d865 | ||
|
|
b124ce835b | ||
|
|
6708dbbbdc | ||
|
|
81d8444810 | ||
|
|
4990cdf229 | ||
|
|
4c2c60402e | ||
|
|
2f4f4ac72b | ||
|
|
6354db82c4 | ||
|
|
a9b6488dac | ||
|
|
a4ceabf80b | ||
|
|
3488f77cd6 | ||
|
|
19120422a7 | ||
|
|
2e619806a0 | ||
|
|
4669bede07 | ||
|
|
59c1c55407 | ||
|
|
1502f5b5f4 | ||
|
|
a4d4bf8fd1 | ||
|
|
aab70e9fb0 | ||
|
|
ed88406aec | ||
|
|
ffe883a2f1 | ||
|
|
e5f2869580 | ||
|
|
95bb7bd7f9 | ||
|
|
e3b2473383 | ||
|
|
424cfc5799 | ||
|
|
29312e87c5 | ||
|
|
c77b22334a | ||
|
|
02350c924b | ||
|
|
605f878f9b | ||
|
|
bb2b71da81 | ||
|
|
065ebe4752 | ||
|
|
0d741022a9 | ||
|
|
b5d3b7e578 | ||
|
|
9677cf9812 | ||
|
|
6da84c7881 | ||
|
|
dceb113334 | ||
|
|
01fa614b6b | ||
|
|
20cc1247ed | ||
|
|
2f548e048d | ||
|
|
5d343264f3 | ||
|
|
bc3fa8bd39 | ||
|
|
8803d5a086 | ||
|
|
96754a3d0a | ||
|
|
d42707c108 | ||
|
|
ddfe3cc749 | ||
|
|
84ff0ac943 | ||
|
|
388c16f188 | ||
|
|
78f41d32ce | ||
|
|
164559ad7a | ||
|
|
2c6dc4cddf | ||
|
|
9bbb7fa91a | ||
|
|
f8bcafa9d7 | ||
|
|
162879df21 | ||
|
|
3b9db0e4db | ||
|
|
63d2e6c795 | ||
|
|
03d1c4666d | ||
|
|
ecdd565be4 | ||
|
|
17e43fd633 | ||
|
|
2b4496fea4 | ||
|
|
fc4a02c0c9 | ||
|
|
c19802e4d8 | ||
|
|
cce8fef8de | ||
|
|
9a0dbc8cd0 | ||
|
|
32453cccfc | ||
|
|
057f3af293 | ||
|
|
e76d8a5b03 | ||
|
|
d8961232c4 | ||
|
|
646e0b48a5 | ||
|
|
fb6a859b88 | ||
|
|
ff999b5b74 | ||
|
|
41b48857eb | ||
|
|
70d09e6d5d | ||
|
|
8b063c39cb | ||
|
|
12a06917db | ||
|
|
2fee1d67f2 | ||
|
|
a934d5ec66 | ||
|
|
c79d3680cd | ||
|
|
cd050d57b9 | ||
|
|
8c98992f74 | ||
|
|
c2eed9fb59 | ||
|
|
a490544405 | ||
|
|
688d6f907f | ||
|
|
2e7e31fdbf | ||
|
|
616d7aacb0 | ||
|
|
89129e37b6 | ||
|
|
88722ef4b7 | ||
|
|
35f299679f | ||
|
|
67e1c0ded5 | ||
|
|
89a01631fa | ||
|
|
611b8667a3 | ||
|
|
8550475548 | ||
|
|
cd2445b991 | ||
|
|
5ca2742271 | ||
|
|
5ac9dcec29 | ||
|
|
a513a7e4fa | ||
|
|
5ae05245e6 | ||
|
|
055ed120dd | ||
|
|
31816dc652 | ||
|
|
2e023a5feb | ||
|
|
14afe16a13 | ||
|
|
5766e1c501 | ||
|
|
fade399a21 | ||
|
|
3433f8a034 | ||
|
|
6a3077f9dc | ||
|
|
42c5dba208 | ||
|
|
7991419ab1 | ||
|
|
03e6afe9dc | ||
|
|
762d369560 | ||
|
|
2396c46b04 | ||
|
|
b626236128 | ||
|
|
45b67122fe | ||
|
|
7a387b8efe | ||
|
|
889e1f9ea2 | ||
|
|
d1f93ea0be | ||
|
|
82fd03be64 | ||
|
|
b50f9def00 | ||
|
|
91c16b3215 | ||
|
|
d45ae04006 | ||
|
|
9abf9c8e45 | ||
|
|
0f0e5b97d3 | ||
|
|
301aa0e16f | ||
|
|
dcd3b7a359 | ||
|
|
df8c4a1cf5 | ||
|
|
5a17ec1bba | ||
|
|
ab3aa108fc | ||
|
|
f194bb0a4c | ||
|
|
c073d55b2f | ||
|
|
3611e7d62f | ||
|
|
adc974bdd1 | ||
|
|
efad9eb624 | ||
|
|
246568f149 | ||
|
|
439f1e6f50 | ||
|
|
ef8e8980d1 | ||
|
|
08616abbb5 | ||
|
|
4237ff863c | ||
|
|
1a6ad54543 | ||
|
|
203468fd25 | ||
|
|
6368af1365 | ||
|
|
b38306bdd0 | ||
|
|
d26557bee3 | ||
|
|
b9f749467c | ||
|
|
7e5910a341 | ||
|
|
7403182645 | ||
|
|
bad381e5ba | ||
|
|
700586d959 | ||
|
|
c1d3efb6b8 | ||
|
|
c55b969791 | ||
|
|
079a156bf8 | ||
|
|
242216d85a | ||
|
|
b1ef68089b | ||
|
|
85ae33955f | ||
|
|
54a90aa5dd | ||
|
|
443ae3d3e7 | ||
|
|
3bf9a3d684 | ||
|
|
2138c43456 | ||
|
|
5a46c93987 | ||
|
|
180f91f0fe | ||
|
|
6443d37c97 | ||
|
|
13ca008380 | ||
|
|
24cb49f64c | ||
|
|
defe8e2591 | ||
|
|
e11fa357ff | ||
|
|
e20a65793e | ||
|
|
df14865f43 | ||
|
|
f45ecf6ad0 | ||
|
|
72bcef282d | ||
|
|
3a56f26d05 | ||
|
|
4dd17c7f59 | ||
|
|
73f2554932 | ||
|
|
627f574777 | ||
|
|
37a7f9bea8 | ||
|
|
d2d1532883 | ||
|
|
0dcef26b9d | ||
|
|
d646357cd3 | ||
|
|
8c6c0be15a | ||
|
|
dfc29db312 | ||
|
|
a41cf64b6c | ||
|
|
a2eda52b71 | ||
|
|
61006f0685 | ||
|
|
9b48bccde2 | ||
|
|
7c72caef3f | ||
|
|
0045b23800 | ||
|
|
c07fbc2053 | ||
|
|
e3485beb45 | ||
|
|
0f83fd96d5 | ||
|
|
8980a80560 | ||
|
|
90b6ce1d04 | ||
|
|
7a801d3d5d | ||
|
|
2c44f7d773 | ||
|
|
6506c86f58 | ||
|
|
51b409017d | ||
|
|
84613e73b0 | ||
|
|
a4ef45095e | ||
|
|
63ba948241 | ||
|
|
3067b32de5 | ||
|
|
a87518c474 | ||
|
|
b26e771865 | ||
|
|
2fbb0ab7a5 | ||
|
|
60352f84fe | ||
|
|
b7201013bc | ||
|
|
b61095ad47 | ||
|
|
278e6de8b0 | ||
|
|
843c58b92e | ||
|
|
cd412aa161 | ||
|
|
c5f80d1644 | ||
|
|
c50eabc77e | ||
|
|
a88389c4f1 | ||
|
|
1f7497c9d1 | ||
|
|
baaf737873 | ||
|
|
7c2fed1ceb | ||
|
|
3be048be18 | ||
|
|
f103b10b2a | ||
|
|
e44c704ae3 | ||
|
|
f32e0a9c1f | ||
|
|
24e3625cc0 | ||
|
|
4df54b9231 | ||
|
|
8557c6e0bb | ||
|
|
9fdc93c140 | ||
|
|
f3f8eb6824 | ||
|
|
b18e1c78d2 | ||
|
|
0a4114ef9f | ||
|
|
06f8001d65 | ||
|
|
61f3785e6c | ||
|
|
b0020f9436 | ||
|
|
74d738ec80 | ||
|
|
711eccedab | ||
|
|
5d58a86ba0 | ||
|
|
60c3b59552 | ||
|
|
22a127191d | ||
|
|
dc6e4ba5af | ||
|
|
ea640001d0 | ||
|
|
545b8ce2f1 | ||
|
|
3035d9cfae | ||
|
|
68c5968be8 | ||
|
|
0d1ca319c0 | ||
|
|
817eab51f1 | ||
|
|
e8b72130c2 | ||
|
|
8a44b278d1 | ||
|
|
54a5bf4ad3 | ||
|
|
10659b80ba | ||
|
|
1c6beae9b4 | ||
|
|
7ce9466c46 | ||
|
|
9f0390ee21 | ||
|
|
4e3f39468b | ||
|
|
a7e984f013 | ||
|
|
7c7f97c6b2 | ||
|
|
7e9132b817 | ||
|
|
59b95d9999 | ||
|
|
4dd15716db | ||
|
|
ec101b20d6 | ||
|
|
2c551afafb | ||
|
|
ad7bb82f40 | ||
|
|
961203e865 | ||
|
|
d72536805c | ||
|
|
ac6e19261f | ||
|
|
cbb0cad827 | ||
|
|
2c69f865f0 | ||
|
|
63510414ae | ||
|
|
fce5062a12 | ||
|
|
c77d750ef6 | ||
|
|
02ee9f96e4 | ||
|
|
ddaeccb2ee | ||
|
|
63ef204835 | ||
|
|
7d61cd3e2e | ||
|
|
db6da70c26 | ||
|
|
bf95bf2941 | ||
|
|
b3c9df1b1d | ||
|
|
87574be547 | ||
|
|
2e35214421 | ||
|
|
f5c2026dcf | ||
|
|
acf29cf659 | ||
|
|
fbd5ff88d5 | ||
|
|
7605416054 | ||
|
|
7a12e6028c | ||
|
|
d6876c6bad | ||
|
|
09eb375c5b | ||
|
|
48747463ed | ||
|
|
955f434d9d | ||
|
|
889183ec89 | ||
|
|
0650cc3bc2 | ||
|
|
6281e7a237 | ||
|
|
dff48f101b | ||
|
|
1081a15895 | ||
|
|
cf1d082628 | ||
|
|
54e829262d | ||
|
|
8830307e38 | ||
|
|
7a7deffa2c | ||
|
|
ecb181d9d7 | ||
|
|
fed1ee69c3 | ||
|
|
48aa4912a2 | ||
|
|
8886854367 | ||
|
|
a910e9f446 | ||
|
|
f3714fc493 | ||
|
|
6af4dd124b | ||
|
|
bc5e03630e | ||
|
|
6491bc53fb | ||
|
|
21eccfc2ef | ||
|
|
7b493416f7 | ||
|
|
5b8f00e720 | ||
|
|
0556e53e0c | ||
|
|
7379a96f73 | ||
|
|
c0ccb3d1aa | ||
|
|
98fcd12fa7 | ||
|
|
f2a1afe6d3 | ||
|
|
e16f83c1c2 | ||
|
|
55c563ff8c | ||
|
|
a5731b269e | ||
|
|
459efbf7af | ||
|
|
58aa0a3a31 | ||
|
|
177748d3d1 | ||
|
|
61edfc090e | ||
|
|
b3bee77c17 | ||
|
|
21db2547cb | ||
|
|
be131a0063 | ||
|
|
71879045e4 | ||
|
|
54e0c114fa | ||
|
|
17f422c1b7 | ||
|
|
a9f1a5195a | ||
|
|
8dab258ef0 | ||
|
|
f09d060580 | ||
|
|
ef2419efa9 | ||
|
|
2eff8e08e1 | ||
|
|
152ebb05dd | ||
|
|
5a9fc2cc7e | ||
|
|
dfed9794cb | ||
|
|
8d69b73c9e | ||
|
|
1a1f6f0788 | ||
|
|
7c9170c677 | ||
|
|
623c0537e1 | ||
|
|
4930b5f389 | ||
|
|
25435ce11d | ||
|
|
1f6cc6f8be | ||
|
|
59b6e24795 | ||
|
|
722dace828 | ||
|
|
6cebba0853 | ||
|
|
29528123a3 | ||
|
|
72618e374d | ||
|
|
c254b4ad1d | ||
|
|
cfd5af832a | ||
|
|
342cb863cd | ||
|
|
e1c979751d | ||
|
|
3191886658 | ||
|
|
3260afb495 | ||
|
|
0231f3999e | ||
|
|
d1b148c919 | ||
|
|
e70e8c03e8 | ||
|
|
46a49e3481 | ||
|
|
5e36ca1334 | ||
|
|
910317d4bb | ||
|
|
29c2b6bb4b | ||
|
|
2e4fc7c65a | ||
|
|
87f3a594c6 | ||
|
|
d95a8264f4 | ||
|
|
77d64ddc2c | ||
|
|
808e01f9c8 | ||
|
|
86535a9db3 | ||
|
|
57d662cba1 | ||
|
|
8d62ac4279 | ||
|
|
3bf4bd079d | ||
|
|
39ccc168b2 | ||
|
|
7b0d6671da | ||
|
|
8e8c1d5719 | ||
|
|
299c699cb9 | ||
|
|
8cd4a44d02 | ||
|
|
b0f08e051a | ||
|
|
4450393d4f | ||
|
|
6c80293a98 | ||
|
|
a9fa61040f | ||
|
|
ee124f5c82 | ||
|
|
5a97307a2f | ||
|
|
e467809bb0 | ||
|
|
320ac6020b | ||
|
|
9d3ef030fa | ||
|
|
eec142ac15 | ||
|
|
96f1c41bee | ||
|
|
a0b814ec17 | ||
|
|
91ffb931e1 | ||
|
|
bba9d43caf | ||
|
|
ec221b52b4 | ||
|
|
2336076baf | ||
|
|
d747977af2 | ||
|
|
0e47775ce3 | ||
|
|
d6c1b1847c | ||
|
|
95bf683771 | ||
|
|
5844480588 | ||
|
|
b9f5d88af9 | ||
|
|
5c00cceeb4 | ||
|
|
5e3e57ffc2 | ||
|
|
addab47786 | ||
|
|
32bc8d9046 | ||
|
|
0d0f2974ae | ||
|
|
9a1582cc0f | ||
|
|
9b5e01ab38 | ||
|
|
81895c291e | ||
|
|
439e63f8ea | ||
|
|
804b9f0429 | ||
|
|
44e4beb06f | ||
|
|
94f8d608ec | ||
|
|
d2d41e5df1 | ||
|
|
abf199f21e | ||
|
|
75719ecc66 | ||
|
|
9513ee7f61 | ||
|
|
a68121e439 | ||
|
|
e355b3336b | ||
|
|
b435465bc7 | ||
|
|
56688a6c50 | ||
|
|
bc002a700e | ||
|
|
12c860978b | ||
|
|
fdc12d561c | ||
|
|
f549b1d706 | ||
|
|
90e8b7b0bc | ||
|
|
c5a59ab10b | ||
|
|
2685f8db68 | ||
|
|
98d28ea2c0 | ||
|
|
ed0b4bef10 | ||
|
|
b9c01f8756 | ||
|
|
de867fcda2 | ||
|
|
1f066ca6ca | ||
|
|
55af4eaf80 | ||
|
|
2161de30d0 | ||
|
|
ef34d77ab1 | ||
|
|
255ea8f9bf | ||
|
|
8608f37fbb | ||
|
|
5ed9c821b8 | ||
|
|
53878f001a | ||
|
|
9df587df26 | ||
|
|
8ac4a00306 | ||
|
|
7750dda980 | ||
|
|
bf67af609e | ||
|
|
50cb2def73 | ||
|
|
6897995080 | ||
|
|
31c60021fa | ||
|
|
a42906b0e8 | ||
|
|
f9c6602684 | ||
|
|
74bb2ffb66 | ||
|
|
b5d4d3f9a9 | ||
|
|
f79647b29c | ||
|
|
1de3632620 | ||
|
|
aeb46ab5a9 | ||
|
|
27ebb0902b | ||
|
|
f281f956f1 | ||
|
|
83e5361643 | ||
|
|
2650a7ed0b | ||
|
|
68ff37e677 | ||
|
|
14deb20ce5 | ||
|
|
5eef4b1ba1 | ||
|
|
7577fa2117 | ||
|
|
1031b07349 | ||
|
|
47caebe843 | ||
|
|
3eb9ce5ae4 | ||
|
|
b1b4dd92fe | ||
|
|
9cb1f8d704 | ||
|
|
56b1fc2187 | ||
|
|
b5dd0929d0 | ||
|
|
d854bd62eb | ||
|
|
f6e6b0b6c3 | ||
|
|
399274286e | ||
|
|
be59ba5ab4 | ||
|
|
6e59ee0b5f | ||
|
|
82375f20aa | ||
|
|
cd86ea7a62 | ||
|
|
354aaec2e0 | ||
|
|
af3659ac1a | ||
|
|
50eefa1816 | ||
|
|
53331fedee | ||
|
|
88f2cd5b53 | ||
|
|
4f894e04dd | ||
|
|
0319035688 | ||
|
|
6c6da2f613 | ||
|
|
b0d8fd34dc | ||
|
|
0f3b2e74c0 | ||
|
|
c410fbf331 | ||
|
|
f1f51ac3cf | ||
|
|
3037c15a65 | ||
|
|
e5caf6750d | ||
|
|
f0fe104427 | ||
|
|
5c3900500d | ||
|
|
6befffcc45 | ||
|
|
22717ee217 | ||
|
|
c78ec6b3bd | ||
|
|
289f60da44 | ||
|
|
69e235c35d | ||
|
|
5a2032d6d9 | ||
|
|
fb68a6c4aa | ||
|
|
107edfd52d | ||
|
|
60e2e9669e | ||
|
|
b1214cd693 | ||
|
|
3d7d92c2d6 | ||
|
|
59740b379f | ||
|
|
461e4c55b0 | ||
|
|
a5b98517e8 | ||
|
|
1e425a590a | ||
|
|
308289febf | ||
|
|
eece9a6bde | ||
|
|
a26d3bb58c | ||
|
|
e2d3c1add7 | ||
|
|
b4bbc5946a | ||
|
|
7e4a0be674 | ||
|
|
aafafaa501 | ||
|
|
9505430b83 | ||
|
|
216ce8d180 | ||
|
|
cb4ba5b38c | ||
|
|
72ea0a6ad3 | ||
|
|
810349eef0 | ||
|
|
65d17bdcbf | ||
|
|
470c2c6630 | ||
|
|
7c04289ed4 | ||
|
|
e987d6eb4a | ||
|
|
558da29e5e | ||
|
|
83fe7d2c8a | ||
|
|
78599a131d | ||
|
|
dcf5f701b7 | ||
|
|
80a42e4c6c | ||
|
|
aec9b91602 | ||
|
|
7e6d618ccf | ||
|
|
1f9668393c | ||
|
|
f1cc05af62 | ||
|
|
6c652c9b7a | ||
|
|
68f2cf3195 | ||
|
|
0094acddd0 | ||
|
|
83026ad7de | ||
|
|
97f58ddc46 | ||
|
|
c63e55d020 | ||
|
|
dde30c22be | ||
|
|
250bd974f3 | ||
|
|
a5ed1eb98f | ||
|
|
19dd9df091 | ||
|
|
93df284d3c | ||
|
|
cca412e7a8 | ||
|
|
9e3c231cc3 | ||
|
|
1f8d1a0dc5 | ||
|
|
be33fd3ed3 | ||
|
|
32d1fecbb4 | ||
|
|
c507e12e0d | ||
|
|
af4cb4624c | ||
|
|
cf8e89b5b4 | ||
|
|
c6cc45c4b4 | ||
|
|
944d5e709c | ||
|
|
88969ae5e6 | ||
|
|
4e2c5e51f3 | ||
|
|
8495d87196 | ||
|
|
9314a93d67 | ||
|
|
e2583ededf | ||
|
|
18a4f93c97 | ||
|
|
3112e6899e | ||
|
|
77658fa0b5 | ||
|
|
d6268f24a1 | ||
|
|
414292bc50 | ||
|
|
ebafa3b69b | ||
|
|
ab798568ac | ||
|
|
cc91f334e7 | ||
|
|
189668400e | ||
|
|
131b3d257d | ||
|
|
1da0bd1cdd | ||
|
|
6954b26e0f | ||
|
|
9930b4b3ca | ||
|
|
35ffdd6f8c | ||
|
|
c9a085522f | ||
|
|
8565277623 | ||
|
|
395e8276de | ||
|
|
629de5558d | ||
|
|
0d47e06d89 | ||
|
|
e305869fac | ||
|
|
6e27770271 | ||
|
|
2d6125f8eb | ||
|
|
547b327557 | ||
|
|
9bba4bb9fe | ||
|
|
dfc9f1aa3f | ||
|
|
01c88b514c | ||
|
|
bde00c227a | ||
|
|
3bb51f81f1 | ||
|
|
56fe38fcdc | ||
|
|
e3060529a5 | ||
|
|
3affea685a | ||
|
|
37a1eb2fdb | ||
|
|
bf79ac1748 | ||
|
|
d5db9357f9 | ||
|
|
c0a6fb1b41 | ||
|
|
87c7143fa4 | ||
|
|
5259ffd00e | ||
|
|
76df8a3032 | ||
|
|
b7e7cfc439 | ||
|
|
a96f358464 | ||
|
|
95d5b2c1b9 | ||
|
|
a2cbbbb9d1 | ||
|
|
527acb2617 | ||
|
|
fb558e5139 | ||
|
|
fb3641f3c1 | ||
|
|
72eae0a4d3 | ||
|
|
c3cefd7046 | ||
|
|
7dcd4d5118 | ||
|
|
fdff446e92 | ||
|
|
9d393623fd | ||
|
|
76b9bb0aa1 | ||
|
|
fa6a98ec9b | ||
|
|
6a273953f7 | ||
|
|
7f80251527 | ||
|
|
5f3c14ef7d | ||
|
|
1bf863c615 | ||
|
|
d2127030c2 | ||
|
|
6851ceede0 | ||
|
|
34dc84c29f | ||
|
|
9e1adf877e | ||
|
|
609434fe76 | ||
|
|
8a5df8b802 | ||
|
|
2b0492fe5e | ||
|
|
cc54ed6e7c | ||
|
|
83531f95e5 | ||
|
|
09dacde3f3 | ||
|
|
5a8b141b53 | ||
|
|
1ed6d4db0a | ||
|
|
3f6c135806 | ||
|
|
97708ab3da | ||
|
|
b0cd233f81 | ||
|
|
3dd132e1b6 | ||
|
|
0d9ec646d7 | ||
|
|
45d275f71d | ||
|
|
7aa0ea20be | ||
|
|
b6be4b44de | ||
|
|
ef0ef1bdde | ||
|
|
22d7a88063 | ||
|
|
dad9fd8904 | ||
|
|
fbe3f9910d | ||
|
|
07f6c3af12 | ||
|
|
e7a746705f | ||
|
|
9a32c34e76 | ||
|
|
0e89753757 | ||
|
|
00ce809a34 | ||
|
|
e69360aa50 | ||
|
|
9d2d35f21a | ||
|
|
2952ea8b15 | ||
|
|
b361fe755c | ||
|
|
7744070568 | ||
|
|
461fab84ec | ||
|
|
5a508ae327 | ||
|
|
28f204162b | ||
|
|
bb4a9a1e6c | ||
|
|
f36061969a | ||
|
|
fe77532948 | ||
|
|
5e0dc0d5aa | ||
|
|
56b0b48d90 | ||
|
|
8a1e7a8c3b | ||
|
|
ceb6296669 | ||
|
|
a217623e8e | ||
|
|
e5bc3fd6e7 | ||
|
|
e1af47b1aa | ||
|
|
ba17fe6135 | ||
|
|
06017c39c0 | ||
|
|
a285c555f0 | ||
|
|
feea65ff1d | ||
|
|
bb35d7c091 | ||
|
|
a02f36f0cb | ||
|
|
c15a04a1de | ||
|
|
bada6abfc6 | ||
|
|
7fc5f1bf23 | ||
|
|
785bf52d73 | ||
|
|
3b713a14ab | ||
|
|
4027e077e7 | ||
|
|
b722e5aec5 | ||
|
|
335c7062fd | ||
|
|
ae2542c992 | ||
|
|
668220dbd7 | ||
|
|
325298bf51 | ||
|
|
4de920b552 | ||
|
|
38599e1666 | ||
|
|
968b416cc5 | ||
|
|
7914acdeb7 | ||
|
|
47a38c0af3 | ||
|
|
76f794eef3 | ||
|
|
83daa8698d | ||
|
|
6aeeb90259 | ||
|
|
ded1feb05a | ||
|
|
96e7c4da37 | ||
|
|
e457586b29 | ||
|
|
b3e389cb59 | ||
|
|
76d7dcd281 | ||
|
|
821c70a782 | ||
|
|
1790fdd7cc | ||
|
|
af150ef0ae | ||
|
|
19b57f8728 | ||
|
|
ccbc01721c | ||
|
|
fcd8658b7a | ||
|
|
cc56a05b7c | ||
|
|
7f38102936 | ||
|
|
10490a12f1 | ||
|
|
c73229e0a9 | ||
|
|
a6f4232ff9 | ||
|
|
fcf8801863 | ||
|
|
7cf0865081 | ||
|
|
8d10046fc8 | ||
|
|
558ea3923a | ||
|
|
9744a3a682 | ||
|
|
c4dda80e52 | ||
|
|
ff5732f5ff | ||
|
|
c87848fd03 | ||
|
|
0c2157b56b | ||
|
|
9c9620e33f | ||
|
|
7ab6de6f37 | ||
|
|
0f2e7e2395 | ||
|
|
11694f2766 | ||
|
|
e874c38509 | ||
|
|
62764ae1ef | ||
|
|
cb92b61b8a | ||
|
|
f993f97dcc | ||
|
|
c6b23b493d | ||
|
|
38f5dafc1c | ||
|
|
6a0b95a4bd | ||
|
|
14878e9a5d | ||
|
|
960f36ccdc | ||
|
|
007e05a878 | ||
|
|
6b2450f4ac | ||
|
|
44206a1e4e | ||
|
|
49deedd416 | ||
|
|
7f43a69de6 | ||
|
|
9db58e26d0 | ||
|
|
2890b48cd7 | ||
|
|
94fe934ec2 | ||
|
|
e54f5b89a3 | ||
|
|
413e2de45d | ||
|
|
185abe9ca9 | ||
|
|
dd465eb909 | ||
|
|
d454093365 | ||
|
|
b449287784 | ||
|
|
a3e39f9c7c | ||
|
|
3514d7348c | ||
|
|
f756366081 | ||
|
|
b10e82b344 | ||
|
|
9c417248de | ||
|
|
91a284d60b | ||
|
|
0172166510 | ||
|
|
41794f095d | ||
|
|
e7910e92aa | ||
|
|
2e9bccd8b1 | ||
|
|
7d4e28e690 | ||
|
|
b2422dbea6 | ||
|
|
c9d78e2391 | ||
|
|
3cdd1f6644 | ||
|
|
b8ad1def85 | ||
|
|
7590ff3aef | ||
|
|
af5705f740 | ||
|
|
11922bdcc2 | ||
|
|
855be466bd | ||
|
|
2a3e599868 | ||
|
|
536d9400c6 | ||
|
|
5568e4873e | ||
|
|
cc8fe7f773 | ||
|
|
6280aa6643 | ||
|
|
d8d1d5b4c7 | ||
|
|
06817e40d5 | ||
|
|
94c9003593 | ||
|
|
45cea7fce2 | ||
|
|
8f00a6ae39 | ||
|
|
31963ca920 | ||
|
|
ca57a295eb | ||
|
|
cbdf0f27ae | ||
|
|
21d83ae0d2 | ||
|
|
ad8fc1dc75 | ||
|
|
11ef34a184 | ||
|
|
bd19ee45d8 | ||
|
|
e2ef842bb5 | ||
|
|
b5b3c764d5 | ||
|
|
c7164cfcca | ||
|
|
c4d04ad3d7 | ||
|
|
b0f85f9f53 | ||
|
|
c6873c5f18 | ||
|
|
7dcaa47a22 | ||
|
|
96bf21c1da | ||
|
|
eadb7203ee | ||
|
|
d1232947c6 | ||
|
|
e08d096f29 | ||
|
|
9658908118 | ||
|
|
835f8e745e | ||
|
|
ef8e3324c6 | ||
|
|
e5be554449 | ||
|
|
e17cd96342 | ||
|
|
dd9394af01 | ||
|
|
95795476ed | ||
|
|
5160437f27 | ||
|
|
db970981a0 | ||
|
|
5a257ec2cd | ||
|
|
8171c46b10 | ||
|
|
4da83bfb79 | ||
|
|
2186178968 | ||
|
|
1795e63910 | ||
|
|
c6e185d8a3 | ||
|
|
63e9e55bbe | ||
|
|
39b27532e7 | ||
|
|
74b73a4011 | ||
|
|
6ecc14fca2 | ||
|
|
ec00ea70af | ||
|
|
d28527c6ea | ||
|
|
2e3ebd814a | ||
|
|
ff7586fc40 | ||
|
|
d33c98ac93 | ||
|
|
a5eec6193b | ||
|
|
15bf13cfe1 | ||
|
|
1c030d40ac | ||
|
|
471882fc5e | ||
|
|
be297b52df | ||
|
|
fe3f8eabf1 | ||
|
|
d1495118c0 | ||
|
|
6fdee91a5b | ||
|
|
51063f09a6 | ||
|
|
827299ef24 | ||
|
|
8ca5446e02 | ||
|
|
ffeaf25471 | ||
|
|
497e4a958c | ||
|
|
fccc9094c5 | ||
|
|
4db036a055 | ||
|
|
13772c67cd | ||
|
|
0eda0984f2 | ||
|
|
3cf33e5efe | ||
|
|
826a64226e | ||
|
|
34ac29fc2a | ||
|
|
95c6be6aee | ||
|
|
f7c2aa5f81 | ||
|
|
a12df8f5d2 | ||
|
|
ef9f13c982 | ||
|
|
68d2647e38 | ||
|
|
5dc6a5ff36 | ||
|
|
7d0ec9fb32 | ||
|
|
3aca7e247a | ||
|
|
841318208d | ||
|
|
aede90eee9 | ||
|
|
c9d92ea6eb | ||
|
|
0e2d9baf39 | ||
|
|
952966b677 | ||
|
|
37ed28d849 | ||
|
|
7df4333a77 | ||
|
|
441509e191 | ||
|
|
8f6f44e1fc | ||
|
|
373f4b8e77 | ||
|
|
ce8e027346 | ||
|
|
eb5e112293 | ||
|
|
565e971e4e | ||
|
|
8e3caf2c58 | ||
|
|
a524f8e971 | ||
|
|
6f4ff7aab1 | ||
|
|
ac3fd8d7fc | ||
|
|
6aedd624a9 | ||
|
|
2cf0aee526 | ||
|
|
30ab52ea4c | ||
|
|
5f76c9e720 | ||
|
|
8cbfb69691 | ||
|
|
4e3d351917 | ||
|
|
dd5f76ca5d | ||
|
|
78e11ebb66 | ||
|
|
fbaac88776 | ||
|
|
d60132cbc0 | ||
|
|
5e44f0cfe1 | ||
|
|
2747bee9a6 | ||
|
|
1087afeaa8 | ||
|
|
722a4be598 | ||
|
|
22568571c2 | ||
|
|
e62e3a26f6 | ||
|
|
dc5500892b | ||
|
|
cfbc7ec71b | ||
|
|
99e7ff6d37 | ||
|
|
b0c498f70a | ||
|
|
1846b392fa | ||
|
|
4354469a93 | ||
|
|
b20fc996a2 | ||
|
|
18d8bb9fc2 | ||
|
|
696e53dc1f | ||
|
|
b6f009e79e | ||
|
|
5a382be9c6 | ||
|
|
8ecef1e9a6 | ||
|
|
d9387ad0a7 | ||
|
|
ecd35e4a00 | ||
|
|
509b1f5b88 | ||
|
|
540713f006 | ||
|
|
d89ec31168 | ||
|
|
12f5d32251 | ||
|
|
0de8415183 | ||
|
|
3c7c1f5f67 | ||
|
|
5ae2d27b66 | ||
|
|
66031a51e6 | ||
|
|
6bed4f504e | ||
|
|
6f43e14908 | ||
|
|
a05c30be58 | ||
|
|
8645310422 | ||
|
|
902d914db6 | ||
|
|
e1c2e1b5d8 | ||
|
|
f238a58c85 | ||
|
|
f8dffaf51e | ||
|
|
214ec10826 | ||
|
|
9a4dbef3a9 | ||
|
|
4b4ed83df2 | ||
|
|
5606f3171a | ||
|
|
a54999ff17 | ||
|
|
e9185324ae | ||
|
|
4ca4f13423 | ||
|
|
d96ad6ad10 | ||
|
|
7b81415b58 | ||
|
|
c099b876d2 | ||
|
|
3deba27d8b | ||
|
|
bec3a81186 | ||
|
|
dc8c1402c2 | ||
|
|
707c824c2b | ||
|
|
48d72aa7a1 | ||
|
|
97bfccdb44 | ||
|
|
f7076118df | ||
|
|
d8a1fb4a62 | ||
|
|
140af81ead | ||
|
|
4df423b93e | ||
|
|
e4a0afcf00 | ||
|
|
3228e22621 | ||
|
|
5b87dcceeb | ||
|
|
df5545946e | ||
|
|
842162136a | ||
|
|
8d0b055110 | ||
|
|
c6e66dc682 | ||
|
|
403a683e70 | ||
|
|
8c1ae4f099 | ||
|
|
5f302b0206 | ||
|
|
54517b61c9 | ||
|
|
cf82eef7b9 | ||
|
|
f289437e46 | ||
|
|
a8c79ed1fc | ||
|
|
2c3238e550 | ||
|
|
9f5c88bece | ||
|
|
eb2c25ee76 | ||
|
|
f892389806 | ||
|
|
0fb27cc391 | ||
|
|
3cb03a55e4 | ||
|
|
f3023748a7 | ||
|
|
d107a1f9cc | ||
|
|
1b31f1449e | ||
|
|
252f634c81 | ||
|
|
586f923c83 | ||
|
|
b3d70a7a93 | ||
|
|
d24f3835ef | ||
|
|
130c9248ba | ||
|
|
35c46a030a | ||
|
|
c84bab113a | ||
|
|
b806f1b827 | ||
|
|
aa006a7c4a | ||
|
|
c0aee14acf | ||
|
|
139182ef2a | ||
|
|
09d6aeb4b1 | ||
|
|
61dce503a3 | ||
|
|
36ffac8801 | ||
|
|
b5a690e9b2 | ||
|
|
207091f11a | ||
|
|
2b6456e572 | ||
|
|
4742ea61bc | ||
|
|
45cea8f1f3 | ||
|
|
89e45a54c6 | ||
|
|
2fd7f69479 | ||
|
|
e33768757d | ||
|
|
d7a527b5be | ||
|
|
e72dc441a5 | ||
|
|
d16721cde8 | ||
|
|
fc3e823b6b | ||
|
|
d3724ebb19 | ||
|
|
900ffceca9 | ||
|
|
e4c88ce84b | ||
|
|
08b7db3478 | ||
|
|
66c6f31c8b | ||
|
|
a1532f36f6 | ||
|
|
fc0eca0eb8 | ||
|
|
5749986b54 | ||
|
|
a79e30b0b0 | ||
|
|
cf17ac0509 | ||
|
|
cf225cc038 | ||
|
|
7418acc42f | ||
|
|
cbb1967017 | ||
|
|
b9f3ca3581 | ||
|
|
96c90af1e3 | ||
|
|
f007cacdba | ||
|
|
29d7054e82 | ||
|
|
e7d73da4aa | ||
|
|
208c9ca787 | ||
|
|
ea17f2ac01 | ||
|
|
3ac486cff1 | ||
|
|
632b884e54 | ||
|
|
70c6349de9 | ||
|
|
d06dd4ca85 | ||
|
|
b68bb78bfc | ||
|
|
41c59f4c47 | ||
|
|
f1d03e6380 | ||
|
|
db4923c3ef | ||
|
|
f1821f0786 | ||
|
|
2beb854157 | ||
|
|
9081284f02 | ||
|
|
8d9efc3014 | ||
|
|
aee99708a3 | ||
|
|
90fa9f7f5f | ||
|
|
1674a6565a | ||
|
|
746de7fb70 | ||
|
|
ec2f743ffa | ||
|
|
3bad8e0c78 | ||
|
|
d594f9fd24 | ||
|
|
43c3ef6d03 | ||
|
|
7917f2e6f4 | ||
|
|
ed61980000 | ||
|
|
52d4e596a4 | ||
|
|
05366cd2ff | ||
|
|
ba4f48842e | ||
|
|
be8df955e9 | ||
|
|
777eb35898 | ||
|
|
2386f1c1bc | ||
|
|
313e36646b | ||
|
|
46639ff65e | ||
|
|
f4d285a066 | ||
|
|
374a4921b7 | ||
|
|
12c22f19c0 | ||
|
|
f8a3961de5 | ||
|
|
089c88b6be | ||
|
|
7a5241101e | ||
|
|
10cc4b1a16 | ||
|
|
69b88dd0c0 | ||
|
|
b3c1b00f1b | ||
|
|
2fb7a8d99f | ||
|
|
020f67ec26 | ||
|
|
50fc582aa3 | ||
|
|
d034c6b67f | ||
|
|
f8a2b109a9 | ||
|
|
8134313390 | ||
|
|
45389f87d9 | ||
|
|
3b9ef7a16b | ||
|
|
77d5a7d655 | ||
|
|
6e10666ab1 | ||
|
|
2888757e1b | ||
|
|
eca9faad16 | ||
|
|
6d8db949fa | ||
|
|
4cb7e6c1f5 | ||
|
|
b501a86081 | ||
|
|
0ee39c7e0a | ||
|
|
15de73d8fc | ||
|
|
bd6bca6540 | ||
|
|
391f193eae | ||
|
|
7f54fb5dd9 | ||
|
|
862bf24e6b | ||
|
|
0195f85af0 | ||
|
|
b73c0908d0 | ||
|
|
3e327ad9f5 | ||
|
|
0788277a2f | ||
|
|
307a368917 | ||
|
|
7ce0fb958f | ||
|
|
68f052b7c2 | ||
|
|
b93bcf0ba8 | ||
|
|
c855be0015 | ||
|
|
63fc62afda | ||
|
|
085cd692a7 | ||
|
|
3fd1098ca5 | ||
|
|
89f9b35e1c | ||
|
|
071f3e06f4 | ||
|
|
b726364b93 | ||
|
|
9f48f7d3b2 | ||
|
|
0ac1688518 | ||
|
|
b197592774 | ||
|
|
87560481d6 | ||
|
|
44721f18dd | ||
|
|
e5e1f42013 | ||
|
|
dac6295376 | ||
|
|
c95fa8fc3c | ||
|
|
0e7fa2ed62 | ||
|
|
d8854294e1 | ||
|
|
c9aada3012 | ||
|
|
108c375f39 | ||
|
|
a2ec405fd0 | ||
|
|
0d0902cc48 | ||
|
|
f9375eaa82 | ||
|
|
7f124494ee | ||
|
|
163da71453 | ||
|
|
f61d19bf5f | ||
|
|
89923a0e72 | ||
|
|
6a2dc53db9 | ||
|
|
0c22a2fe1e | ||
|
|
b346e2ea07 | ||
|
|
88df14a724 | ||
|
|
130edac67e | ||
|
|
d7dace2a26 | ||
|
|
ad304d1435 | ||
|
|
4e28c07b02 | ||
|
|
55914bd575 | ||
|
|
552ecbee16 | ||
|
|
4467d3e2de | ||
|
|
376d6fc70b | ||
|
|
9862eb9865 | ||
|
|
0cba8eb816 | ||
|
|
baa82846c0 | ||
|
|
1f190e69ff | ||
|
|
a3a1898ef4 | ||
|
|
4ff0680092 | ||
|
|
393387fe46 | ||
|
|
66bd52244d | ||
|
|
acac746d2d | ||
|
|
75a9987ec2 | ||
|
|
23ea0e203f | ||
|
|
6b9d7b074a | ||
|
|
949c568852 | ||
|
|
81b56ac5d3 | ||
|
|
017495935f | ||
|
|
c049f17928 | ||
|
|
bac0966814 | ||
|
|
cfe53f4a8b | ||
|
|
88547294a7 | ||
|
|
13af5ffbaf | ||
|
|
e244eecc52 | ||
|
|
7ad2af7063 | ||
|
|
1ede6884b7 | ||
|
|
c8c4409321 | ||
|
|
fe5ac51231 | ||
|
|
dd48ac05c9 | ||
|
|
8a7ac52ee7 | ||
|
|
48a649ee8c | ||
|
|
cd07d7fc68 | ||
|
|
c7e6ca20e4 | ||
|
|
e04511f348 | ||
|
|
52489f471f | ||
|
|
e725df4747 | ||
|
|
325831049b | ||
|
|
07078fa06b | ||
|
|
5b1128611e | ||
|
|
3eee5a55a1 | ||
|
|
54fa46ffae | ||
|
|
dec27d3b17 | ||
|
|
767566b48b | ||
|
|
dc8d787d50 | ||
|
|
eb396c12cc | ||
|
|
a25fd354d9 | ||
|
|
649576eeb2 | ||
|
|
f9854b9077 | ||
|
|
768cebe0e1 | ||
|
|
8b13a932b8 | ||
|
|
f9c1bcb9a1 | ||
|
|
c66432e2ee | ||
|
|
0162a0c4b6 | ||
|
|
95a7fc9789 | ||
|
|
7d60c05b2b | ||
|
|
8a6ad410da | ||
|
|
f3c459eb7d | ||
|
|
5300e6816b | ||
|
|
d1d47256ab | ||
|
|
46b9168440 | ||
|
|
4c6d3a3b89 | ||
|
|
9c1c4f92de | ||
|
|
00c0d64632 | ||
|
|
5e8d422e9a | ||
|
|
532f63a227 | ||
|
|
85a4b4da6d | ||
|
|
53dc69262e | ||
|
|
452cec1778 | ||
|
|
36d8b535d7 | ||
|
|
865a2df854 | ||
|
|
b89b890769 | ||
|
|
2c3af41fa6 | ||
|
|
ecadc430b8 | ||
|
|
f81f248dde | ||
|
|
6e9e856aae | ||
|
|
f2a0d07c09 | ||
|
|
3d856749f3 | ||
|
|
ebfc3182ce | ||
|
|
125084fcf9 | ||
|
|
fd3d81e925 | ||
|
|
27345add6d | ||
|
|
e30f71d63e | ||
|
|
7ef0e992b1 | ||
|
|
727ce250cf | ||
|
|
3c9ef726df | ||
|
|
be4baba19e | ||
|
|
9e8b5f2dad | ||
|
|
5c769fd63d | ||
|
|
15c8ceee5f | ||
|
|
20ac606132 | ||
|
|
fcb732874d | ||
|
|
9bd490f402 | ||
|
|
d30b4752db | ||
|
|
6c7b6f47b1 | ||
|
|
b72303b823 | ||
|
|
ef8c638f41 | ||
|
|
d4a55f55b8 | ||
|
|
4212797531 | ||
|
|
b12660f1a8 | ||
|
|
c54aaeb56e | ||
|
|
0d88a6b3ab | ||
|
|
6d01161a05 | ||
|
|
3a3b6ee7ee | ||
|
|
7583be5826 | ||
|
|
a87ab770f8 | ||
|
|
893f29dbaf | ||
|
|
196f9c858a | ||
|
|
d6787892e8 | ||
|
|
7646222bc7 | ||
|
|
2957a0dc41 | ||
|
|
129c9305f7 | ||
|
|
3b9cce2128 | ||
|
|
d8964660f8 | ||
|
|
12c87df092 | ||
|
|
99f5e51ab6 | ||
|
|
8b57abaf44 | ||
|
|
ce5276fba9 | ||
|
|
4fe6d59d1f | ||
|
|
4e8dbe9598 | ||
|
|
c0f773214f | ||
|
|
bb4e7b2c0e | ||
|
|
7290321151 | ||
|
|
54620c31ed | ||
|
|
054a0f8cd2 | ||
|
|
435b25fbca | ||
|
|
51abf452a1 | ||
|
|
731d15ebc9 | ||
|
|
746e1bfccf | ||
|
|
b828685386 | ||
|
|
4970c7ed78 | ||
|
|
709377561f | ||
|
|
2c600c96e5 | ||
|
|
563f9c9d34 | ||
|
|
b96ace761b | ||
|
|
16f0c033c2 | ||
|
|
8104a91769 | ||
|
|
359a25e702 | ||
|
|
0aeb599e75 | ||
|
|
ae4049ea97 | ||
|
|
2782186421 | ||
|
|
7e9df3dee6 | ||
|
|
cdc026ed3d | ||
|
|
f876460d35 | ||
|
|
ead85992d7 | ||
|
|
c143ea4686 | ||
|
|
0a9a04eabd | ||
|
|
c739bbfcac | ||
|
|
d63b63f7f9 | ||
|
|
04f06b69f0 | ||
|
|
0044a7af74 | ||
|
|
2fd4e24a58 | ||
|
|
6b5cd035da | ||
|
|
abeb30a48a | ||
|
|
ce6653e506 | ||
|
|
53bf33dd56 | ||
|
|
2f3b2cb4f1 | ||
|
|
e3e5b3cde4 | ||
|
|
fa2b5df05c | ||
|
|
0fe2f70769 | ||
|
|
cb1d1c2558 | ||
|
|
211ff0e889 | ||
|
|
5f80b9f3f6 | ||
|
|
b694c3f60c | ||
|
|
1d5216f2a3 | ||
|
|
b144ae1701 | ||
|
|
779ff094bb | ||
|
|
e5e81d5715 | ||
|
|
3efca22207 | ||
|
|
ed22764921 | ||
|
|
b7542fb536 | ||
|
|
20e04e6bff | ||
|
|
d4cdddd72a | ||
|
|
546842896d | ||
|
|
ffadca3553 | ||
|
|
5616f0f197 | ||
|
|
86bf8ccffa | ||
|
|
ed3ffcfdee | ||
|
|
4602df47aa | ||
|
|
2775dafffd | ||
|
|
f74b6ac78e | ||
|
|
b5b6788a3b | ||
|
|
0f5bce333e | ||
|
|
d2eeaf86ee | ||
|
|
82410429ba | ||
|
|
f2edf89543 | ||
|
|
5050f4663d | ||
|
|
87baac535d | ||
|
|
6ceeb99f4a | ||
|
|
3627c6e810 | ||
|
|
c090aaa0fe | ||
|
|
98111e7625 | ||
|
|
130a5a6cc2 | ||
|
|
59078f05da | ||
|
|
4ffce4ef12 | ||
|
|
296c216bae | ||
|
|
21b1bafc48 | ||
|
|
d5254081f8 | ||
|
|
f43ef436b7 | ||
|
|
5f682cadb9 | ||
|
|
0d3e274c4f | ||
|
|
7a945e3721 | ||
|
|
611ceac847 | ||
|
|
13925ed3d5 | ||
|
|
89119f0cda | ||
|
|
86d35c6d69 | ||
|
|
b5e36ec3b2 | ||
|
|
138c8f00a0 | ||
|
|
22ce8cf7a4 | ||
|
|
3b501b16ac | ||
|
|
3adb33dc8c | ||
|
|
670ebc0d9f | ||
|
|
bcd1bfdc62 | ||
|
|
f0c1898265 | ||
|
|
e303c907ae | ||
|
|
029f5e3eee | ||
|
|
dab6388cc7 | ||
|
|
21fb9f43ae | ||
|
|
5506d6633d | ||
|
|
1c24abd4db | ||
|
|
eb1485d99f | ||
|
|
661180b138 | ||
|
|
d317250a8f | ||
|
|
8425984ebb | ||
|
|
41173acdf3 | ||
|
|
3a4e4218a6 | ||
|
|
e198326340 | ||
|
|
ed0bc9b532 | ||
|
|
4b41f4050f | ||
|
|
1182350af3 | ||
|
|
ef5e361927 | ||
|
|
2d62ec9a26 | ||
|
|
8e2070e42d | ||
|
|
d59376e231 | ||
|
|
0041934318 | ||
|
|
ccb94a1c29 | ||
|
|
32abaae9b9 | ||
|
|
565ef2e96e | ||
|
|
930ea6a226 | ||
|
|
4f7589ca7f | ||
|
|
e687e9e7df | ||
|
|
30efccb959 | ||
|
|
c8014e5205 | ||
|
|
1aa6812bad | ||
|
|
6e1846ea9d | ||
|
|
2fda4d6670 | ||
|
|
d59b310cd3 | ||
|
|
2b5576c471 | ||
|
|
97d65ab222 | ||
|
|
4b25075b97 | ||
|
|
216e6c3b21 | ||
|
|
3836448cc3 | ||
|
|
2e83354c06 | ||
|
|
fce0f8d74a | ||
|
|
d9802fc032 | ||
|
|
9229d08e55 | ||
|
|
5eab8ebb16 | ||
|
|
67c8c3f394 | ||
|
|
c51952d6fd | ||
|
|
20bacfe87e | ||
|
|
c812b5d50f | ||
|
|
7575fb21a7 | ||
|
|
10148f47f8 | ||
|
|
93480c059f | ||
|
|
edc0eb2f84 | ||
|
|
f0e842ac41 | ||
|
|
4b0a896303 | ||
|
|
7f705aeb3a | ||
|
|
552dc33037 | ||
|
|
90aaa9d12a | ||
|
|
60a7778cea | ||
|
|
0e11d2ea0e | ||
|
|
4bb4b019ce | ||
|
|
73e2ccf46a | ||
|
|
04b3379172 | ||
|
|
8e3f0ab05f | ||
|
|
fcec7e1efd | ||
|
|
b58935c90d | ||
|
|
fc5175f7f3 | ||
|
|
544a0790b5 | ||
|
|
8130789d39 | ||
|
|
283d6f98df | ||
|
|
75d4f08c25 | ||
|
|
2d5a230fa4 | ||
|
|
f8b781a6d4 | ||
|
|
e172fa43f3 | ||
|
|
c84cfc0a29 | ||
|
|
d35cb1bc50 | ||
|
|
b44db992e7 | ||
|
|
a007828f87 | ||
|
|
f087ec599e | ||
|
|
776bfa2f4f | ||
|
|
39a11994f8 | ||
|
|
1bec5afc2f | ||
|
|
31de43b4a9 | ||
|
|
acf3ac0dcd | ||
|
|
fe1d512b38 | ||
|
|
c9cc6e2aed | ||
|
|
a446307d44 | ||
|
|
cad938d023 | ||
|
|
aab878744a | ||
|
|
4d918b92c6 | ||
|
|
6fa3b1d3d7 | ||
|
|
7e6cf8cb28 | ||
|
|
902c62c083 | ||
|
|
ad26e56b1d | ||
|
|
b038a7816b | ||
|
|
8fa041a0dd | ||
|
|
555e4b688a | ||
|
|
4a17db5864 | ||
|
|
1730cc34c9 | ||
|
|
3a8d5aed77 | ||
|
|
e95f763b04 | ||
|
|
364912b496 | ||
|
|
d9a888abda | ||
|
|
11929d8b34 | ||
|
|
2866bbde5f | ||
|
|
3cb6f3389d | ||
|
|
21667f2b06 | ||
|
|
237aacee7d | ||
|
|
263a45e2a5 | ||
|
|
35c272dda2 | ||
|
|
a07c9bea44 | ||
|
|
c4b4a4e8ad | ||
|
|
d2f9c73079 | ||
|
|
13ff39801b | ||
|
|
e5c826ccfd | ||
|
|
548575bfaf | ||
|
|
da87c0210c | ||
|
|
78efcf15de | ||
|
|
c830c2b5a4 | ||
|
|
7ace6fb8f1 | ||
|
|
2170a4fce2 | ||
|
|
ce1c255152 | ||
|
|
7d41378505 | ||
|
|
ed6a65a6ab | ||
|
|
3fa7988ce4 | ||
|
|
4f16f9c64e | ||
|
|
7ef30202d6 | ||
|
|
b2fccdb484 | ||
|
|
224daba414 | ||
|
|
46addd7e35 | ||
|
|
73fe27559d | ||
|
|
c83883d976 | ||
|
|
0c4e023c95 | ||
|
|
3cd216687e | ||
|
|
c2a50d76a4 | ||
|
|
3aa27f6d37 | ||
|
|
2aaeeb4e6b | ||
|
|
209648a936 | ||
|
|
6ed02b9a3e | ||
|
|
4cfca383d9 | ||
|
|
5b4ca2975a | ||
|
|
8fd16f9987 | ||
|
|
1b5bdc35da | ||
|
|
55be28c12b | ||
|
|
eec503b7d7 | ||
|
|
157cd25151 | ||
|
|
4e8ae1e15f | ||
|
|
4adebe6026 | ||
|
|
6ab6d1ddfd | ||
|
|
e4c3dafe4a | ||
|
|
5f12be3687 | ||
|
|
9f650e9adc | ||
|
|
46d61d23dc | ||
|
|
dd0d399d82 | ||
|
|
a90815f5df | ||
|
|
8c7af4bfc9 | ||
|
|
2b3e53a7d2 | ||
|
|
6783abb65f | ||
|
|
598d3600cd | ||
|
|
00c856eca0 | ||
|
|
4d3f02b618 | ||
|
|
e80f10aa99 | ||
|
|
d37d85dc26 | ||
|
|
a32b8a9807 | ||
|
|
951082c563 | ||
|
|
f0e4cf9ca7 | ||
|
|
f23c79eac9 | ||
|
|
f32b4411ae | ||
|
|
6d50112f97 | ||
|
|
aac97b1ae7 | ||
|
|
ed12509c77 | ||
|
|
e2eb79d656 | ||
|
|
2320570217 | ||
|
|
172a2b9b98 | ||
|
|
90d283b3b8 | ||
|
|
af2846570c | ||
|
|
fecfa90bb2 | ||
|
|
1f8593ebbf | ||
|
|
53414f5bee | ||
|
|
858fda7765 | ||
|
|
0647ed91c4 | ||
|
|
287fe1dcbc | ||
|
|
575d270e06 | ||
|
|
cde248f887 | ||
|
|
0a35e27d5d | ||
|
|
1857ec50b2 | ||
|
|
f3c8c93a2b | ||
|
|
1cbb8f2274 | ||
|
|
d85da54300 | ||
|
|
e138af8d37 | ||
|
|
13408779cb | ||
|
|
aaafbd5d14 | ||
|
|
28d589e8eb | ||
|
|
21f172e218 | ||
|
|
d790a3bb5c | ||
|
|
fb5f093bdf | ||
|
|
8131166dce | ||
|
|
a05b3f27a2 | ||
|
|
9446d388d0 | ||
|
|
cbc00630f2 | ||
|
|
25f209c3ca | ||
|
|
264023c30d | ||
|
|
f17984559a | ||
|
|
345581aa5b | ||
|
|
c73efe65af | ||
|
|
a5cb7b197b | ||
|
|
c8c7305cbf | ||
|
|
07d90fa476 | ||
|
|
b2d0d9ce2e | ||
|
|
dd49b7e6fe | ||
|
|
8d5fed48bc | ||
|
|
42662a97c3 | ||
|
|
fb5ed72c93 | ||
|
|
9300069817 | ||
|
|
1ca0859da1 | ||
|
|
6751880711 | ||
|
|
836623946d | ||
|
|
a7e311dbf2 | ||
|
|
e5b75c571f | ||
|
|
f586f8ec37 | ||
|
|
b94b36a4eb | ||
|
|
39a4bd3c73 | ||
|
|
6e66b9d487 | ||
|
|
6dac27d741 | ||
|
|
a559021447 | ||
|
|
96eb309ab7 | ||
|
|
33d9d7d93e | ||
|
|
114c73537e | ||
|
|
8f0fdcfcc1 | ||
|
|
af181cfe78 | ||
|
|
18103afcc2 | ||
|
|
1221350996 | ||
|
|
d77001d3bc | ||
|
|
9223899996 | ||
|
|
2c6ccd1dfa | ||
|
|
665b0cefaf | ||
|
|
0087f191dc | ||
|
|
c8a7ad36de | ||
|
|
f88e5f836d | ||
|
|
08b6ce6b8c | ||
|
|
b2a68ec1a1 | ||
|
|
72123f00f0 | ||
|
|
c28bcacdb3 | ||
|
|
2717c14e0c | ||
|
|
1e9fe12904 | ||
|
|
6541b5b4af | ||
|
|
a1a9cfef80 | ||
|
|
eb9974f18d | ||
|
|
0490930c5e | ||
|
|
e35479b080 | ||
|
|
dec08780f3 | ||
|
|
6f77a9242c | ||
|
|
3a0757d8ae | ||
|
|
df55926c39 | ||
|
|
2a16f39c80 | ||
|
|
732fc28081 | ||
|
|
3f0415cc09 | ||
|
|
1c1385762b | ||
|
|
34be98af51 | ||
|
|
f043bdfb12 | ||
|
|
6147da0676 | ||
|
|
1ded453284 | ||
|
|
60c96ae493 | ||
|
|
943c970286 | ||
|
|
f7f271afe2 | ||
|
|
4e8ed832ae | ||
|
|
86bf936ff9 | ||
|
|
c18dde68f4 | ||
|
|
c2fac4ac6f | ||
|
|
8275d23635 | ||
|
|
32a4bd9c16 | ||
|
|
54a07099b5 | ||
|
|
084068545b | ||
|
|
8e0d4d415e | ||
|
|
2cd0a26a12 | ||
|
|
6dbe1f5eed | ||
|
|
35305ad4c1 | ||
|
|
ae9837ada2 | ||
|
|
a86088910d | ||
|
|
91d83d1b79 | ||
|
|
6207d9419c | ||
|
|
ef18e78ca0 | ||
|
|
e0167abf8c | ||
|
|
58e1a027ef | ||
|
|
857395392e | ||
|
|
208453408f | ||
|
|
ba4407af13 | ||
|
|
2831e17ae1 | ||
|
|
6d91c95b30 | ||
|
|
847f3b5142 | ||
|
|
6a9d7c191d | ||
|
|
792852c1fc | ||
|
|
d2a005cf8a | ||
|
|
32c84cc86e | ||
|
|
7d727ecef3 | ||
|
|
334ade7e6b | ||
|
|
08380c199e | ||
|
|
9014222af6 | ||
|
|
4499ba73ec | ||
|
|
51564ff594 | ||
|
|
fa54d3145a | ||
|
|
4ddcb0fef4 | ||
|
|
0eb69a810f | ||
|
|
bdc66633b3 | ||
|
|
d07fe8376e | ||
|
|
b3b356e514 | ||
|
|
27f5d54240 | ||
|
|
a2e71cc4ca | ||
|
|
e96174a3c4 | ||
|
|
43e594fb43 | ||
|
|
500de6f081 | ||
|
|
76c7f034a7 | ||
|
|
cf622a75ea | ||
|
|
5ce0d97f91 | ||
|
|
73c5b688c1 | ||
|
|
bbc8894ac2 | ||
|
|
c5659018af | ||
|
|
3b04762653 | ||
|
|
868a099310 | ||
|
|
c7bd51b56f | ||
|
|
a02fd034e4 | ||
|
|
86bcf206fd | ||
|
|
1e0a768ccb | ||
|
|
3c29bcea77 | ||
|
|
4c34a1e62d | ||
|
|
da8bba89de | ||
|
|
ce2cea130c | ||
|
|
5fa05bce13 | ||
|
|
55c2d1f619 | ||
|
|
8ab9c429cc | ||
|
|
8a723febd3 | ||
|
|
c06acbb88a | ||
|
|
a786fdf611 | ||
|
|
4d4c48ca12 | ||
|
|
4384823cd1 | ||
|
|
cda763fcc5 | ||
|
|
11bd19febf | ||
|
|
f76aeba9bc | ||
|
|
3d62c19f42 | ||
|
|
7ddd3a5fdd | ||
|
|
a1bf00837d | ||
|
|
ebedca1e85 | ||
|
|
e3bc764515 | ||
|
|
d27e5b8329 | ||
|
|
12098bcc9d | ||
|
|
c2949a62b5 | ||
|
|
6ddb666234 | ||
|
|
07883c08df | ||
|
|
e6a5fbfb42 | ||
|
|
c2750e025e | ||
|
|
dfc30b794d | ||
|
|
c5fba836d3 | ||
|
|
978abc6872 | ||
|
|
47663ae4ee | ||
|
|
d4e3b99311 | ||
|
|
07b5b1eae9 | ||
|
|
f0fe282865 | ||
|
|
a55d1f2d03 | ||
|
|
0ada12ea00 | ||
|
|
742109f542 | ||
|
|
863d574261 | ||
|
|
ccc3dee28b | ||
|
|
aad6d23ed6 | ||
|
|
98c01cdc37 | ||
|
|
9a64af85d5 | ||
|
|
8178e5ff67 | ||
|
|
eef269507f | ||
|
|
c1653937d1 | ||
|
|
51a8312f1e | ||
|
|
6f95870a48 | ||
|
|
95e81f5383 | ||
|
|
998a3884fd | ||
|
|
fab29b4ecb | ||
|
|
8e694a2a2f | ||
|
|
67ff41e820 | ||
|
|
fcf901e620 | ||
|
|
e76fd29aec | ||
|
|
0618a45a28 | ||
|
|
cc0b8ad028 | ||
|
|
584e1d9d12 | ||
|
|
401d6fbf4f | ||
|
|
b03e9fb21f | ||
|
|
7bb7191526 | ||
|
|
b548c27384 | ||
|
|
e21a197006 | ||
|
|
6c0223f048 | ||
|
|
bf81c769f8 | ||
|
|
38c1c77508 | ||
|
|
0d3dc10b35 | ||
|
|
b0d969df0d | ||
|
|
e5d27d771d | ||
|
|
e933f6db44 | ||
|
|
9ede54ed9b | ||
|
|
f6f103d1c1 | ||
|
|
0f5bf97174 | ||
|
|
b3e649d1b3 | ||
|
|
e8254f489e | ||
|
|
ce30ed7b8a | ||
|
|
40b64139fd | ||
|
|
8933da7db4 | ||
|
|
8c93247aa0 | ||
|
|
9b442ebcf3 | ||
|
|
1d8c606005 | ||
|
|
e450baf0f8 | ||
|
|
37cd8aa88d | ||
|
|
cdb25d61ec | ||
|
|
6ef330aac5 | ||
|
|
4757fac383 | ||
|
|
11c9541b53 | ||
|
|
f2d34f4784 | ||
|
|
e297e6848a | ||
|
|
2a3b56c179 | ||
|
|
b04638246a | ||
|
|
3258a59e99 | ||
|
|
2a80dfae30 | ||
|
|
839f53e872 | ||
|
|
ba99c599c3 | ||
|
|
a949b07233 | ||
|
|
c2ec4cd3a3 | ||
|
|
548b397dbb | ||
|
|
7b2bc1bc38 | ||
|
|
e0316880b4 | ||
|
|
a19838327d | ||
|
|
14699d3195 | ||
|
|
e717be9681 | ||
|
|
cfe252d458 | ||
|
|
ecb40579df | ||
|
|
3101572440 | ||
|
|
0ddd52d14f | ||
|
|
90c9f227f3 | ||
|
|
4aab600e7c | ||
|
|
625cb4adde | ||
|
|
c0c33efade | ||
|
|
d6fd9f6b3b | ||
|
|
8266145443 | ||
|
|
d19ee9fa27 | ||
|
|
e219139e08 | ||
|
|
2e2349c46d | ||
|
|
06a701bc10 | ||
|
|
1e8e931464 | ||
|
|
6b199474a6 | ||
|
|
b3964ee8bb | ||
|
|
3fbdeb51c0 | ||
|
|
d9e4a5b767 | ||
|
|
ddc47c2272 | ||
|
|
a1f2d14c05 | ||
|
|
cb44046a8a | ||
|
|
6f959b3bd3 | ||
|
|
ddd8ab675e | ||
|
|
41a92528ca | ||
|
|
eb5587f4ca | ||
|
|
ff72e34826 | ||
|
|
4c7042da56 | ||
|
|
e40ba55bd3 | ||
|
|
632a60585c | ||
|
|
3f41930f7b | ||
|
|
d91512d07e | ||
|
|
43ff90da9a | ||
|
|
291edd7be0 | ||
|
|
61bf23588d | ||
|
|
ee3f0ad300 | ||
|
|
25aff68b50 | ||
|
|
3b6a2f3664 | ||
|
|
ca5a9cd8f9 | ||
|
|
741045c1be | ||
|
|
7f2ccd6b3a | ||
|
|
6fd5f0aaa9 | ||
|
|
07de8a0cc4 | ||
|
|
a0760ca171 | ||
|
|
772f745832 | ||
|
|
d14fcbc59a | ||
|
|
5ebc9eaecb | ||
|
|
24b492c610 | ||
|
|
448dc9b6b7 | ||
|
|
2331356333 | ||
|
|
2be78545fd | ||
|
|
48d0631c3d | ||
|
|
a1d453d46c | ||
|
|
1efb00b5f7 | ||
|
|
e5ee8fcd66 | ||
|
|
08c07c4be4 | ||
|
|
566a988f32 | ||
|
|
c234bce656 | ||
|
|
07fbd9cb89 | ||
|
|
29f9a10766 | ||
|
|
11919ef30d | ||
|
|
a7878c9342 | ||
|
|
a29c60c58d | ||
|
|
af3b081e27 | ||
|
|
977ba4c285 | ||
|
|
bd235df3c8 | ||
|
|
069f5dfa89 | ||
|
|
37c2467f59 | ||
|
|
29cf9e0b50 | ||
|
|
0ac80f365d | ||
|
|
5b70688402 | ||
|
|
7d6c8b29ce | ||
|
|
4fd4e21cc6 | ||
|
|
0f5fdd6d39 | ||
|
|
09f3ad10fb | ||
|
|
de829e4cfd | ||
|
|
ca7a5318ca | ||
|
|
f113b1d1e3 | ||
|
|
0c7f73bfe6 | ||
|
|
ca30542801 | ||
|
|
0e68966963 | ||
|
|
a0c749a3b1 | ||
|
|
df305fc14e | ||
|
|
6bf0df4d89 | ||
|
|
bf88c44ca1 | ||
|
|
b6defc4a45 | ||
|
|
3fccfe19e7 | ||
|
|
ad128d37c5 | ||
|
|
f4134f0918 | ||
|
|
6a4506a9da | ||
|
|
be8c3c4f02 | ||
|
|
867148dd45 | ||
|
|
0fd026a81e | ||
|
|
9be288b70d | ||
|
|
9d3a9611b2 | ||
|
|
4857292b5c | ||
|
|
6bd5bf7422 | ||
|
|
f22548000b | ||
|
|
c5c5208662 | ||
|
|
5a26721905 | ||
|
|
75bd145507 | ||
|
|
2fd74c8cbe | ||
|
|
05dbe56acf | ||
|
|
c759069c0d | ||
|
|
79e627d429 | ||
|
|
e9200ff6a2 | ||
|
|
9290b9132f | ||
|
|
e270e78bb3 | ||
|
|
c33e67fb81 | ||
|
|
214756f28c | ||
|
|
6ac71e0e7c | ||
|
|
734fdd61f6 | ||
|
|
9688eb3a2c | ||
|
|
aba0a40fbc | ||
|
|
93d29972ec | ||
|
|
29d80b7be0 | ||
|
|
ac942bff67 | ||
|
|
d404c8c62a | ||
|
|
9b00fd89d8 | ||
|
|
6280708aee | ||
|
|
2d62c8716b | ||
|
|
8bb25e3fb2 | ||
|
|
0441fe7354 | ||
|
|
f3a67ed1f8 | ||
|
|
fefaf658f0 | ||
|
|
9b6dba8511 | ||
|
|
281b537531 | ||
|
|
19b324f5f2 | ||
|
|
cdd0d21f33 | ||
|
|
036f786ed3 | ||
|
|
27682f130d | ||
|
|
0e693c5bfe | ||
|
|
d5d55d084c | ||
|
|
32dbba8c44 | ||
|
|
fcf0f003aa | ||
|
|
1db680eb4d | ||
|
|
5539ec26e9 | ||
|
|
37551b376f | ||
|
|
14f7da4e04 | ||
|
|
1699324304 | ||
|
|
3ad598c7b4 | ||
|
|
1c1aadedc0 | ||
|
|
23a4a96cb9 | ||
|
|
64e9ef7dc3 | ||
|
|
4e895b8635 | ||
|
|
9dbb359569 | ||
|
|
4f05962cb0 | ||
|
|
3bb6662e8e | ||
|
|
753e730906 | ||
|
|
f9f1e37655 | ||
|
|
9e12e29db6 | ||
|
|
3d39526531 | ||
|
|
add9f640b4 | ||
|
|
4426fbc1d3 | ||
|
|
406392db5d | ||
|
|
686441b5ab | ||
|
|
0bc03666ba | ||
|
|
2bbc92534d | ||
|
|
fa2f7ca540 | ||
|
|
23ae6eb03f | ||
|
|
69505f4c61 | ||
|
|
7e606f6123 | ||
|
|
b3e6f22518 | ||
|
|
96b1c08804 | ||
|
|
c78ac67ba9 | ||
|
|
f6088fd036 | ||
|
|
ed6d75f1db | ||
|
|
c7e6acc0ba | ||
|
|
25843580e0 | ||
|
|
f5a701a920 | ||
|
|
9816e0140d | ||
|
|
b093989220 | ||
|
|
63c990a320 | ||
|
|
b909d525f8 | ||
|
|
e8e55de165 | ||
|
|
f61ef5318d | ||
|
|
44c1d84a7e | ||
|
|
c9c2a43f43 | ||
|
|
5509868958 | ||
|
|
d30437b8fe | ||
|
|
c86ad88c96 | ||
|
|
ec708ea273 | ||
|
|
eb0833e180 | ||
|
|
d0a2331d86 | ||
|
|
73b8381ab8 | ||
|
|
8c77c57d4c | ||
|
|
0de3102c2c | ||
|
|
c9cfd0f1ed | ||
|
|
9d034b7ed6 | ||
|
|
cd72de610a | ||
|
|
af06e24f45 | ||
|
|
56d566e55a | ||
|
|
d5e8be4979 | ||
|
|
a321038a1d | ||
|
|
bb4939bfdb | ||
|
|
ea756175d3 | ||
|
|
226006f985 | ||
|
|
cef25b58da | ||
|
|
4af6da43ef | ||
|
|
8ea3b82c50 | ||
|
|
1fff7efff5 | ||
|
|
2fd4aaead3 | ||
|
|
8414d654d3 | ||
|
|
d9c2b7ba35 | ||
|
|
a741b5d639 | ||
|
|
5051e8949a | ||
|
|
9f26c6450c | ||
|
|
39472b9db4 | ||
|
|
2e2082fb55 | ||
|
|
6841b25264 | ||
|
|
05d2beb3dc | ||
|
|
eb1431e5db |
148 changed files with 13835 additions and 4547 deletions
49
.builds/archlinux-py313.yml
Normal file
49
.builds/archlinux-py313.yml
Normal file
|
|
@ -0,0 +1,49 @@
|
||||||
|
# Run tests using the packaged dependencies on ArchLinux.
|
||||||
|
|
||||||
|
image: archlinux
|
||||||
|
packages:
|
||||||
|
- docker
|
||||||
|
- docker-compose
|
||||||
|
# Build dependencies:
|
||||||
|
- python-wheel
|
||||||
|
- python-build
|
||||||
|
- python-installer
|
||||||
|
- python-setuptools-scm
|
||||||
|
# Runtime dependencies:
|
||||||
|
- python-click
|
||||||
|
- python-click-log
|
||||||
|
- python-click-threading
|
||||||
|
- python-requests
|
||||||
|
- python-aiohttp-oauthlib
|
||||||
|
- python-tenacity
|
||||||
|
# Test dependencies:
|
||||||
|
- python-hypothesis
|
||||||
|
- python-pytest-cov
|
||||||
|
- python-pytest-httpserver
|
||||||
|
- python-trustme
|
||||||
|
- python-pytest-asyncio
|
||||||
|
- python-aiohttp
|
||||||
|
- python-aiostream
|
||||||
|
- python-aioresponses
|
||||||
|
sources:
|
||||||
|
- https://github.com/pimutils/vdirsyncer
|
||||||
|
environment:
|
||||||
|
BUILD: test
|
||||||
|
CI: true
|
||||||
|
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
||||||
|
DAV_SERVER: radicale xandikos
|
||||||
|
REQUIREMENTS: release
|
||||||
|
# TODO: ETESYNC_TESTS
|
||||||
|
tasks:
|
||||||
|
- check-python:
|
||||||
|
python --version | grep 'Python 3.13'
|
||||||
|
- docker: |
|
||||||
|
sudo systemctl start docker
|
||||||
|
- setup: |
|
||||||
|
cd vdirsyncer
|
||||||
|
python -m build --wheel --skip-dependency-check --no-isolation
|
||||||
|
sudo python -m installer dist/*.whl
|
||||||
|
- test: |
|
||||||
|
cd vdirsyncer
|
||||||
|
make -e ci-test
|
||||||
|
make -e ci-test-storage
|
||||||
36
.builds/tests-minimal.yml
Normal file
36
.builds/tests-minimal.yml
Normal file
|
|
@ -0,0 +1,36 @@
|
||||||
|
# Run tests using oldest available dependency versions.
|
||||||
|
#
|
||||||
|
# TODO: It might make more sense to test with an older Ubuntu or Fedora version
|
||||||
|
# here, and consider that our "oldest suppported environment".
|
||||||
|
|
||||||
|
image: alpine/3.19 # python 3.11
|
||||||
|
packages:
|
||||||
|
- docker
|
||||||
|
- docker-cli
|
||||||
|
- docker-compose
|
||||||
|
- py3-pip
|
||||||
|
- python3-dev
|
||||||
|
sources:
|
||||||
|
- https://github.com/pimutils/vdirsyncer
|
||||||
|
environment:
|
||||||
|
BUILD: test
|
||||||
|
CI: true
|
||||||
|
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
||||||
|
DAV_SERVER: radicale xandikos
|
||||||
|
REQUIREMENTS: minimal
|
||||||
|
tasks:
|
||||||
|
- venv: |
|
||||||
|
python3 -m venv $HOME/venv
|
||||||
|
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
||||||
|
- docker: |
|
||||||
|
sudo addgroup $(whoami) docker
|
||||||
|
sudo service docker start
|
||||||
|
- setup: |
|
||||||
|
cd vdirsyncer
|
||||||
|
# Hack, no idea why it's needed
|
||||||
|
sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
|
||||||
|
make -e install-dev
|
||||||
|
- test: |
|
||||||
|
cd vdirsyncer
|
||||||
|
make -e ci-test
|
||||||
|
make -e ci-test-storage
|
||||||
45
.builds/tests-pypi.yml
Normal file
45
.builds/tests-pypi.yml
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
# Run tests using latest dependencies from PyPI
|
||||||
|
|
||||||
|
image: archlinux
|
||||||
|
packages:
|
||||||
|
- docker
|
||||||
|
- docker-compose
|
||||||
|
- python-pip
|
||||||
|
sources:
|
||||||
|
- https://github.com/pimutils/vdirsyncer
|
||||||
|
secrets:
|
||||||
|
- 4d9a6dfe-5c8d-48bd-b864-a2f5d772c536
|
||||||
|
environment:
|
||||||
|
BUILD: test
|
||||||
|
CI: true
|
||||||
|
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
||||||
|
DAV_SERVER: baikal radicale xandikos
|
||||||
|
REQUIREMENTS: release
|
||||||
|
# TODO: ETESYNC_TESTS
|
||||||
|
tasks:
|
||||||
|
- venv: |
|
||||||
|
python -m venv $HOME/venv
|
||||||
|
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
||||||
|
- docker: |
|
||||||
|
sudo systemctl start docker
|
||||||
|
- setup: |
|
||||||
|
cd vdirsyncer
|
||||||
|
make -e install-dev
|
||||||
|
- test: |
|
||||||
|
cd vdirsyncer
|
||||||
|
make -e ci-test
|
||||||
|
make -e ci-test-storage
|
||||||
|
- check: |
|
||||||
|
cd vdirsyncer
|
||||||
|
make check
|
||||||
|
- check-secrets: |
|
||||||
|
# Stop here if this is a PR. PRs can't run with the below secrets.
|
||||||
|
[ -f ~/fastmail-secrets ] || complete-build
|
||||||
|
- extra-storages: |
|
||||||
|
set +x
|
||||||
|
source ~/fastmail-secrets
|
||||||
|
set -x
|
||||||
|
|
||||||
|
cd vdirsyncer
|
||||||
|
export PATH=$PATH:~/.local/bin/
|
||||||
|
DAV_SERVER=fastmail pytest tests/storage
|
||||||
4
.codecov.yml
Normal file
4
.codecov.yml
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
comment: false
|
||||||
|
coverage:
|
||||||
|
status:
|
||||||
|
patch: false
|
||||||
22
.coveragerc
Normal file
22
.coveragerc
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
[run]
|
||||||
|
branch = True
|
||||||
|
|
||||||
|
[paths]
|
||||||
|
source = vdirsyncer/
|
||||||
|
|
||||||
|
[report]
|
||||||
|
exclude_lines =
|
||||||
|
# Have to re-enable the standard pragma
|
||||||
|
pragma: no cover
|
||||||
|
|
||||||
|
# Don't complain about missing debug-only code:
|
||||||
|
def __repr__
|
||||||
|
if self\.debug
|
||||||
|
|
||||||
|
# Don't complain if tests don't hit defensive assertion code:
|
||||||
|
raise AssertionError
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# Don't complain if non-runnable code isn't run:
|
||||||
|
if 0:
|
||||||
|
if __name__ == .__main__.:
|
||||||
1
.envrc
Normal file
1
.envrc
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
layout python3
|
||||||
9
.gitignore
vendored
9
.gitignore
vendored
|
|
@ -6,9 +6,12 @@ build
|
||||||
env
|
env
|
||||||
*.egg-info
|
*.egg-info
|
||||||
.cache
|
.cache
|
||||||
|
.pytest_cache
|
||||||
|
.eggs
|
||||||
|
.egg
|
||||||
.xprocess
|
.xprocess
|
||||||
dist
|
dist
|
||||||
tests/storage/dav/servers/*
|
|
||||||
!tests/storage/dav/servers/__init__.py
|
|
||||||
!tests/storage/dav/servers/radicale
|
|
||||||
docs/_build/
|
docs/_build/
|
||||||
|
vdirsyncer/version.py
|
||||||
|
.hypothesis
|
||||||
|
coverage.xml
|
||||||
|
|
|
||||||
39
.pre-commit-config.yaml
Normal file
39
.pre-commit-config.yaml
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
repos:
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v5.0.0
|
||||||
|
hooks:
|
||||||
|
- id: trailing-whitespace
|
||||||
|
args: [--markdown-linebreak-ext=md]
|
||||||
|
- id: end-of-file-fixer
|
||||||
|
- id: check-toml
|
||||||
|
- id: check-added-large-files
|
||||||
|
- id: debug-statements
|
||||||
|
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||||
|
rev: "v1.15.0"
|
||||||
|
hooks:
|
||||||
|
- id: mypy
|
||||||
|
files: vdirsyncer/.*
|
||||||
|
additional_dependencies:
|
||||||
|
- types-setuptools
|
||||||
|
- types-docutils
|
||||||
|
- types-requests
|
||||||
|
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
||||||
|
rev: 'v0.11.4'
|
||||||
|
hooks:
|
||||||
|
- id: ruff
|
||||||
|
args: [--fix, --exit-non-zero-on-fix]
|
||||||
|
- id: ruff-format
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: typos-syncroniz
|
||||||
|
name: typos-syncroniz
|
||||||
|
language: system
|
||||||
|
# Not how you spell "synchronise"
|
||||||
|
entry: sh -c "git grep -i syncroniz"
|
||||||
|
files: ".*/.*"
|
||||||
|
- id: typos-text-icalendar
|
||||||
|
name: typos-text-icalendar
|
||||||
|
language: system
|
||||||
|
# It's "text/calendar", no "i".
|
||||||
|
entry: sh -c "git grep -i 'text/icalendar'"
|
||||||
|
files: ".*/.*"
|
||||||
16
.readthedocs.yaml
Normal file
16
.readthedocs.yaml
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
version: 2
|
||||||
|
|
||||||
|
sphinx:
|
||||||
|
configuration: docs/conf.py
|
||||||
|
|
||||||
|
build:
|
||||||
|
os: "ubuntu-22.04"
|
||||||
|
tools:
|
||||||
|
python: "3.9"
|
||||||
|
|
||||||
|
python:
|
||||||
|
install:
|
||||||
|
- method: pip
|
||||||
|
path: .
|
||||||
|
extra_requirements:
|
||||||
|
- docs
|
||||||
18
.travis.yml
18
.travis.yml
|
|
@ -1,18 +0,0 @@
|
||||||
language: python
|
|
||||||
python:
|
|
||||||
- "2.7"
|
|
||||||
- "3.3"
|
|
||||||
- "3.4"
|
|
||||||
env:
|
|
||||||
- BUILD=tests DAV_SERVER=radicale RADICALE_BACKEND=filesystem REQUIREMENTS=release
|
|
||||||
- BUILD=tests DAV_SERVER=radicale RADICALE_BACKEND=filesystem REQUIREMENTS=release PKGS='icalendar==3.6'
|
|
||||||
- BUILD=tests DAV_SERVER=radicale RADICALE_BACKEND=filesystem REQUIREMENTS=devel
|
|
||||||
- BUILD=tests DAV_SERVER=radicale RADICALE_BACKEND=database REQUIREMENTS=devel
|
|
||||||
- BUILD=tests DAV_SERVER=owncloud REQUIREMENTS=release
|
|
||||||
- BUILD=style
|
|
||||||
|
|
||||||
install:
|
|
||||||
- "./build.sh install"
|
|
||||||
- '[ -z "$PKGS" ] || pip install $PKGS'
|
|
||||||
script:
|
|
||||||
- "./build.sh run"
|
|
||||||
29
AUTHORS.rst
Normal file
29
AUTHORS.rst
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
Contributors
|
||||||
|
============
|
||||||
|
|
||||||
|
In alphabetical order:
|
||||||
|
|
||||||
|
- Ben Boeckel
|
||||||
|
- Bleala
|
||||||
|
- Christian Geier
|
||||||
|
- Clément Mondon
|
||||||
|
- Corey Hinshaw
|
||||||
|
- Kai Herlemann
|
||||||
|
- Hugo Osvaldo Barrera
|
||||||
|
- Jason Cox
|
||||||
|
- Julian Mehne
|
||||||
|
- Malte Kiefer
|
||||||
|
- Marek Marczykowski-Górecki
|
||||||
|
- Markus Unterwaditzer
|
||||||
|
- Michael Adler
|
||||||
|
- rEnr3n
|
||||||
|
- Thomas Weißschuh
|
||||||
|
- Witcher01
|
||||||
|
- samm81
|
||||||
|
|
||||||
|
Special thanks goes to:
|
||||||
|
|
||||||
|
* `FastMail <https://github.com/pimutils/vdirsyncer/issues/571>`_ sponsors a
|
||||||
|
paid account for testing their servers.
|
||||||
|
* `Packagecloud <https://packagecloud.io/>`_ provide repositories for
|
||||||
|
vdirsyncer's Debian packages.
|
||||||
707
CHANGELOG.rst
707
CHANGELOG.rst
|
|
@ -5,6 +5,696 @@ Changelog
|
||||||
This changelog only contains information that might be useful to end users and
|
This changelog only contains information that might be useful to end users and
|
||||||
package maintainers. For further info, see the git commit log.
|
package maintainers. For further info, see the git commit log.
|
||||||
|
|
||||||
|
Package maintainers and users who have to manually update their installation
|
||||||
|
may want to subscribe to `GitHub's tag feed
|
||||||
|
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
|
||||||
|
|
||||||
|
Version 0.21.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Implement retrying for ``google`` storage type when a rate limit is reached.
|
||||||
|
- ``tenacity`` is now a required dependency.
|
||||||
|
- Drop support for Python 3.8.
|
||||||
|
- Retry transient network errors for nullipotent requests.
|
||||||
|
|
||||||
|
Version 0.20.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Remove dependency on abandoned ``atomicwrites`` library.
|
||||||
|
- Implement ``filter_hook`` for the HTTP storage.
|
||||||
|
- Drop support for Python 3.7.
|
||||||
|
- Add support for Python 3.12 and Python 3.13.
|
||||||
|
- Properly close the status database after using. This especially affects tests,
|
||||||
|
where we were leaking a large amount of file descriptors.
|
||||||
|
- Extend supported versions of ``aiostream`` to include 0.7.x.
|
||||||
|
|
||||||
|
Version 0.19.3
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Added a no_delete option to the storage configuration. :gh:`1090`
|
||||||
|
- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
|
||||||
|
- Add an option to request vCard v4.0. :gh:`1066`
|
||||||
|
- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
|
||||||
|
- A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
|
||||||
|
- Implement digest auth. :gh:`1137`
|
||||||
|
- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`
|
||||||
|
|
||||||
|
Version 0.19.2
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Improve the performance of ``SingleFileStorage``. :gh:`818`
|
||||||
|
- Properly document some caveats of the Google Contacts storage.
|
||||||
|
- Fix crash when using auth certs. :gh:`1033`
|
||||||
|
- The ``filesystem`` storage can be specified with ``type =
|
||||||
|
"filesystem/icalendar"`` or ``type = "filesystem/vcard"``. This has not
|
||||||
|
functional impact, and is merely for forward compatibility with the Rust
|
||||||
|
implementation of vdirsyncer.
|
||||||
|
- Python 3.10 and 3.11 are officially supported.
|
||||||
|
- Instructions for integrating with Google CalDav/CardDav have changed.
|
||||||
|
Applications now need to be registered as "Desktop applications". Using "Web
|
||||||
|
application" no longer works due to changes on Google's side. :gh:`1078`
|
||||||
|
|
||||||
|
Version 0.19.1
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Fixed crash when operating on Google Contacts. :gh:`994`
|
||||||
|
- The ``HTTP_PROXY`` and ``HTTPS_PROXY`` are now respected. :gh:`1031`
|
||||||
|
- Instructions for integrating with Google CalDav/CardDav have changed.
|
||||||
|
Applications now need to be registered as "Web Application". :gh:`975`
|
||||||
|
- Various documentation updates.
|
||||||
|
|
||||||
|
Version 0.19.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
- Add "shell" password fetch strategy to pass command string to a shell.
|
||||||
|
- Add "description" and "order" as metadata. These fetch the CalDAV:
|
||||||
|
calendar-description, ``CardDAV:addressbook-description`` and
|
||||||
|
``apple-ns:calendar-order`` properties respectively.
|
||||||
|
- Add a new ``showconfig`` status. This prints *some* configuration values as
|
||||||
|
JSON. This is intended to be used by external tools and helpers that interact
|
||||||
|
with ``vdirsyncer``, and considered experimental.
|
||||||
|
- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
|
||||||
|
"create", it implicitly creates missing collections during sync without user
|
||||||
|
prompts. This simplifies workflows where collections should be automatically
|
||||||
|
created on both sides.
|
||||||
|
- Update TLS-related tests that were failing due to weak MDs. :gh:`903`
|
||||||
|
- ``pytest-httpserver`` and ``trustme`` are now required for tests.
|
||||||
|
- ``pytest-localserver`` is no longer required for tests.
|
||||||
|
- Multithreaded support has been dropped. The ``"--max-workers`` has been removed.
|
||||||
|
- A new ``asyncio`` backend is now used. So far, this shows substantial speed
|
||||||
|
improvements in ``discovery`` and ``metasync``, but little change in `sync`.
|
||||||
|
This will likely continue improving over time. :gh:`906`
|
||||||
|
- The ``google`` storage types no longer require ``requests-oauthlib``, but
|
||||||
|
require ``python-aiohttp-oauthlib`` instead.
|
||||||
|
- Vdirsyncer no longer includes experimental support for `EteSync
|
||||||
|
<https://www.etesync.com/>`_. The existing integration had not been supported
|
||||||
|
for a long time and no longer worked. Support for external storages may be
|
||||||
|
added if anyone is interested in maintaining an EteSync plugin. EteSync
|
||||||
|
users should consider using `etesync-dav`_.
|
||||||
|
- The ``plist`` for macOS has been dropped. It was broken and homebrew
|
||||||
|
generates their own based on package metadata. macOS users are encouraged to
|
||||||
|
use that as a reference.
|
||||||
|
|
||||||
|
.. _etesync-dav: https://github.com/etesync/etesync-dav
|
||||||
|
|
||||||
|
Changes to SSL configuration
|
||||||
|
----------------------------
|
||||||
|
|
||||||
|
Support for ``md5`` and ``sha1`` certificate fingerprints has been dropped. If
|
||||||
|
you're validating certificate fingerprints, use ``sha256`` instead.
|
||||||
|
|
||||||
|
When using a custom ``verify_fingerprint``, CA validation is always disabled.
|
||||||
|
|
||||||
|
If ``verify_fingerprint`` is unset, CA verification is always active. Disabling
|
||||||
|
both features is insecure and no longer supported.
|
||||||
|
|
||||||
|
The ``verify`` parameter no longer takes boolean values, it is now optional and
|
||||||
|
only takes a string to a custom CA for verification.
|
||||||
|
|
||||||
|
The ``verify`` and ``verify_fingerprint`` will likely be merged into a single
|
||||||
|
parameter in future.
|
||||||
|
|
||||||
|
Version 0.18.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
Note: Version 0.17 has some alpha releases but ultimately was never finalised.
|
||||||
|
0.18 actually continues where 0.16 left off.
|
||||||
|
|
||||||
|
- Support for Python 3.5 and 3.6 has been dropped. This release mostly focuses
|
||||||
|
on keeping vdirsyncer compatible with newer environments.
|
||||||
|
- click 8 and click-threading 0.5.0 are now required.
|
||||||
|
- For those using ``pipsi``, we now recommend using ``pipx``, it's successor.
|
||||||
|
- Python 3.9 is now supported.
|
||||||
|
- Our Debian/Ubuntu build scripts have been updated. New versions should be
|
||||||
|
pushed to those repositories soon.
|
||||||
|
|
||||||
|
Version 0.16.8
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released 09 June 2020*
|
||||||
|
|
||||||
|
- Support Python 3.7 and 3.8.
|
||||||
|
|
||||||
|
This release is functionally identical to 0.16.7.
|
||||||
|
It's been tested with recent Python versions, and has been marked as supporting
|
||||||
|
them. It will also be the final release supporting Python 3.5 and 3.6.
|
||||||
|
|
||||||
|
Version 0.16.7
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 19 July 2018*
|
||||||
|
|
||||||
|
- Fixes for Python 3.7
|
||||||
|
|
||||||
|
Version 0.16.6
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 13 June 2018*
|
||||||
|
|
||||||
|
- **Packagers:** Documentation building no longer needs a working installation
|
||||||
|
of vdirsyncer.
|
||||||
|
|
||||||
|
Version 0.16.5
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 13 June 2018*
|
||||||
|
|
||||||
|
- **Packagers:** click-log 0.3 is required.
|
||||||
|
- All output will now happen on stderr (because of the upgrade of ``click-log``).
|
||||||
|
|
||||||
|
Version 0.16.4
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 05 February 2018*
|
||||||
|
|
||||||
|
- Fix tests for new Hypothesis version. (Literally no other change included)
|
||||||
|
|
||||||
|
Version 0.16.3
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 03 October 2017*
|
||||||
|
|
||||||
|
- First version with custom Debian and Ubuntu packages. See :gh:`663`.
|
||||||
|
- Remove invalid ASCII control characters from server responses. See :gh:`626`.
|
||||||
|
- **packagers:** Python 3.3 is no longer supported. See :ghpr:`674`.
|
||||||
|
|
||||||
|
Version 0.16.2
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 24 August 2017*
|
||||||
|
|
||||||
|
- Fix crash when using daterange or item_type filters in
|
||||||
|
:storage:`google_calendar`, see :gh:`657`.
|
||||||
|
- **Packagers:** Fixes for new version ``0.2.0`` of ``click-log``. The version
|
||||||
|
requirements for the dependency ``click-log`` changed.
|
||||||
|
|
||||||
|
Version 0.16.1
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 8 August 2017*
|
||||||
|
|
||||||
|
- Removed remoteStorage support, see :gh:`647`.
|
||||||
|
- Fixed test failures caused by latest requests version, see :gh:`660`.
|
||||||
|
|
||||||
|
Version 0.16.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 2 June 2017*
|
||||||
|
|
||||||
|
- Strip ``METHOD:PUBLISH`` added by some calendar providers, see :gh:`502`.
|
||||||
|
- Fix crash of Google storages when saving token file.
|
||||||
|
- Make DAV discovery more RFC-conformant, see :ghpr:`585`.
|
||||||
|
- Vdirsyncer is now tested against Xandikos, see :ghpr:`601`.
|
||||||
|
- Subfolders with a leading dot are now ignored during discover for
|
||||||
|
``filesystem`` storage. This makes it easier to combine it with version
|
||||||
|
control.
|
||||||
|
- Statuses are now stored in a sqlite database. Old data is automatically
|
||||||
|
migrated. Users with really large datasets should encounter performance
|
||||||
|
improvements. This means that **sqlite3 is now a dependency of vdirsyncer**.
|
||||||
|
- **Vdirsyncer is now licensed under the 3-clause BSD license**, see :gh:`610`.
|
||||||
|
- Vdirsyncer now includes experimental support for `EteSync
|
||||||
|
<https://www.etesync.com/>`_, see :ghpr:`614`.
|
||||||
|
- Vdirsyncer now uses more filesystem metadata for determining whether an item
|
||||||
|
changed. You will notice a **possibly heavy CPU/IO spike on the first sync
|
||||||
|
after upgrading**.
|
||||||
|
- **Packagers:** Reference ``systemd.service`` and ``systemd.timer`` unit files
|
||||||
|
are provided. It is recommended to install these as documentation if your
|
||||||
|
distribution is systemd-based.
|
||||||
|
|
||||||
|
Version 0.15.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 28 February 2017*
|
||||||
|
|
||||||
|
- Deprecated syntax for configuration values is now completely rejected. All
|
||||||
|
values now have to be valid JSON.
|
||||||
|
- A few UX improvements for Google storages, see :gh:`549` and :gh:`552`.
|
||||||
|
- Fix collection discovery for :storage:`google_contacts`, see :gh:`564`.
|
||||||
|
- iCloud is now tested on Travis, see :gh:`567`.
|
||||||
|
|
||||||
|
Version 0.14.1
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 05 January 2017*
|
||||||
|
|
||||||
|
- ``vdirsyncer repair`` no longer changes "unsafe" UIDs by default, an extra
|
||||||
|
option has to be specified. See :gh:`527`.
|
||||||
|
- A lot of important documentation updates.
|
||||||
|
|
||||||
|
Version 0.14.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 26 October 2016*
|
||||||
|
|
||||||
|
- ``vdirsyncer sync`` now continues other uploads if one upload failed. The
|
||||||
|
exit code in such situations is still non-zero.
|
||||||
|
- Add ``partial_sync`` option to pair section. See :ref:`the config docs
|
||||||
|
<partial_sync_def>`.
|
||||||
|
- Vdirsyncer will now warn if there's a string without quotes in your config.
|
||||||
|
Please file issues if you find documentation that uses unquoted strings.
|
||||||
|
- Fix an issue that would break khal's config setup wizard.
|
||||||
|
|
||||||
|
Version 0.13.1
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 30 September 2016*
|
||||||
|
|
||||||
|
- Fix a bug that would completely break collection discovery.
|
||||||
|
|
||||||
|
Version 0.13.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 29 September 2016*
|
||||||
|
|
||||||
|
- Python 2 is no longer supported at all. See :gh:`219`.
|
||||||
|
- Config sections are now checked for duplicate names. This also means that you
|
||||||
|
cannot have a storage section ``[storage foo]`` and a pair ``[pair foo]`` in
|
||||||
|
your config, they have to have different names. This is done such that
|
||||||
|
console output is always unambiguous. See :gh:`459`.
|
||||||
|
- Custom commands can now be used for conflict resolution during sync. See
|
||||||
|
:gh:`127`.
|
||||||
|
- :storage:`http` now completely ignores UIDs. This avoids a lot of unnecessary
|
||||||
|
down- and uploads.
|
||||||
|
|
||||||
|
Version 0.12.1
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 20 August 2016*
|
||||||
|
|
||||||
|
- Fix a crash for Google and DAV storages. See :ghpr:`492`.
|
||||||
|
- Fix an URL-encoding problem with DavMail. See :gh:`491`.
|
||||||
|
|
||||||
|
Version 0.12
|
||||||
|
============
|
||||||
|
|
||||||
|
*released on 19 August 2016*
|
||||||
|
|
||||||
|
- :storage:`singlefile` now supports collections. See :ghpr:`488`.
|
||||||
|
|
||||||
|
Version 0.11.3
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 29 July 2016*
|
||||||
|
|
||||||
|
- Default value of ``auth`` parameter was changed from ``guess`` to ``basic``
|
||||||
|
to resolve issues with the Apple Calendar Server (:gh:`457`) and improve
|
||||||
|
performance. See :gh:`461`.
|
||||||
|
- **Packagers:** The ``click-threading`` requirement is now ``>=0.2``. It was
|
||||||
|
incorrect before. See :gh:`478`.
|
||||||
|
- Fix a bug in the DAV XML parsing code that would make vdirsyncer crash on
|
||||||
|
certain input. See :gh:`480`.
|
||||||
|
- Redirect chains should now be properly handled when resolving ``well-known``
|
||||||
|
URLs. See :ghpr:`481`.
|
||||||
|
|
||||||
|
Version 0.11.2
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 15 June 2016*
|
||||||
|
|
||||||
|
- Fix typo that would break tests.
|
||||||
|
|
||||||
|
Version 0.11.1
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 15 June 2016*
|
||||||
|
|
||||||
|
- Fix a bug in collection validation.
|
||||||
|
- Fix a cosmetic bug in debug output.
|
||||||
|
- Various documentation improvements.
|
||||||
|
|
||||||
|
Version 0.11.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 19 May 2016*
|
||||||
|
|
||||||
|
- Discovery is no longer automatically done when running ``vdirsyncer sync``.
|
||||||
|
``vdirsyncer discover`` now has to be explicitly called.
|
||||||
|
- Add a ``.plist`` example for Mac OS X.
|
||||||
|
- Usage under Python 2 now requires a special config parameter to be set.
|
||||||
|
- Various deprecated configuration parameters do no longer have specialized
|
||||||
|
error messages. The generic error message for unknown parameters is shown.
|
||||||
|
|
||||||
|
- Vdirsyncer no longer warns that the ``passwordeval`` parameter has been
|
||||||
|
renamed to ``password_command``.
|
||||||
|
|
||||||
|
- The ``keyring`` fetching strategy has been dropped some versions ago, but
|
||||||
|
now the specialized error message for it has been dropped as well.
|
||||||
|
|
||||||
|
- An old status format from version 0.4 is no longer supported. If you're
|
||||||
|
experiencing problems, just delete your status folder.
|
||||||
|
|
||||||
|
Version 0.10.0
|
||||||
|
==============
|
||||||
|
|
||||||
|
*released on 23 April 2016*
|
||||||
|
|
||||||
|
- New storage types :storage:`google_calendar` and :storage:`google_contacts`
|
||||||
|
have been added.
|
||||||
|
- New global command line option `--config`, to specify an alternative config
|
||||||
|
file. See :gh:`409`.
|
||||||
|
- The ``collections`` parameter can now be used to synchronize
|
||||||
|
differently-named collections with each other.
|
||||||
|
- **Packagers:** The ``lxml`` dependency has been dropped.
|
||||||
|
- XML parsing is now a lot stricter. Malfunctioning servers that used to work
|
||||||
|
with vdirsyncer may stop working.
|
||||||
|
|
||||||
|
Version 0.9.3
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 22 March 2016*
|
||||||
|
|
||||||
|
- :storage:`singlefile` and :storage:`http` now handle recurring events
|
||||||
|
properly.
|
||||||
|
- Fix a typo in the packaging guidelines.
|
||||||
|
- Moved to ``pimutils`` organization on GitHub. Old links *should* redirect,
|
||||||
|
but be aware of client software that doesn't properly handle redirects.
|
||||||
|
|
||||||
|
Version 0.9.2
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 13 March 2016*
|
||||||
|
|
||||||
|
- Fixed testsuite for environments that don't have any web browser installed.
|
||||||
|
See :ghpr:`384`.
|
||||||
|
|
||||||
|
Version 0.9.1
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 13 March 2016*
|
||||||
|
|
||||||
|
- Removed leftover debug print statement in ``vdirsyncer discover``, see commit
|
||||||
|
``3d856749f37639821b148238ef35f1acba82db36``.
|
||||||
|
|
||||||
|
- ``metasync`` will now strip whitespace from the start and the end of the
|
||||||
|
values. See :gh:`358`.
|
||||||
|
|
||||||
|
- New ``Packaging Guidelines`` have been added to the documentation.
|
||||||
|
|
||||||
|
Version 0.9.0
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 15 February 2016*
|
||||||
|
|
||||||
|
- The ``collections`` parameter is now required in pair configurations.
|
||||||
|
Vdirsyncer will tell you what to do in its error message. See :gh:`328`.
|
||||||
|
|
||||||
|
Version 0.8.1
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 30 January 2016*
|
||||||
|
|
||||||
|
- Fix error messages when invalid parameter fetching strategy is used. This is
|
||||||
|
important because users would receive awkward errors for using deprecated
|
||||||
|
``keyring`` fetching.
|
||||||
|
|
||||||
|
Version 0.8.0
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 27 January 2016*
|
||||||
|
|
||||||
|
- Keyring support has been removed, which means that ``password.fetch =
|
||||||
|
["keyring", "example.com", "myuser"]`` doesn't work anymore.
|
||||||
|
|
||||||
|
For existing setups: Use ``password.fetch = ["command", "keyring", "get",
|
||||||
|
"example.com", "myuser"]`` instead, which is more generic. See the
|
||||||
|
documentation for details.
|
||||||
|
|
||||||
|
- Now emitting a warning when running under Python 2. See :gh:`219`.
|
||||||
|
|
||||||
|
Version 0.7.5
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 23 December 2015*
|
||||||
|
|
||||||
|
- Fixed a bug in :storage:`remotestorage` that would try to open a CLI browser
|
||||||
|
for OAuth.
|
||||||
|
- Fix a packaging bug that would prevent vdirsyncer from working with newer
|
||||||
|
lxml versions.
|
||||||
|
|
||||||
|
Version 0.7.4
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 22 December 2015*
|
||||||
|
|
||||||
|
- Improved error messages instead of faulty server behavior, see :gh:`290` and
|
||||||
|
:gh:`300`.
|
||||||
|
- Safer shutdown of threadpool, avoid exceptions, see :gh:`291`.
|
||||||
|
- Fix a sync bug for read-only storages see commit
|
||||||
|
``ed22764921b2e5bf6a934cf14aa9c5fede804d8e``.
|
||||||
|
- Etag changes are no longer sufficient to trigger sync operations. An actual
|
||||||
|
content change is also necessary. See :gh:`257`.
|
||||||
|
- :storage:`remotestorage` now automatically opens authentication dialogs in
|
||||||
|
your configured GUI browser.
|
||||||
|
- **Packagers:** ``lxml>=3.1`` is now required (newer lower-bound version).
|
||||||
|
|
||||||
|
Version 0.7.3
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 05 November 2015*
|
||||||
|
|
||||||
|
- Make remotestorage-dependencies actually optional.
|
||||||
|
|
||||||
|
Version 0.7.2
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 05 November 2015*
|
||||||
|
|
||||||
|
- Un-break testsuite.
|
||||||
|
|
||||||
|
Version 0.7.1
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 05 November 2015*
|
||||||
|
|
||||||
|
- **Packagers:** The setuptools extras ``keyring`` and ``remotestorage`` have
|
||||||
|
been added. They're basically optional dependencies. See ``setup.py`` for
|
||||||
|
more details.
|
||||||
|
|
||||||
|
- Highly experimental remoteStorage support has been added. It may be
|
||||||
|
completely overhauled or even removed in any version.
|
||||||
|
|
||||||
|
- Removed mentions of old ``password_command`` in documentation.
|
||||||
|
|
||||||
|
Version 0.7.0
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 27 October 2015*
|
||||||
|
|
||||||
|
- **Packagers:** New dependencies are ``click_threading``, ``click_log`` and
|
||||||
|
``click>=5.0``.
|
||||||
|
- ``password_command`` is gone. Keyring support got completely overhauled. See
|
||||||
|
:doc:`keyring`.
|
||||||
|
|
||||||
|
Version 0.6.0
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 06 August 2015*
|
||||||
|
|
||||||
|
- ``password_command`` invocations with non-zero exit code are now fatal (and
|
||||||
|
will abort synchronization) instead of just producing a warning.
|
||||||
|
- Vdirsyncer is now able to synchronize metadata of collections. Set ``metadata
|
||||||
|
= ["displayname"]`` and run ``vdirsyncer metasync``.
|
||||||
|
- **Packagers:** Don't use the GitHub tarballs, but the PyPI ones.
|
||||||
|
- **Packagers:** ``build.sh`` is gone, and ``Makefile`` is included in
|
||||||
|
tarballs. See the content of ``Makefile`` on how to run tests post-packaging.
|
||||||
|
- ``verify_fingerprint`` doesn't automatically disable ``verify`` anymore.
|
||||||
|
|
||||||
|
Version 0.5.2
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 15 June 2015*
|
||||||
|
|
||||||
|
- Vdirsyncer now checks and corrects the permissions of status files.
|
||||||
|
- Vdirsyncer is now more robust towards changing UIDs inside items.
|
||||||
|
- Vdirsyncer is now handling unicode hrefs and UIDs correctly. Software that
|
||||||
|
produces non-ASCII UIDs is broken, but apparently it exists.
|
||||||
|
|
||||||
|
Version 0.5.1
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 29 May 2015*
|
||||||
|
|
||||||
|
- **N.b.: The PyPI upload of 0.5.0 is completely broken.**
|
||||||
|
- Raise version of required requests-toolbelt to ``0.4.0``.
|
||||||
|
- Command line should be a lot faster when no work is done, e.g. for help
|
||||||
|
output.
|
||||||
|
- Fix compatibility with iCloud again.
|
||||||
|
- Use only one worker if debug mode is activated.
|
||||||
|
- ``verify=false`` is now disallowed in vdirsyncer, please use
|
||||||
|
``verify_fingerprint`` instead.
|
||||||
|
- Fixed a bug where vdirsyncer's DAV storage was not using the configured
|
||||||
|
useragent for collection discovery.
|
||||||
|
|
||||||
|
Version 0.4.4
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 12 March 2015*
|
||||||
|
|
||||||
|
- Support for client certificates via the new ``auth_cert``
|
||||||
|
parameter, see :gh:`182` and :ghpr:`183`.
|
||||||
|
- The ``icalendar`` package is no longer required.
|
||||||
|
- Several bugfixes related to collection creation.
|
||||||
|
|
||||||
|
Version 0.4.3
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 20 February 2015*
|
||||||
|
|
||||||
|
- More performance improvements to ``singlefile``-storage.
|
||||||
|
- Add ``post_hook`` param to ``filesystem``-storage.
|
||||||
|
- Collection creation now also works with SabreDAV-based servers, such as
|
||||||
|
Baikal or ownCloud.
|
||||||
|
- Removed some workarounds for Radicale. Upgrading to the latest Radicale will
|
||||||
|
fix the issues.
|
||||||
|
- Fixed issues with iCloud discovery.
|
||||||
|
- Vdirsyncer now includes a simple ``repair`` command that seeks to fix some
|
||||||
|
broken items.
|
||||||
|
|
||||||
|
Version 0.4.2
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 30 January 2015*
|
||||||
|
|
||||||
|
- Vdirsyncer now respects redirects when uploading and updating items. This
|
||||||
|
might fix issues with Zimbra.
|
||||||
|
- Relative ``status_path`` values are now interpreted as relative to the
|
||||||
|
configuration file's directory.
|
||||||
|
- Fixed compatibility with custom SabreDAV servers. See :gh:`166`.
|
||||||
|
- Catch harmless threading exceptions that occur when shutting down vdirsyncer.
|
||||||
|
See :gh:`167`.
|
||||||
|
- Vdirsyncer now depends on ``atomicwrites``.
|
||||||
|
- Massive performance improvements to ``singlefile``-storage.
|
||||||
|
- Items with extremely long UIDs should now be saved properly in
|
||||||
|
``filesystem``-storage. See :gh:`173`.
|
||||||
|
|
||||||
|
Version 0.4.1
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 05 January 2015*
|
||||||
|
|
||||||
|
- All ``create`` arguments from all storages are gone. Vdirsyncer now asks if
|
||||||
|
it should try to create collections.
|
||||||
|
- The old config values ``True``, ``False``, ``on``, ``off`` and ``None`` are
|
||||||
|
now invalid.
|
||||||
|
- UID conflicts are now properly handled instead of ignoring one item. Card-
|
||||||
|
and CalDAV servers are already supposed to take care of those though.
|
||||||
|
- Official Baikal support added.
|
||||||
|
|
||||||
|
Version 0.4.0
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 31 December 2014*
|
||||||
|
|
||||||
|
- The ``passwordeval`` parameter has been renamed to ``password_command``.
|
||||||
|
- The old way of writing certain config values such as lists is now gone.
|
||||||
|
- Collection discovery has been rewritten. Old configuration files should be
|
||||||
|
compatible with it, but vdirsyncer now caches the results of the collection
|
||||||
|
discovery. You have to run ``vdirsyncer discover`` if collections were added
|
||||||
|
or removed on one side.
|
||||||
|
- Pair and storage names are now restricted to certain characters. Vdirsyncer
|
||||||
|
will issue a clear error message if your configuration file is invalid in
|
||||||
|
that regard.
|
||||||
|
- Vdirsyncer now supports the XDG-Basedir specification. If the
|
||||||
|
``VDIRSYNCER_CONFIG`` environment variable isn't set and the
|
||||||
|
``~/.vdirsyncer/config`` file doesn't exist, it will look for the
|
||||||
|
configuration file at ``$XDG_CONFIG_HOME/vdirsyncer/config``.
|
||||||
|
- Some improvements to CardDAV and CalDAV discovery, based on problems found
|
||||||
|
with FastMail. Support for ``.well-known``-URIs has been added.
|
||||||
|
|
||||||
|
Version 0.3.4
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 8 December 2014*
|
||||||
|
|
||||||
|
- Some more bugfixes to config handling.
|
||||||
|
|
||||||
|
Version 0.3.3
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 8 December 2014*
|
||||||
|
|
||||||
|
- Vdirsyncer now also works with iCloud. Particularly collection discovery and
|
||||||
|
etag handling were fixed.
|
||||||
|
- Vdirsyncer now encodes Cal- and CardDAV requests differently. This hasn't
|
||||||
|
been well-tested with servers like Zimbra or SoGo, but isn't expected to
|
||||||
|
cause any problems.
|
||||||
|
- Vdirsyncer is now more robust regarding invalid responses from CalDAV
|
||||||
|
servers. This should help with future compatibility with Davmail/Outlook.
|
||||||
|
- Fix a bug when specifying ``item_types`` of :storage:`caldav` in the
|
||||||
|
deprecated config format.
|
||||||
|
- Fix a bug where vdirsyncer would ignore all but one character specified in
|
||||||
|
``unsafe_href_chars`` of :storage:`caldav` and :storage:`carddav`.
|
||||||
|
|
||||||
|
Version 0.3.2
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 3 December 2014*
|
||||||
|
|
||||||
|
- The current config format has been deprecated, and support for it will be
|
||||||
|
removed in version 0.4.0. Vdirsyncer warns about this now.
|
||||||
|
|
||||||
|
Version 0.3.1
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 24 November 2014*
|
||||||
|
|
||||||
|
- Fixed a bug where vdirsyncer would delete items if they're deleted on side A
|
||||||
|
but modified on side B. Instead vdirsyncer will now upload the new items to
|
||||||
|
side A. See :gh:`128`.
|
||||||
|
|
||||||
|
- Synchronization continues with the remaining pairs if one pair crashes, see
|
||||||
|
:gh:`121`.
|
||||||
|
|
||||||
|
- The ``processes`` config key is gone. There is now a ``--max-workers`` option
|
||||||
|
on the CLI which has a similar purpose. See :ghpr:`126`.
|
||||||
|
|
||||||
|
- The Read The Docs-theme is no longer required for building the docs. If it is
|
||||||
|
not installed, the default theme will be used. See :gh:`134`.
|
||||||
|
|
||||||
|
Version 0.3.0
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 20 September 2014*
|
||||||
|
|
||||||
|
- Add ``verify_fingerprint`` parameter to :storage:`http`, :storage:`caldav`
|
||||||
|
and :storage:`carddav`, see :gh:`99` and :ghpr:`106`.
|
||||||
|
|
||||||
|
- Add ``passwordeval`` parameter to :ref:`general_config`, see :gh:`108` and
|
||||||
|
:ghpr:`117`.
|
||||||
|
|
||||||
|
- Emit warnings (instead of exceptions) about certain invalid responses from
|
||||||
|
the server, see :gh:`113`. This is apparently required for compatibility
|
||||||
|
with Davmail.
|
||||||
|
|
||||||
|
Version 0.2.5
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 27 August 2014*
|
||||||
|
|
||||||
|
- Don't ask for the password of one server more than once and fix multiple
|
||||||
|
concurrency issues, see :gh:`101`.
|
||||||
|
|
||||||
|
- Better validation of DAV endpoints.
|
||||||
|
|
||||||
|
Version 0.2.4
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 18 August 2014*
|
||||||
|
|
||||||
|
- Include workaround for collection discovery with latest version of Radicale.
|
||||||
|
|
||||||
|
- Include metadata files such as the changelog or license in source
|
||||||
|
distribution, see :gh:`97` and :gh:`98`.
|
||||||
|
|
||||||
|
Version 0.2.3
|
||||||
|
=============
|
||||||
|
|
||||||
|
*released on 11 August 2014*
|
||||||
|
|
||||||
|
- Vdirsyncer now has a ``--version`` flag, see :gh:`92`.
|
||||||
|
|
||||||
|
- Fix a lot of bugs related to special characters in URLs, see :gh:`49`.
|
||||||
|
|
||||||
Version 0.2.2
|
Version 0.2.2
|
||||||
=============
|
=============
|
||||||
|
|
||||||
|
|
@ -12,16 +702,12 @@ Version 0.2.2
|
||||||
|
|
||||||
- Remove a security check that caused problems with special characters in DAV
|
- Remove a security check that caused problems with special characters in DAV
|
||||||
URLs and certain servers. On top of that, the security check was nonsensical.
|
URLs and certain servers. On top of that, the security check was nonsensical.
|
||||||
See issues `#87`_ and `#91`_.
|
See :gh:`87` and :gh:`91`.
|
||||||
|
|
||||||
- Change some errors to warnings, see issue `#88`_.
|
- Change some errors to warnings, see :gh:`88`.
|
||||||
|
|
||||||
- Improve collection autodiscovery for servers without full support.
|
- Improve collection autodiscovery for servers without full support.
|
||||||
|
|
||||||
.. _`#87`: https://github.com/untitaker/vdirsyncer/issues/87
|
|
||||||
.. _`#88`: https://github.com/untitaker/vdirsyncer/issues/88
|
|
||||||
.. _`#91`: https://github.com/untitaker/vdirsyncer/issues/91
|
|
||||||
|
|
||||||
Version 0.2.1
|
Version 0.2.1
|
||||||
=============
|
=============
|
||||||
|
|
||||||
|
|
@ -51,8 +737,7 @@ Version 0.2.0
|
||||||
instead of the proper etag would have been returned from the upload method.
|
instead of the proper etag would have been returned from the upload method.
|
||||||
vdirsyncer might do unnecessary copying when upgrading to this version.
|
vdirsyncer might do unnecessary copying when upgrading to this version.
|
||||||
|
|
||||||
- Add the storage :py:class:`vdirsyncer.storage.SingleFileStorage`. See issue
|
- Add the storage :storage:`singlefile`. See :gh:`48`.
|
||||||
`#48`_.
|
|
||||||
|
|
||||||
- The ``collections`` parameter for pair sections now accepts the special
|
- The ``collections`` parameter for pair sections now accepts the special
|
||||||
values ``from a`` and ``from b`` for automatically discovering collections.
|
values ``from a`` and ``from b`` for automatically discovering collections.
|
||||||
|
|
@ -61,8 +746,6 @@ Version 0.2.0
|
||||||
- The ``read_only`` parameter was added to storage sections. See
|
- The ``read_only`` parameter was added to storage sections. See
|
||||||
:ref:`storage_config`.
|
:ref:`storage_config`.
|
||||||
|
|
||||||
.. _`#48`: https://github.com/untitaker/vdirsyncer/issues/48
|
|
||||||
|
|
||||||
Version 0.1.5
|
Version 0.1.5
|
||||||
=============
|
=============
|
||||||
|
|
||||||
|
|
@ -77,6 +760,4 @@ Version 0.1.5
|
||||||
- vdirsyncer now doesn't necessarily need UIDs anymore for synchronization.
|
- vdirsyncer now doesn't necessarily need UIDs anymore for synchronization.
|
||||||
|
|
||||||
- vdirsyncer now aborts if one collection got completely emptied between
|
- vdirsyncer now aborts if one collection got completely emptied between
|
||||||
synchronizations. See `#42`_.
|
synchronizations. See :gh:`42`.
|
||||||
|
|
||||||
.. _`#42`: https://github.com/untitaker/vdirsyncer/issues/42
|
|
||||||
|
|
|
||||||
1
CODE_OF_CONDUCT.rst
Normal file
1
CODE_OF_CONDUCT.rst
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
See `the pimutils CoC <http://pimutils.org/coc>`_.
|
||||||
|
|
@ -1,28 +1,3 @@
|
||||||
* If you're reporting an issue with vdirsyncer:
|
Please see `the documentation
|
||||||
|
<https://vdirsyncer.pimutils.org/en/stable/contributing.html>`_ for how to
|
||||||
* Make sure you have the latest version by executing ``pip install --user
|
contribute to this project.
|
||||||
--upgrade vdirsyncer``.
|
|
||||||
|
|
||||||
* Include the Python version, your configuration, the commands you're
|
|
||||||
executing, and their output.
|
|
||||||
|
|
||||||
* Use ``--verbosity=DEBUG`` when including output from vdirsyncer.
|
|
||||||
|
|
||||||
* If you're suggesting a feature, keep in mind that vdirsyncer tries not to be
|
|
||||||
a full calendar or contacts client, but rather just the piece of software
|
|
||||||
that synchronizes all the data. If you're looking for a viewer for the
|
|
||||||
calendar data you've synced, `khal <https://github.com/geier/khal>`_ is what
|
|
||||||
you're looking for.
|
|
||||||
|
|
||||||
* If you're submitting pull requests:
|
|
||||||
|
|
||||||
* Make sure your tests pass on Travis.
|
|
||||||
|
|
||||||
* But not because you wrote too few tests.
|
|
||||||
|
|
||||||
* Write descriptive commit messages, mostly because I need to write a
|
|
||||||
changelog at some point. Use ``git rebase -i`` and ``git commit --amend``
|
|
||||||
if needed.
|
|
||||||
|
|
||||||
* Add yourself to ``CONTRIBUTORS.rst`` and also add an entry to
|
|
||||||
``CHANGELOG.rst`` if you think your change is relevant to end users.
|
|
||||||
|
|
|
||||||
|
|
@ -1,9 +0,0 @@
|
||||||
Contributors
|
|
||||||
============
|
|
||||||
|
|
||||||
In alphabetical order:
|
|
||||||
|
|
||||||
- Christian Geier
|
|
||||||
- Clément Mondon
|
|
||||||
- Julian Mehne
|
|
||||||
- Markus Unterwaditzer
|
|
||||||
12
ISSUE_TEMPLATE.md
Normal file
12
ISSUE_TEMPLATE.md
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
Before you submit bug reports: https://vdirsyncer.pimutils.org/en/stable/contributing.html
|
||||||
|
|
||||||
|
Things to include in your bug report:
|
||||||
|
|
||||||
|
* Your vdirsyncer version
|
||||||
|
* If applicable, which server software (and which version) you're using
|
||||||
|
* Your Python version
|
||||||
|
* Your operating system
|
||||||
|
* Your config file
|
||||||
|
* Use `vdirsyncer -vdebug` for debug output. The output is sensitive, but
|
||||||
|
please attach at least the last few lines before the error (if applicable),
|
||||||
|
censored as necessary. This is almost always the most useful information.
|
||||||
46
LICENSE
46
LICENSE
|
|
@ -1,19 +1,33 @@
|
||||||
Copyright (c) 2014 Markus Unterwaditzer & contributors
|
Copyright (c) 2014-2020 by Markus Unterwaditzer & contributors. See
|
||||||
|
AUTHORS.rst for more details.
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
Some rights reserved.
|
||||||
this software and associated documentation files (the "Software"), to deal in
|
|
||||||
the Software without restriction, including without limitation the rights to
|
|
||||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
|
||||||
of the Software, and to permit persons to whom the Software is furnished to do
|
|
||||||
so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
Redistribution and use in source and binary forms of the software as well
|
||||||
copies or substantial portions of the Software.
|
as documentation, with or without modification, are permitted provided
|
||||||
|
that the following conditions are met:
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
* Redistributions of source code must retain the above copyright
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
notice, this list of conditions and the following disclaimer.
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
* Redistributions in binary form must reproduce the above
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
copyright notice, this list of conditions and the following
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
disclaimer in the documentation and/or other materials provided
|
||||||
SOFTWARE.
|
with the distribution.
|
||||||
|
|
||||||
|
* The names of the contributors may not be used to endorse or
|
||||||
|
promote products derived from this software without specific
|
||||||
|
prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
|
||||||
|
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
|
||||||
|
NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
|
||||||
|
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||||
|
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||||
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||||
|
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
|
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||||
|
DAMAGE.
|
||||||
|
|
|
||||||
9
MANIFEST.in
Normal file
9
MANIFEST.in
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
# setuptools-scm includes everything tracked by git
|
||||||
|
prune docker
|
||||||
|
prune scripts
|
||||||
|
prune tests/storage/servers
|
||||||
|
recursive-include tests/storage/servers/radicale *
|
||||||
|
recursive-include tests/storage/servers/skip *
|
||||||
|
|
||||||
|
prune docs/_build
|
||||||
|
global-exclude *.py[cdo] __pycache__ *.so *.pyd
|
||||||
63
Makefile
Normal file
63
Makefile
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
# See the documentation on how to run the tests:
|
||||||
|
# https://vdirsyncer.pimutils.org/en/stable/contributing.html
|
||||||
|
|
||||||
|
# Which DAV server to run the tests against (radicale, xandikos, skip, owncloud, nextcloud, ...)
|
||||||
|
export DAV_SERVER := skip
|
||||||
|
|
||||||
|
# release (install release versions of dependencies)
|
||||||
|
# development (install development versions of some of vdirsyncer's dependencies)
|
||||||
|
# or minimal (install oldest version of each dependency that is supported by vdirsyncer)
|
||||||
|
export REQUIREMENTS := release
|
||||||
|
|
||||||
|
# Set this to true if you run vdirsyncer's test as part of e.g. packaging.
|
||||||
|
export DETERMINISTIC_TESTS := false
|
||||||
|
|
||||||
|
# Assume to run in CI. Don't use this outside of a virtual machine. It will
|
||||||
|
# heavily "pollute" your system, such as attempting to install a new Python
|
||||||
|
# systemwide.
|
||||||
|
export CI := false
|
||||||
|
|
||||||
|
# Whether to generate coverage data while running tests.
|
||||||
|
export COVERAGE := $(CI)
|
||||||
|
|
||||||
|
# Variables below this line are not very interesting for getting started.
|
||||||
|
|
||||||
|
CODECOV_PATH = /tmp/codecov.sh
|
||||||
|
|
||||||
|
all:
|
||||||
|
$(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation)
|
||||||
|
|
||||||
|
ci-test:
|
||||||
|
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
||||||
|
pytest --cov vdirsyncer --cov-append tests/unit/ tests/system/
|
||||||
|
bash $(CODECOV_PATH) -c
|
||||||
|
|
||||||
|
ci-test-storage:
|
||||||
|
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
||||||
|
set -ex; \
|
||||||
|
for server in $(DAV_SERVER); do \
|
||||||
|
DAV_SERVER=$$server pytest --cov vdirsyncer --cov-append tests/storage; \
|
||||||
|
done
|
||||||
|
bash $(CODECOV_PATH) -c
|
||||||
|
|
||||||
|
check:
|
||||||
|
ruff check
|
||||||
|
ruff format --diff
|
||||||
|
#mypy vdirsyncer
|
||||||
|
|
||||||
|
release-deb:
|
||||||
|
sh scripts/release-deb.sh debian jessie
|
||||||
|
sh scripts/release-deb.sh debian stretch
|
||||||
|
sh scripts/release-deb.sh ubuntu trusty
|
||||||
|
sh scripts/release-deb.sh ubuntu xenial
|
||||||
|
sh scripts/release-deb.sh ubuntu zesty
|
||||||
|
|
||||||
|
install-dev:
|
||||||
|
pip install -U pip setuptools wheel
|
||||||
|
pip install -e '.[test,check,docs]'
|
||||||
|
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
|
||||||
|
pip install pyproject-dependencies && \
|
||||||
|
pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
|
||||||
|
fi
|
||||||
|
|
||||||
|
.PHONY: docs
|
||||||
80
README.rst
80
README.rst
|
|
@ -2,37 +2,73 @@
|
||||||
vdirsyncer
|
vdirsyncer
|
||||||
==========
|
==========
|
||||||
|
|
||||||
vdirsyncer synchronizes your calendars and addressbooks between two storages.
|
.. image:: https://builds.sr.ht/~whynothugo/vdirsyncer.svg
|
||||||
The supported storages are CalDAV, CardDAV, arbitrary HTTP resources and `some
|
:target: https://builds.sr.ht/~whynothugo/vdirsyncer
|
||||||
more <https://vdirsyncer.readthedocs.org/en/latest/api.html#storages>`_.
|
:alt: CI status
|
||||||
|
|
||||||
It aims to be for CalDAV and CardDAV what `OfflineIMAP
|
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=main
|
||||||
<http://offlineimap.org/>`_ is for IMAP.
|
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=main
|
||||||
|
:alt: Codecov coverage report
|
||||||
|
|
||||||
.. image:: https://travis-ci.org/untitaker/vdirsyncer.png?branch=master
|
.. image:: https://readthedocs.org/projects/vdirsyncer/badge/
|
||||||
:target: https://travis-ci.org/untitaker/vdirsyncer
|
:target: https://vdirsyncer.rtfd.org/
|
||||||
|
:alt: documentation
|
||||||
|
|
||||||
.. image:: https://coveralls.io/repos/untitaker/vdirsyncer/badge.png?branch=master
|
.. image:: https://img.shields.io/pypi/v/vdirsyncer.svg
|
||||||
:target: https://coveralls.io/r/untitaker/vdirsyncer?branch=master
|
:target: https://pypi.python.org/pypi/vdirsyncer
|
||||||
|
:alt: version on pypi
|
||||||
|
|
||||||
How to use
|
.. image:: https://img.shields.io/badge/deb-packagecloud.io-844fec.svg
|
||||||
==========
|
:target: https://packagecloud.io/pimutils/vdirsyncer
|
||||||
|
:alt: Debian packages
|
||||||
|
|
||||||
vdirsyncer requires Python >= 2.7 or Python >= 3.3.
|
.. image:: https://img.shields.io/pypi/l/vdirsyncer.svg
|
||||||
|
:target: https://github.com/pimutils/vdirsyncer/blob/main/LICENCE
|
||||||
|
:alt: licence: BSD
|
||||||
|
|
||||||
As all Python packages, vdirsyncer can be installed with ``pip``::
|
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
||||||
|
- `Source code <https://github.com/pimutils/vdirsyncer>`_
|
||||||
|
|
||||||
pip install --user vdirsyncer
|
Vdirsyncer is a command-line tool for synchronizing calendars and addressbooks
|
||||||
|
between a variety of servers and the local filesystem. The most popular use case
|
||||||
|
is to synchronize a server with a local folder and use a set of other programs_
|
||||||
|
to change the local events and contacts. Vdirsyncer can then synchronize those
|
||||||
|
changes back to the server.
|
||||||
|
|
||||||
Then copy ``example.cfg`` to ``~/.vdirsyncer/config`` and edit it.
|
However, vdirsyncer is not limited to synchronizing between clients and
|
||||||
|
servers. It can also be used to synchronize calendars and/or addressbooks
|
||||||
|
between two servers directly.
|
||||||
|
|
||||||
Run ``vdirsyncer --help`` and check out `the documentation
|
It aims to be for calendars and contacts what `OfflineIMAP
|
||||||
<https://vdirsyncer.readthedocs.org/>`_.
|
<https://www.offlineimap.org/>`_ is for emails.
|
||||||
|
|
||||||
How to run the tests
|
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
|
||||||
====================
|
|
||||||
|
|
||||||
::
|
Links of interest
|
||||||
|
=================
|
||||||
|
|
||||||
sh build.sh install
|
* Check out `the tutorial
|
||||||
sh build.sh run
|
<https://vdirsyncer.pimutils.org/en/stable/tutorial.html>`_ for basic
|
||||||
|
usage.
|
||||||
|
|
||||||
|
* `Contact information
|
||||||
|
<https://vdirsyncer.pimutils.org/en/stable/contact.html>`_
|
||||||
|
|
||||||
|
* `How to contribute to this project
|
||||||
|
<https://vdirsyncer.pimutils.org/en/stable/contributing.html>`_
|
||||||
|
|
||||||
|
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
|
||||||
|
|
||||||
|
Dockerized
|
||||||
|
=================
|
||||||
|
If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
|
||||||
|
Docker environment, you can check out the following GitHub Repository:
|
||||||
|
|
||||||
|
* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
|
||||||
|
|
||||||
|
Note: This is an unofficial Docker build, it is maintained by `Bleala <https://github.com/Bleala>`_.
|
||||||
|
|
||||||
|
License
|
||||||
|
=======
|
||||||
|
|
||||||
|
Licensed under the 3-clause BSD license, see ``LICENSE``.
|
||||||
|
|
|
||||||
90
build.sh
90
build.sh
|
|
@ -1,90 +0,0 @@
|
||||||
#!/bin/sh
|
|
||||||
set -e
|
|
||||||
# _davserver <name>
#
# Ensure the DAV test server <name> is present under $TESTSERVER_BASE<name>/
# and run its install script. The server repo is cloned into /tmp and
# symlinked into place.
_davserver() {
    # Maybe tmpfs is mounted on /tmp/, can't harm anyway.
    # Quote all expansions so paths containing spaces don't word-split.
    if [ ! -d "$TESTSERVER_BASE$1/" ]; then
        git clone --depth=1 \
            "https://github.com/vdirsyncer/$1-testserver.git" \
            "/tmp/$1-testserver"
        ln -s "/tmp/$1-testserver" "$TESTSERVER_BASE$1"
    fi
    (cd "$TESTSERVER_BASE$1" && sh install.sh)
}
|
|
||||||
|
|
||||||
# Install everything needed to run the test suite: the test-runner
# packages, the DAV test server selected via $DAV_SERVER, and finally
# vdirsyncer itself in editable mode.
install_build_tests() {
    $PIP_INSTALL coverage pytest pytest-xprocess
    _davserver $DAV_SERVER
    if test "$TRAVIS" = "true"; then
        # speed up builds of packages which don't have wheels
        export CFLAGS=-O0
        $PIP_INSTALL --upgrade pip
        $PIP_INSTALL wheel
        # from here on, prefer prebuilt wheels from the travis mirror
        PIP_INSTALL="pip install --use-wheel --find-links=http://travis-wheels.unterwaditzer.net/wheels/"
        $PIP_INSTALL coveralls
    fi
    $PIP_INSTALL --editable .
}
|
|
||||||
|
|
||||||
# Run the test suite. On Travis, collect coverage and upload it to
# coveralls; locally, just run pytest.
run_build_tests() {
    if [ "$TRAVIS" != "true" ]; then
        py.test
    else
        coverage run --source=vdirsyncer/,tests/ --module pytest
        coveralls
    fi
}
|
|
||||||
|
|
||||||
# Install the style checker used by run_build_style.
install_build_style() {
    $PIP_INSTALL flake8
}
|
|
||||||
|
|
||||||
# Lint the code and verify the misspelling "syncroniz..." doesn't appear
# anywhere (this script is excluded because it mentions it on purpose).
run_build_style() {
    flake8 vdirsyncer tests
    ! git grep -il syncroniz $(ls | grep -v 'build.sh')
}
|
|
||||||
|
|
||||||
# Install Sphinx plus the theme, and vdirsyncer itself (the docs build
# imports the package to read its version).
install_build_docs() {
    $PIP_INSTALL sphinx sphinx_rtd_theme
    $PIP_INSTALL -e .
}
|
|
||||||
|
|
||||||
# Build the HTML documentation.
run_build_docs() {
    cd docs
    make html
}
|
|
||||||
|
|
||||||
|
|
||||||
# Defaults for the build matrix; each may be overridden via environment.
: "${BUILD:=tests}"
: "${DAV_SERVER:=radicale}"
: "${REQUIREMENTS:=release}"

COMMAND="$1"
if [ -z "$COMMAND" ]; then
    # No subcommand given: print usage and bail out.
    echo "Usage:"
    echo "build.sh run # run build"
    echo "build.sh install # install dependencies"
    echo
    echo "Environment variable combinations:"
    echo "BUILD=tests # install and run tests"
    echo " # (using Radicale, see .travis.yml for more)"
    echo "BUILD=style # install and run stylechecker (flake8)"
    echo "BUILD=docs # install sphinx and build HTML docs"
    exit 1
fi

TESTSERVER_BASE=./tests/storage/dav/servers/

# Dispatch helpers: delegate to install_build_<BUILD> / run_build_<BUILD>.
install_builds() {
    echo "Installing for $BUILD"
    PIP_INSTALL="pip install"
    install_build_$BUILD
}

run_builds() {
    echo "Running $BUILD"
    run_build_$BUILD
}

${COMMAND}_builds
|
|
||||||
70
config.example
Normal file
70
config.example
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
# An example configuration for vdirsyncer.
|
||||||
|
#
|
||||||
|
# Move it to ~/.vdirsyncer/config or ~/.config/vdirsyncer/config and edit it.
|
||||||
|
# Run `vdirsyncer --help` for CLI usage.
|
||||||
|
#
|
||||||
|
# Optional parameters are commented out.
|
||||||
|
# This file doesn't document all available parameters, see
|
||||||
|
# http://vdirsyncer.pimutils.org/ for the rest of them.
|
||||||
|
|
||||||
|
[general]
|
||||||
|
# A folder where vdirsyncer can store some metadata about each pair.
|
||||||
|
status_path = "~/.vdirsyncer/status/"
|
||||||
|
|
||||||
|
# CARDDAV
|
||||||
|
[pair bob_contacts]
|
||||||
|
# A `[pair <name>]` block defines two storages `a` and `b` that should be
|
||||||
|
# synchronized. The definition of these storages follows in `[storage <name>]`
|
||||||
|
# blocks. This is similar to accounts in OfflineIMAP.
|
||||||
|
a = "bob_contacts_local"
|
||||||
|
b = "bob_contacts_remote"
|
||||||
|
|
||||||
|
# Synchronize all collections that can be found.
|
||||||
|
# You need to run `vdirsyncer discover` if new calendars/addressbooks are added
|
||||||
|
# on the server.
|
||||||
|
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
|
||||||
|
# Synchronize the "display name" property into a local file (~/.contacts/displayname).
|
||||||
|
metadata = ["displayname"]
|
||||||
|
|
||||||
|
# To resolve a conflict the following values are possible:
|
||||||
|
# `null` - abort when collisions occur (default)
|
||||||
|
# `"a wins"` - assume a's items to be more up-to-date
|
||||||
|
# `"b wins"` - assume b's items to be more up-to-date
|
||||||
|
#conflict_resolution = null
|
||||||
|
|
||||||
|
[storage bob_contacts_local]
|
||||||
|
# A storage references actual data on a remote server or on the local disk.
|
||||||
|
# Similar to repositories in OfflineIMAP.
|
||||||
|
type = "filesystem"
|
||||||
|
path = "~/.contacts/"
|
||||||
|
fileext = ".vcf"
|
||||||
|
|
||||||
|
[storage bob_contacts_remote]
|
||||||
|
type = "carddav"
|
||||||
|
url = "https://owncloud.example.com/remote.php/carddav/"
|
||||||
|
#username =
|
||||||
|
# The password can also be fetched from the system password storage, netrc or a
|
||||||
|
# custom command. See http://vdirsyncer.pimutils.org/en/stable/keyring.html
|
||||||
|
#password =
|
||||||
|
|
||||||
|
# CALDAV
|
||||||
|
[pair bob_calendar]
|
||||||
|
a = "bob_calendar_local"
|
||||||
|
b = "bob_calendar_remote"
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
|
||||||
|
# Calendars also have a color property
|
||||||
|
metadata = ["displayname", "color"]
|
||||||
|
|
||||||
|
[storage bob_calendar_local]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "~/.calendars/"
|
||||||
|
fileext = ".ics"
|
||||||
|
|
||||||
|
[storage bob_calendar_remote]
|
||||||
|
type = "caldav"
|
||||||
|
url = "https://owncloud.example.com/remote.php/caldav/"
|
||||||
|
#username =
|
||||||
|
#password =
|
||||||
75
contrib/conflict_resolution/resolve_interactively.py
Executable file
75
contrib/conflict_resolution/resolve_interactively.py
Executable file
|
|
@ -0,0 +1,75 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
"""Ask user to resolve a vdirsyncer sync conflict interactively.
|
||||||
|
|
||||||
|
Needs a way to ask the user.
|
||||||
|
The use of https://apps.kde.org/kdialog/ for GNU/Linux is hardcoded.
|
||||||
|
|
||||||
|
Depends on python>3.5 and KDialog.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
Ensure the file is executable and use it in the vdirsyncer.conf file, e.g.
|
||||||
|
|
||||||
|
conflict_resolution = ["command", "/home/bern/vdirsyncer/resolve_interactively.py"]
|
||||||
|
|
||||||
|
This file is Free Software under the following license:
|
||||||
|
SPDX-License-Identifier: BSD-3-Clause
|
||||||
|
SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
|
||||||
|
Author: <bernhard.reiter@intevation.de>
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
KDIALOG = "/usr/bin/kdialog"
|
||||||
|
|
||||||
|
SUMMARY_PATTERN = re.compile("^(SUMMARY:.*)$", re.MULTILINE)
|
||||||
|
|
||||||
|
|
||||||
|
def get_summary(icalendar_text: str) -> str:
    """Return the first ``SUMMARY:`` line from an iCalendar text.

    Folded (continued) lines are not unfolded; only the first physical
    line is returned.

    Unlike the original implementation, a missing SUMMARY line no longer
    raises ``TypeError`` (``None[1]``); a placeholder string is returned
    instead so the conflict dialog can still be shown.
    """
    # The pattern is inlined here (rather than a module constant) so the
    # function is self-contained; the re module caches compilations.
    match = re.search(r"^(SUMMARY:.*)$", icalendar_text, re.MULTILINE)
    if match is None:
        return "(no SUMMARY found)"
    return match[1]
|
||||||
|
|
||||||
|
|
||||||
|
def main(ical1_filename, ical2_filename):
    """Show a kdialog prompt and resolve the conflict per the user's answer.

    Both arguments are ``pathlib.Path`` objects pointing at the two
    conflicting items; the losing file is overwritten with the winner's
    contents. On cancel, neither file is touched.
    """
    ical1 = ical1_filename.read_text()
    ical2 = ical2_filename.read_text()

    # Map the three dialog buttons onto kdialog's exit codes:
    # yes -> 0 ("take first"), no -> 1 ("take second"), cancel -> 2.
    button_labels = [
        "--yes-label", "take first",
        "--no-label", "take second",
        "--cancel-label", "do not resolve",
    ]

    message = (
        "There was a sync conflict, do you prefer the first entry: \n"
        f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
        "or the second entry:\n"
        f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?"
    )

    result = subprocess.run(
        args=[KDIALOG, "--warningyesnocancel", message, *button_labels]
    )

    if result.returncode == 2:
        # Cancel was pressed: leave both items unchanged (nothing copied).
        return

    if result.returncode == 0:
        # "take first": overwrite the second item with the first.
        ical2_filename.write_text(ical1)
    else:
        # returncode == 1, "take second": overwrite the first item.
        ical1_filename.write_text(ical2)
|
||||||
|
|
||||||
|
|
||||||
|
# Entry point: vdirsyncer invokes this script with exactly two arguments,
# the files holding the conflicting items. Anything else prints the usage
# information from the module docstring.
if len(sys.argv) == 3:
    main(Path(sys.argv[1]), Path(sys.argv[2]))
else:
    sys.stdout.write(__doc__)
|
||||||
9
contrib/vdirsyncer.service
Normal file
9
contrib/vdirsyncer.service
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
[Unit]
|
||||||
|
Description=Synchronize calendars and contacts
|
||||||
|
Documentation=https://vdirsyncer.readthedocs.org/
|
||||||
|
StartLimitBurst=2
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
ExecStart=/usr/bin/vdirsyncer sync
|
||||||
|
RuntimeMaxSec=3m
|
||||||
|
Restart=on-failure
|
||||||
10
contrib/vdirsyncer.timer
Normal file
10
contrib/vdirsyncer.timer
Normal file
|
|
@ -0,0 +1,10 @@
|
||||||
|
[Unit]
|
||||||
|
Description=Synchronize vdirs
|
||||||
|
|
||||||
|
[Timer]
|
||||||
|
OnBootSec=5m
|
||||||
|
OnUnitActiveSec=15m
|
||||||
|
AccuracySec=5m
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=timers.target
|
||||||
0
docs/_static/.gitkeep
vendored
Normal file
0
docs/_static/.gitkeep
vendored
Normal file
113
docs/api.rst
113
docs/api.rst
|
|
@ -1,113 +0,0 @@
|
||||||
===
|
|
||||||
API
|
|
||||||
===
|
|
||||||
|
|
||||||
Config Parameters
|
|
||||||
=================
|
|
||||||
|
|
||||||
.. _general_config:
|
|
||||||
|
|
||||||
General Section
|
|
||||||
---------------
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[general]
|
|
||||||
status_path = ...
|
|
||||||
#processes = 0
|
|
||||||
|
|
||||||
|
|
||||||
- ``status_path``: A directory where vdirsyncer will store metadata for the
|
|
||||||
next sync. The data is needed to determine whether a new item means it has
|
|
||||||
been added on one side or deleted on the other.
|
|
||||||
|
|
||||||
- ``processes``: Optional, defines the amount of maximal connections to use for
|
|
||||||
syncing. By default there is no limit, which means vdirsyncer will try to
|
|
||||||
open a connection for each collection to be synced. The value ``0`` is
|
|
||||||
ignored. Setting this to ``1`` will only synchronize one collection at a
|
|
||||||
time.
|
|
||||||
|
|
||||||
While this often greatly increases performance, you might have valid reasons
|
|
||||||
to set this to a smaller number. For example, your DAV server running on a
|
|
||||||
Raspberry Pi is so slow that multiple connections don't help much, since the
|
|
||||||
CPU and not the network is the bottleneck.
|
|
||||||
|
|
||||||
.. _pair_config:
|
|
||||||
|
|
||||||
Pair Section
|
|
||||||
------------
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[pair pair_name]
|
|
||||||
a = ...
|
|
||||||
b = ...
|
|
||||||
#conflict_resolution = ...
|
|
||||||
|
|
||||||
- ``a`` and ``b`` reference the storages to sync by their names.
|
|
||||||
|
|
||||||
- ``collections``: Optional, a comma-separated list of collections to
|
|
||||||
synchronize. If this parameter is omitted, it is assumed the storages are
|
|
||||||
already directly pointing to one collection each. Specifying a collection
|
|
||||||
multiple times won't make vdirsyncer sync that collection more than once.
|
|
||||||
|
|
||||||
Furthermore, there are the special values ``from a`` and ``from b``, which
|
|
||||||
tell vdirsyncer to try autodiscovery on a specific storage::
|
|
||||||
|
|
||||||
collections = from b,foo,bar # all in storage b + "foo" + "bar"
|
|
||||||
collections = from b,from a # all in storage a + all in storage b
|
|
||||||
|
|
||||||
- ``conflict_resolution``: Optional, define how conflicts should be handled. A
|
|
||||||
conflict occurs when one item changed on both sides since the last sync.
|
|
||||||
Valid values are ``a wins`` and ``b wins``. By default, vdirsyncer will show
|
|
||||||
an error and abort the synchronization.
|
|
||||||
|
|
||||||
.. _storage_config:
|
|
||||||
|
|
||||||
Storage Section
|
|
||||||
---------------
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
[storage storage_name]
|
|
||||||
type = ...
|
|
||||||
|
|
||||||
- ``type`` defines which kind of storage is defined. See :ref:`storages`.
|
|
||||||
|
|
||||||
- ``read_only`` defines whether the storage should be regarded as a read-only
|
|
||||||
storage. The value ``True`` means synchronization will discard any changes
|
|
||||||
made to the other side. The value ``False`` implies normal 2-way
|
|
||||||
synchronization.
|
|
||||||
|
|
||||||
- Any further parameters are passed on to the storage class.
|
|
||||||
|
|
||||||
.. _storages:
|
|
||||||
|
|
||||||
Supported Storages
|
|
||||||
==================
|
|
||||||
|
|
||||||
.. module:: vdirsyncer.storage
|
|
||||||
|
|
||||||
Read-write storages
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
These storages generally support reading and changing of their items. Their
|
|
||||||
default value for ``read_only`` is ``False``, but can be set to ``True`` if
|
|
||||||
wished.
|
|
||||||
|
|
||||||
.. autoclass:: CaldavStorage
|
|
||||||
|
|
||||||
.. autoclass:: CarddavStorage
|
|
||||||
|
|
||||||
.. autoclass:: FilesystemStorage
|
|
||||||
|
|
||||||
.. autoclass:: SingleFileStorage
|
|
||||||
|
|
||||||
Read-only storages
|
|
||||||
------------------
|
|
||||||
|
|
||||||
These storages don't support writing of their items, consequently ``read_only``
|
|
||||||
is set to ``True`` by default. Changing ``read_only`` to ``False`` on them
|
|
||||||
leads to an error.
|
|
||||||
|
|
||||||
.. autoclass:: HttpStorage
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
.. include:: ../CHANGELOG.rst
|
.. include:: ../CHANGELOG.rst
|
||||||
|
|
|
||||||
122
docs/conf.py
122
docs/conf.py
|
|
@ -1,58 +1,106 @@
|
||||||
# -*- coding: utf-8 -*-
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import datetime
|
||||||
import os
|
import os
|
||||||
import sys
|
|
||||||
|
|
||||||
import pkg_resources
|
from pkg_resources import get_distribution
|
||||||
|
|
||||||
extensions = ['sphinx.ext.autodoc']
|
extensions = ["sphinx.ext.autodoc"]
|
||||||
|
|
||||||
templates_path = ['_templates']
|
templates_path = ["_templates"]
|
||||||
|
|
||||||
source_suffix = '.rst'
|
source_suffix = ".rst"
|
||||||
master_doc = 'index'
|
master_doc = "index"
|
||||||
|
|
||||||
project = u'vdirsyncer'
|
project = "vdirsyncer"
|
||||||
copyright = u'2014, Markus Unterwaditzer & contributors'
|
copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
|
||||||
|
datetime.date.today().strftime("%Y")
|
||||||
|
)
|
||||||
|
|
||||||
|
release = get_distribution("vdirsyncer").version
|
||||||
|
version = ".".join(release.split(".")[:2]) # The short X.Y version.
|
||||||
|
|
||||||
|
rst_epilog = f".. |vdirsyncer_version| replace:: {release}"
|
||||||
|
|
||||||
|
exclude_patterns = ["_build"]
|
||||||
|
|
||||||
|
pygments_style = "sphinx"
|
||||||
|
|
||||||
|
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# The full version, including alpha/beta/rc tags.
|
|
||||||
release = pkg_resources.require('vdirsyncer')[0].version
|
|
||||||
except pkg_resources.DistributionNotFound:
|
|
||||||
print('To build the documentation, the distribution information of'
|
|
||||||
'vdirsyncer has to be available. Run "setup.py develop" to do'
|
|
||||||
'this.')
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
version = '.'.join(release.split('.')[:2]) # The short X.Y version.
|
|
||||||
|
|
||||||
exclude_patterns = ['_build']
|
|
||||||
|
|
||||||
pygments_style = 'sphinx'
|
|
||||||
|
|
||||||
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
|
||||||
|
|
||||||
if not on_rtd:
|
|
||||||
import sphinx_rtd_theme
|
import sphinx_rtd_theme
|
||||||
html_theme = 'sphinx_rtd_theme'
|
|
||||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
|
||||||
|
|
||||||
html_static_path = ['_static']
|
html_theme = "sphinx_rtd_theme"
|
||||||
htmlhelp_basename = 'vdirsyncerdoc'
|
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||||
|
except ImportError:
|
||||||
|
html_theme = "default"
|
||||||
|
if not on_rtd:
|
||||||
|
print("-" * 74)
|
||||||
|
print("Warning: sphinx-rtd-theme not installed, building with default theme.")
|
||||||
|
print("-" * 74)
|
||||||
|
|
||||||
|
html_static_path = ["_static"]
|
||||||
|
htmlhelp_basename = "vdirsyncerdoc"
|
||||||
|
|
||||||
latex_elements = {}
|
latex_elements = {}
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
('index', 'vdirsyncer.tex', u'vdirsyncer Documentation',
|
(
|
||||||
u'Markus Unterwaditzer', 'manual'),
|
"index",
|
||||||
|
"vdirsyncer.tex",
|
||||||
|
"vdirsyncer Documentation",
|
||||||
|
"Markus Unterwaditzer",
|
||||||
|
"manual",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
man_pages = [
|
man_pages = [
|
||||||
('index', 'vdirsyncer', u'vdirsyncer Documentation',
|
("index", "vdirsyncer", "vdirsyncer Documentation", ["Markus Unterwaditzer"], 1)
|
||||||
[u'Markus Unterwaditzer'], 1)
|
|
||||||
]
|
]
|
||||||
|
|
||||||
texinfo_documents = [
|
texinfo_documents = [
|
||||||
('index', 'vdirsyncer', u'vdirsyncer Documentation',
|
(
|
||||||
u'Markus Unterwaditzer', 'vdirsyncer',
|
"index",
|
||||||
'One line description of project.', 'Miscellaneous'),
|
"vdirsyncer",
|
||||||
|
"vdirsyncer Documentation",
|
||||||
|
"Markus Unterwaditzer",
|
||||||
|
"vdirsyncer",
|
||||||
|
"Synchronize calendars and contacts.",
|
||||||
|
"Miscellaneous",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def github_issue_role(name, rawtext, text, lineno, inliner, options=None, content=()):
    """Sphinx role for ``:gh:`` (issues) and ``:ghpr:`` (pull requests).

    Renders the number in *text* as a link into the pimutils/vdirsyncer
    GitHub repository; a non-positive or non-numeric value produces an
    inline problem marker instead.
    """
    if not options:
        options = {}
    try:
        number = int(text)
        if number <= 0:
            raise ValueError
    except ValueError:
        err = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
        problem = inliner.problematic(rawtext, rawtext, err)
        return [problem], [err]

    from docutils import nodes

    repo = "https://github.com/pimutils/vdirsyncer"
    if name == "gh":
        link = f"{repo}/issues/{number}"
        label = f"issue #{number}"
    else:
        link = f"{repo}/pull/{number}"
        label = f"pull request #{number}"
    ref = nodes.reference(rawtext, label, refuri=link, **options)
    return [ref], []
|
||||||
|
|
||||||
|
|
||||||
|
def setup(app):
    """Register vdirsyncer-specific Sphinx extensions on *app*.

    Adds the ``storage`` object type (used to document storage backends)
    and the ``gh``/``ghpr`` roles for linking to GitHub issues and pull
    requests.
    """
    from sphinx.domains.python import PyObject

    app.add_object_type(
        "storage",
        "storage",
        "pair: %s; storage",
        doc_field_types=PyObject.doc_field_types,
    )
    # Both roles share one implementation; it branches on the role name.
    for role_name in ("gh", "ghpr"):
        app.add_role(role_name, github_issue_role)
|
||||||
|
|
|
||||||
526
docs/config.rst
Normal file
526
docs/config.rst
Normal file
|
|
@ -0,0 +1,526 @@
|
||||||
|
=========================
|
||||||
|
Full configuration manual
|
||||||
|
=========================
|
||||||
|
|
||||||
|
Vdirsyncer uses an ini-like format for storing its configuration. All values
|
||||||
|
are JSON, invalid JSON will get interpreted as string::
|
||||||
|
|
||||||
|
x = "foo" # String
|
||||||
|
x = foo # Shorthand for same string
|
||||||
|
|
||||||
|
x = 42 # Integer
|
||||||
|
|
||||||
|
x = ["a", "b", "c"] # List of strings
|
||||||
|
|
||||||
|
x = true # Boolean
|
||||||
|
x = false
|
||||||
|
|
||||||
|
x = null # Also known as None
|
||||||
|
|
||||||
|
|
||||||
|
.. _general_config:
|
||||||
|
|
||||||
|
General Section
|
||||||
|
===============
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[general]
|
||||||
|
status_path = ...
|
||||||
|
|
||||||
|
|
||||||
|
- ``status_path``: A directory where vdirsyncer will store some additional data
|
||||||
|
for the next sync.
|
||||||
|
|
||||||
|
The data is needed to determine whether a new item means it has been added on
|
||||||
|
one side or deleted on the other. Relative paths will be interpreted as
|
||||||
|
relative to the configuration file's directory.
|
||||||
|
|
||||||
|
See `A simple synchronization algorithm
|
||||||
|
<https://unterwaditzer.net/2016/sync-algorithm.html>`_ for what exactly is in
|
||||||
|
there.
|
||||||
|
|
||||||
|
.. _pair_config:
|
||||||
|
|
||||||
|
Pair Section
|
||||||
|
============
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[pair pair_name]
|
||||||
|
a = ...
|
||||||
|
b = ...
|
||||||
|
#collections = null
|
||||||
|
#conflict_resolution = null
|
||||||
|
|
||||||
|
- Pair names can consist of any alphanumeric characters and the underscore.
|
||||||
|
|
||||||
|
- ``a`` and ``b`` reference the storages to sync by their names.
|
||||||
|
|
||||||
|
- ``collections``: A list of collections to synchronize when ``vdirsyncer
|
||||||
|
sync`` is executed. See also :ref:`collections_tutorial`.
|
||||||
|
|
||||||
|
The special values ``"from a"`` and ``"from b"`` tell vdirsyncer to try
|
||||||
|
autodiscovery on a specific storage. It means all the collections on side A /
|
||||||
|
side B.
|
||||||
|
|
||||||
|
If the collection you want to sync doesn't have the same name on each side,
|
||||||
|
you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
|
||||||
|
This will synchronize the collection ``name_a`` on side A with the collection
|
||||||
|
``name_b`` on side B. The ``config_name`` will be used for representation in
|
||||||
|
CLI arguments and logging.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all
|
||||||
|
the collections from side B, and also the collections named "foo" and "bar".
|
||||||
|
|
||||||
|
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
|
||||||
|
existing collections on either side.
|
||||||
|
|
||||||
|
- ``collections = [["bar", "bar_a", "bar_b"], "foo"]`` makes vdirsyncer
|
||||||
|
synchronize ``bar_a`` from side A with ``bar_b`` from side B, and also
|
||||||
|
synchronize ``foo`` on both sides with each other.
|
||||||
|
|
||||||
|
- ``conflict_resolution``: Optional, define how conflicts should be handled. A
|
||||||
|
conflict occurs when one item (event, task) changed on both sides since the
|
||||||
|
last sync. See also :ref:`conflict_resolution_tutorial`.
|
||||||
|
|
||||||
|
Valid values are:
|
||||||
|
|
||||||
|
- ``null``, where an error is shown and no changes are done.
|
||||||
|
- ``"a wins"`` and ``"b wins"``, where the whole item is taken from one side.
|
||||||
|
- ``["command", "vimdiff"]``: ``vimdiff <a> <b>`` will be called where
|
||||||
|
``<a>`` and ``<b>`` are temporary files that contain the item of each side
|
||||||
|
respectively. The files need to be exactly the same when the command
|
||||||
|
returns.
|
||||||
|
|
||||||
|
- ``vimdiff`` can be replaced with any other command. For example, in POSIX
|
||||||
|
``["command", "cp"]`` is equivalent to ``"a wins"``.
|
||||||
|
- Additional list items will be forwarded as arguments. For example,
|
||||||
|
``["command", "vimdiff", "--noplugin"]`` runs ``vimdiff --noplugin``.
|
||||||
|
|
||||||
|
Vdirsyncer never attempts to "automatically merge" the two items.
|
||||||
|
|
||||||
|
.. _partial_sync_def:
|
||||||
|
|
||||||
|
- ``partial_sync``: Assume A is read-only, B not. If you change items on B,
|
||||||
|
vdirsyncer can't sync the changes to A. What should happen instead?
|
||||||
|
|
||||||
|
- ``error``: An error is shown.
|
||||||
|
- ``ignore``: The change is ignored. However: Events deleted in B still
|
||||||
|
reappear if they're updated in A.
|
||||||
|
- ``revert`` (default): The change is reverted on next sync.
|
||||||
|
|
||||||
|
See also :ref:`partial_sync_tutorial`.
|
||||||
|
|
||||||
|
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
|
||||||
|
metasync`` is executed. Example::
|
||||||
|
|
||||||
|
metadata = ["color", "displayname", "description", "order"]
|
||||||
|
|
||||||
|
This synchronizes the following properties:
|
||||||
|
|
||||||
|
- color: ``http://apple.com/ns/ical/:calendar-color``
|
||||||
|
- displayname: ``DAV:displayname``
|
||||||
|
- description: ``CalDAV:calendar-description`` and ``CardDAV:addressbook-description``
|
||||||
|
- order: ``http://apple.com/ns/ical/:calendar-order``
|
||||||
|
|
||||||
|
The ``conflict_resolution`` parameter applies for these properties too.
|
||||||
|
|
||||||
|
.. _implicit_def:
|
||||||
|
|
||||||
|
- ``implicit``: Opt into implicitly creating collections. Example::
|
||||||
|
|
||||||
|
implicit = "create"
|
||||||
|
|
||||||
|
When set to "create", missing collections are automatically created on both
|
||||||
|
sides during sync without prompting the user. This simplifies workflows where
|
||||||
|
all collections should be synchronized bidirectionally.
|
||||||
|
|
||||||
|
.. _storage_config:
|
||||||
|
|
||||||
|
Storage Section
|
||||||
|
===============
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage storage_name]
|
||||||
|
type = ...
|
||||||
|
|
||||||
|
- Storage names can consist of any alphanumeric characters and the underscore.
|
||||||
|
|
||||||
|
- ``type`` defines which kind of storage is defined. See :ref:`storages`.
|
||||||
|
|
||||||
|
- ``read_only`` defines whether the storage should be regarded as a read-only
|
||||||
|
storage. The value ``true`` means synchronization will discard any changes
|
||||||
|
made to the other side. The value ``false`` implies normal 2-way
|
||||||
|
synchronization.
|
||||||
|
|
||||||
|
- Any further parameters are passed on to the storage class.
|
||||||
|
|
||||||
|
.. _storages:
|
||||||
|
|
||||||
|
Supported Storages
|
||||||
|
------------------
|
||||||
|
|
||||||
|
CalDAV and CardDAV
|
||||||
|
++++++++++++++++++
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Please also see :ref:`supported-servers`, as some servers may not work
|
||||||
|
well.
|
||||||
|
|
||||||
|
.. storage:: caldav
|
||||||
|
|
||||||
|
CalDAV.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage example_for_caldav]
|
||||||
|
type = "caldav"
|
||||||
|
#start_date = null
|
||||||
|
#end_date = null
|
||||||
|
#item_types = []
|
||||||
|
url = "..."
|
||||||
|
#username = ""
|
||||||
|
#password = ""
|
||||||
|
#verify = /path/to/custom_ca.pem
|
||||||
|
#auth = null
|
||||||
|
#useragent = "vdirsyncer/0.16.4"
|
||||||
|
#verify_fingerprint = null
|
||||||
|
#auth_cert = null
|
||||||
|
|
||||||
|
You can set a timerange to synchronize with the parameters ``start_date``
|
||||||
|
and ``end_date``. Inside those parameters, you can use any Python
|
||||||
|
expression to return a valid :py:class:`datetime.datetime` object. For
|
||||||
|
example, the following would synchronize the timerange from one year in the
|
||||||
|
past to one year in the future::
|
||||||
|
|
||||||
|
start_date = "datetime.now() - timedelta(days=365)"
|
||||||
|
end_date = "datetime.now() + timedelta(days=365)"
|
||||||
|
|
||||||
|
Either both or none have to be specified. The default is to synchronize
|
||||||
|
everything.
|
||||||
|
|
||||||
|
You can set ``item_types`` to restrict the *kind of items* you want to
|
||||||
|
synchronize. For example, if you want to only synchronize events (but don't
|
||||||
|
download any tasks from the server), set ``item_types = ["VEVENT"]``. If
|
||||||
|
you want to synchronize events and tasks, but have some ``VJOURNAL`` items
|
||||||
|
on the server you don't want to synchronize, use ``item_types = ["VEVENT",
|
||||||
|
"VTODO"]``.
|
||||||
|
|
||||||
|
:param start_date: Start date of timerange to show, default -inf.
|
||||||
|
:param end_date: End date of timerange to show, default +inf.
|
||||||
|
:param item_types: Kind of items to show. The default, the empty list, is
|
||||||
|
to show all. This depends on particular features on the server, the
|
||||||
|
results are not validated.
|
||||||
|
:param url: Base URL or an URL to a calendar.
|
||||||
|
:param username: Username for authentication.
|
||||||
|
:param password: Password for authentication.
|
||||||
|
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||||
|
See :ref:`ssl-tutorial` for more information.
|
||||||
|
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||||
|
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||||
|
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||||
|
default is preemptive Basic auth, sending credentials even if server
|
||||||
|
didn't request them. This saves from an additional roundtrip per
|
||||||
|
request. Consider setting ``guess`` if this causes issues with your
|
||||||
|
server.
|
||||||
|
:param auth_cert: Optional. Either a path to a certificate with a client
|
||||||
|
certificate and the key or a list of paths to the files with them.
|
||||||
|
:param useragent: Default ``vdirsyncer``.
|
||||||
|
|
||||||
|
|
||||||
|
.. storage:: carddav
|
||||||
|
|
||||||
|
CardDAV.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage example_for_carddav]
|
||||||
|
type = "carddav"
|
||||||
|
url = "..."
|
||||||
|
#username = ""
|
||||||
|
#password = ""
|
||||||
|
#verify = /path/to/custom_ca.pem
|
||||||
|
#auth = null
|
||||||
|
#useragent = "vdirsyncer/0.16.4"
|
||||||
|
#verify_fingerprint = null
|
||||||
|
#auth_cert = null
|
||||||
|
#use_vcard_4 = false
|
||||||
|
|
||||||
|
:param url: Base URL or a URL to an addressbook.
|
||||||
|
:param username: Username for authentication.
|
||||||
|
:param password: Password for authentication.
|
||||||
|
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||||
|
See :ref:`ssl-tutorial` for more information.
|
||||||
|
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||||
|
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||||
|
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||||
|
default is preemptive Basic auth, sending credentials even if
|
||||||
|
server didn't request them. This saves from an additional
|
||||||
|
roundtrip per request. Consider setting ``guess`` if this
|
||||||
|
causes issues with your server.
|
||||||
|
:param auth_cert: Optional. Either a path to a certificate with a client
|
||||||
|
certificate and the key or a list of paths to the files
|
||||||
|
with them.
|
||||||
|
:param useragent: Default ``vdirsyncer``.
|
||||||
|
:param use_vcard_4: Whether the server uses vCard 4.0.
|
||||||
|
|
||||||
|
Google
|
||||||
|
++++++
|
||||||
|
|
||||||
|
Vdirsyncer supports synchronization with Google calendars with the restriction
|
||||||
|
that ``VTODO`` files are rejected by the server.
|
||||||
|
|
||||||
|
Synchronization with Google contacts is less reliable due to negligence of
|
||||||
|
Google's CardDAV API. **Google's CardDAV implementation is allegedly a disaster
|
||||||
|
in terms of data safety**. See `this blog post
|
||||||
|
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
|
||||||
|
up your data.
|
||||||
|
|
||||||
|
Another caveat is that Google group labels are not synced with vCard's
|
||||||
|
`CATEGORIES <https://www.rfc-editor.org/rfc/rfc6350#section-6.7.1>`_ property
|
||||||
|
(also see :gh:`814` and
|
||||||
|
`upstream issue #36761530 <https://issuetracker.google.com/issues/36761530>`_
|
||||||
|
for reference) and the
|
||||||
|
`BDAY <https://www.rfc-editor.org/rfc/rfc6350#section-6.2.5>`_ property is not
|
||||||
|
synced when only partial date information is present (e.g. the year is missing).
|
||||||
|
|
||||||
|
At first run you will be asked to authorize application for Google account
|
||||||
|
access.
|
||||||
|
|
||||||
|
To use this storage type, you need to install some additional dependencies::
|
||||||
|
|
||||||
|
pip install vdirsyncer[google]
|
||||||
|
|
||||||
|
Furthermore you need to register vdirsyncer as an application yourself to
|
||||||
|
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
|
||||||
|
Service to hardcode those into opensource software [googleterms]_:
|
||||||
|
|
||||||
|
1. Go to the `Google API Manager <https://console.developers.google.com>`_
|
||||||
|
|
||||||
|
2. Create a new project under any name.
|
||||||
|
|
||||||
|
3. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
|
||||||
|
Calendar and Contacts APIs, those are different and won't work). There should
|
||||||
|
be a search box where you can just enter those terms.
|
||||||
|
|
||||||
|
4. In the sidebar, select "Credentials", then "Create Credentials" and create a
|
||||||
|
new "OAuth Client ID".
|
||||||
|
|
||||||
|
You'll be prompted to create an OAuth consent screen first. Fill out that
|
||||||
|
form however you like.
|
||||||
|
|
||||||
|
After setting up the consent screen, finish creating the new "OAuth Client
|
||||||
|
ID". The correct application type is "Desktop application".
|
||||||
|
|
||||||
|
5. Finally you should have a Client ID and a Client secret. Provide these in
|
||||||
|
your storage config.
|
||||||
|
|
||||||
|
The ``token_file`` parameter should be a path to a file where vdirsyncer can
|
||||||
|
later store authentication-related data. You do not need to create the file
|
||||||
|
itself or write anything to it.
|
||||||
|
|
||||||
|
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
|
||||||
|
section "Confidential Matters".
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
You need to configure which calendars Google should offer vdirsyncer using
|
||||||
|
a secret `settings page
|
||||||
|
<https://calendar.google.com/calendar/syncselect>`_.
|
||||||
|
|
||||||
|
.. storage:: google_calendar
|
||||||
|
|
||||||
|
Google calendar.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage example_for_google_calendar]
|
||||||
|
type = "google_calendar"
|
||||||
|
token_file = "..."
|
||||||
|
client_id = "..."
|
||||||
|
client_secret = "..."
|
||||||
|
#start_date = null
|
||||||
|
#end_date = null
|
||||||
|
#item_types = []
|
||||||
|
|
||||||
|
Please refer to :storage:`caldav` regarding the ``item_types`` and timerange parameters.
|
||||||
|
|
||||||
|
:param token_file: A filepath where access tokens are stored.
|
||||||
|
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
||||||
|
API Manager.
|
||||||
|
|
||||||
|
.. storage:: google_contacts
|
||||||
|
|
||||||
|
Google contacts.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage example_for_google_contacts]
|
||||||
|
type = "google_contacts"
|
||||||
|
token_file = "..."
|
||||||
|
client_id = "..."
|
||||||
|
client_secret = "..."
|
||||||
|
|
||||||
|
:param token_file: A filepath where access tokens are stored.
|
||||||
|
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
||||||
|
API Manager.
|
||||||
|
|
||||||
|
The current flow is not ideal, but Google has deprecated the previous APIs used
|
||||||
|
for this without providing a suitable replacement. See :gh:`975` for discussion
|
||||||
|
on the topic.
|
||||||
|
|
||||||
|
Local
|
||||||
|
+++++
|
||||||
|
|
||||||
|
.. storage:: filesystem
|
||||||
|
|
||||||
|
Saves each item in its own file, given a directory.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage example_for_filesystem]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "..."
|
||||||
|
fileext = "..."
|
||||||
|
#encoding = "utf-8"
|
||||||
|
#post_hook = null
|
||||||
|
#pre_deletion_hook = null
|
||||||
|
#fileignoreext = ".tmp"
|
||||||
|
|
||||||
|
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
|
||||||
|
a more formal description of the format.
|
||||||
|
|
||||||
|
Directories with a leading dot are ignored to make usage of e.g. version
|
||||||
|
control easier.
|
||||||
|
|
||||||
|
:param path: Absolute path to a vdir/collection. If this is used in
|
||||||
|
combination with the ``collections`` parameter in a pair-section, this
|
||||||
|
should point to a directory of vdirs instead.
|
||||||
|
:param fileext: The file extension to use (e.g. ``.txt``). Contained in the
|
||||||
|
href, so if you change the file extension after a sync, this will
|
||||||
|
trigger a re-download of everything (but *should* not cause data-loss
|
||||||
|
of any kind). To be compatible with the ``vset`` format you have
|
||||||
|
to either use ``.vcf`` or ``.ics``. Note that metasync won't work
|
||||||
|
if you use an empty string here.
|
||||||
|
:param encoding: File encoding for items, both content and filename.
|
||||||
|
:param post_hook: A command to call for each item creation and
|
||||||
|
modification. The command will be called with the path of the
|
||||||
|
new/updated file.
|
||||||
|
:param pre_deletion_hook: A command to call for each item deletion.
|
||||||
|
The command will be called with the path of the deleted file.
|
||||||
|
:param fileignoreext: The file extension to ignore. It is only useful
|
||||||
|
if fileext is set to the empty string. The default is ``.tmp``.
|
||||||
|
|
||||||
|
.. storage:: singlefile
|
||||||
|
|
||||||
|
Save data in single local ``.vcf`` or ``.ics`` file.
|
||||||
|
|
||||||
|
The storage basically guesses how items should be joined in the file.
|
||||||
|
|
||||||
|
.. versionadded:: 0.1.6
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
This storage is very slow, and that is unlikely to change. You should
|
||||||
|
consider using :storage:`filesystem` if it fits your usecase.
|
||||||
|
|
||||||
|
:param path: The filepath to the file to be written to. If collections are
|
||||||
|
used, this should contain ``%s`` as a placeholder for the collection
|
||||||
|
name.
|
||||||
|
:param encoding: Which encoding the file should use. Defaults to UTF-8.
|
||||||
|
|
||||||
|
Example for syncing with :storage:`caldav`::
|
||||||
|
|
||||||
|
[pair my_calendar]
|
||||||
|
a = my_calendar_local
|
||||||
|
b = my_calendar_remote
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
|
||||||
|
[storage my_calendar_local]
|
||||||
|
type = "singlefile"
|
||||||
|
path = ~/.calendars/%s.ics
|
||||||
|
|
||||||
|
[storage my_calendar_remote]
|
||||||
|
type = "caldav"
|
||||||
|
url = https://caldav.example.org/
|
||||||
|
#username =
|
||||||
|
#password =
|
||||||
|
|
||||||
|
Example for syncing with :storage:`caldav` using a ``null`` collection::
|
||||||
|
|
||||||
|
[pair my_calendar]
|
||||||
|
a = my_calendar_local
|
||||||
|
b = my_calendar_remote
|
||||||
|
|
||||||
|
[storage my_calendar_local]
|
||||||
|
type = "singlefile"
|
||||||
|
path = ~/my_calendar.ics
|
||||||
|
|
||||||
|
[storage my_calendar_remote]
|
||||||
|
type = "caldav"
|
||||||
|
url = https://caldav.example.org/username/my_calendar/
|
||||||
|
#username =
|
||||||
|
#password =
|
||||||
|
|
||||||
|
Read-only storages
|
||||||
|
++++++++++++++++++
|
||||||
|
|
||||||
|
These storages don't support writing of their items, consequently ``read_only``
|
||||||
|
is set to ``true`` by default. Changing ``read_only`` to ``false`` on them
|
||||||
|
leads to an error.
|
||||||
|
|
||||||
|
.. storage:: http
|
||||||
|
|
||||||
|
Use a simple ``.ics`` file (or similar) from the web.
|
||||||
|
``webcal://``-calendars are supposed to be used with this, but you have to
|
||||||
|
replace ``webcal://`` with ``http://``, or better, ``https://``.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[pair holidays]
|
||||||
|
a = holidays_local
|
||||||
|
b = holidays_remote
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage holidays_local]
|
||||||
|
type = "filesystem"
|
||||||
|
path = ~/.config/vdir/calendars/holidays/
|
||||||
|
fileext = .ics
|
||||||
|
|
||||||
|
[storage holidays_remote]
|
||||||
|
type = "http"
|
||||||
|
url = https://example.com/holidays_from_hicksville.ics
|
||||||
|
#filter_hook = null
|
||||||
|
|
||||||
|
Too many WebCAL providers generate UIDs of all ``VEVENT``-components
|
||||||
|
on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
|
||||||
|
This leads many synchronization programs to believe that all events have
|
||||||
|
been deleted and new ones created, and accordingly causes a lot of
|
||||||
|
unnecessary uploads and deletions on the other side. Vdirsyncer completely
|
||||||
|
ignores UIDs coming from :storage:`http` and will replace them with a hash
|
||||||
|
of the normalized item content.
|
||||||
|
|
||||||
|
:param url: URL to the ``.ics`` file.
|
||||||
|
:param username: Username for authentication.
|
||||||
|
:param password: Password for authentication.
|
||||||
|
:param verify: Optional. Local path to a self-signed SSL certificate.
|
||||||
|
See :ref:`ssl-tutorial` for more information.
|
||||||
|
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
||||||
|
server certificate. See :ref:`ssl-tutorial` for more information.
|
||||||
|
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||||
|
default is preemptive Basic auth, sending credentials even if server
|
||||||
|
didn't request them. This saves from an additional roundtrip per
|
||||||
|
request. Consider setting ``guess`` if this causes issues with your
|
||||||
|
server.
|
||||||
|
:param auth_cert: Optional. Either a path to a certificate with a client
|
||||||
|
certificate and the key or a list of paths to the files with them.
|
||||||
|
:param useragent: Default ``vdirsyncer``.
|
||||||
|
:param filter_hook: Optional. A filter command to call for each fetched
|
||||||
|
item, passed in raw form to stdin and returned via stdout.
|
||||||
|
If nothing is returned by the filter command, the item is skipped.
|
||||||
|
This can be used to alter fields as needed when dealing with providers
|
||||||
|
generating malformed events.
|
||||||
12
docs/contact.rst
Normal file
12
docs/contact.rst
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
===================
|
||||||
|
Support and Contact
|
||||||
|
===================
|
||||||
|
|
||||||
|
* The ``#pimutils`` `IRC channel on Libera.Chat <https://pimutils.org/contact>`_
|
||||||
|
might be active, depending on your timezone. Use it for support and general
|
||||||
|
(including off-topic) discussion.
|
||||||
|
|
||||||
|
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
|
||||||
|
concrete bug reports and feature requests.
|
||||||
|
|
||||||
|
* For security issues, contact ``contact@pimutils.org``.
|
||||||
127
docs/contributing.rst
Normal file
127
docs/contributing.rst
Normal file
|
|
@ -0,0 +1,127 @@
|
||||||
|
============================
|
||||||
|
Contributing to this project
|
||||||
|
============================
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
- Please read :doc:`contact` for questions and support requests.
|
||||||
|
|
||||||
|
- All participants must follow the `pimutils Code of Conduct
|
||||||
|
<http://pimutils.org/coc>`_.
|
||||||
|
|
||||||
|
The issue tracker
|
||||||
|
=================
|
||||||
|
|
||||||
|
We use `GitHub issues <https://github.com/pimutils/vdirsyncer/issues>`_ for
|
||||||
|
organizing bug reports and feature requests.
|
||||||
|
|
||||||
|
The following `labels <https://github.com/pimutils/vdirsyncer/labels>`_ are of
|
||||||
|
interest:
|
||||||
|
|
||||||
|
* "Planning" is for issues that are still undecided, but where at least some
|
||||||
|
discussion exists.
|
||||||
|
|
||||||
|
* "Blocked" is for issues that can't be worked on at the moment because some
|
||||||
|
other unsolved problem exists. This problem may be a bug in some software
|
||||||
|
dependency, for instance.
|
||||||
|
|
||||||
|
* "Ready" contains issues that are ready to work on.
|
||||||
|
|
||||||
|
If you just want to get started with contributing, the "ready" issues are an
|
||||||
|
option. Issues that are still in "Planning" are also an option, but require
|
||||||
|
more upfront thinking and may turn out to be impossible to solve, or at least
|
||||||
|
harder than anticipated. On the flip side those tend to be the more interesting
|
||||||
|
issues as well, depending on how one looks at it.
|
||||||
|
|
||||||
|
All of those labels are also available as a kanban board on `waffle.io
|
||||||
|
<https://waffle.io/pimutils/vdirsyncer>`_. It is really just an alternative
|
||||||
|
overview over all issues, but might be easier to comprehend.
|
||||||
|
|
||||||
|
Feel free to :doc:`contact <contact>` me or comment on the relevant issues for
|
||||||
|
further information.
|
||||||
|
|
||||||
|
Reporting bugs
|
||||||
|
--------------
|
||||||
|
|
||||||
|
* Make sure your problem isn't already listed in :doc:`problems`.
|
||||||
|
|
||||||
|
* Make sure you have the absolutely latest version of vdirsyncer. For users of
|
||||||
|
some Linux distributions such as Debian or Fedora this may not be the version
|
||||||
|
that your distro offers. In those cases please file a bug against the distro
|
||||||
|
package, not against upstream vdirsyncer.
|
||||||
|
|
||||||
|
* Use ``--verbosity=DEBUG`` when including output from vdirsyncer.
|
||||||
|
|
||||||
|
Suggesting features
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
If you're suggesting a feature, keep in mind that vdirsyncer tries not to be a
|
||||||
|
full calendar or contacts client, but rather just the piece of software that
|
||||||
|
synchronizes all the data. :doc:`Take a look at the documentation for software
|
||||||
|
working with vdirsyncer <tutorials/index>`.
|
||||||
|
|
||||||
|
Submitting patches, pull requests
|
||||||
|
=================================
|
||||||
|
|
||||||
|
* **Discuss everything in the issue tracker first** (or contact me somehow
|
||||||
|
else) before implementing it.
|
||||||
|
|
||||||
|
* Make sure the tests pass. See below for running them.
|
||||||
|
|
||||||
|
* But not because you wrote too few tests.
|
||||||
|
|
||||||
|
* Add yourself to ``AUTHORS.rst``, and add a note to ``CHANGELOG.rst`` too.
|
||||||
|
|
||||||
|
Running tests, how to set up your development environment
|
||||||
|
---------------------------------------------------------
|
||||||
|
|
||||||
|
For many patches, it might suffice to just let CI run the tests. However,
|
||||||
|
CI is slow, so you might want to run them locally too. For this, set up a
|
||||||
|
virtualenv_ and run this inside of it::
|
||||||
|
|
||||||
|
# Install development dependencies, including:
|
||||||
|
# - vdirsyncer from the repo into the virtualenv
|
||||||
|
# - style checks and formatting (ruff)
|
||||||
|
make install-dev
|
||||||
|
|
||||||
|
# Install git commit hook for some extra linting and checking
|
||||||
|
pre-commit install
|
||||||
|
|
||||||
|
Then you can run::
|
||||||
|
|
||||||
|
pytest # The normal testsuite
|
||||||
|
pre-commit run --all # Run all linters (which also run via pre-commit)
|
||||||
|
make -C docs html # Build the HTML docs, output is at docs/_build/html/
|
||||||
|
make -C docs linkcheck # Check docs for any broken links
|
||||||
|
|
||||||
|
The ``Makefile`` has a lot of options that allow you to control which tests are
|
||||||
|
run, and which servers are tested. Take a look at its code where they are all
|
||||||
|
initialized and documented.
|
||||||
|
|
||||||
|
To test against a specific DAV server, use ``DAV_SERVER``::
|
||||||
|
|
||||||
|
make DAV_SERVER=xandikos test
|
||||||
|
|
||||||
|
The server will be initialised in a docker container and terminated at the end
|
||||||
|
of the test suite.
|
||||||
|
|
||||||
|
If you have any questions, feel free to open issues about it.
|
||||||
|
|
||||||
|
Structure of the testsuite
|
||||||
|
--------------------------
|
||||||
|
|
||||||
|
Within ``tests/``, there are three main folders:
|
||||||
|
|
||||||
|
- ``system`` contains system- and also integration tests. A rough rule is: If
|
||||||
|
the test is using temporary files, put it here.
|
||||||
|
|
||||||
|
- ``unit``, where each testcase tests a single class or function.
|
||||||
|
|
||||||
|
- ``storage`` runs a generic storage testsuite against all storages.
|
||||||
|
|
||||||
|
The reason for this separation is: We are planning to generate separate
|
||||||
|
coverage reports for each of those testsuites. Ideally ``unit`` would generate
|
||||||
|
palatable coverage of the entire codebase *on its own*, and the *combination*
|
||||||
|
of ``system`` and ``storage`` as well.
|
||||||
|
|
||||||
|
.. _virtualenv: http://virtualenv.readthedocs.io/
|
||||||
15
docs/donations.rst
Normal file
15
docs/donations.rst
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
=========
|
||||||
|
Donations
|
||||||
|
=========
|
||||||
|
|
||||||
|
vdirsyncer is and will always be free and open source software. We appreciate
|
||||||
|
sponsors willing to fund our continued work on it.
|
||||||
|
|
||||||
|
If you found my work useful, please consider donating. Thank you!
|
||||||
|
|
||||||
|
- Bitcoin: ``13p42uWDL62bNRH3KWA6cSpSgvnHy1fs2E``.
|
||||||
|
- Sponsor via one-time tips or recurring donations `via Ko-fi`_.
|
||||||
|
- Sponsor via recurring donations `via liberapay`_.
|
||||||
|
|
||||||
|
.. _via Ko-fi: https://ko-fi.com/whynothugo
|
||||||
|
.. _via liberapay: https://liberapay.com/WhyNotHugo/
|
||||||
|
|
@ -2,24 +2,49 @@
|
||||||
vdirsyncer
|
vdirsyncer
|
||||||
==========
|
==========
|
||||||
|
|
||||||
vdirsyncer synchronizes your calendars and addressbooks between two storages.
|
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
||||||
The supported storages are CalDAV, CardDAV, arbitrary HTTP resources, `vdir
|
- `Source code <https://github.com/pimutils/vdirsyncer>`_
|
||||||
<https://github.com/untitaker/vdir>`_ and :ref:`some more <storages>`.
|
|
||||||
|
|
||||||
It aims to be for CalDAV and CardDAV what `OfflineIMAP
|
Vdirsyncer is a command-line tool for synchronizing calendars and addressbooks
|
||||||
<http://offlineimap.org/>`_ is for IMAP.
|
between a variety of servers and the local filesystem. The most popular usecase
|
||||||
|
is to synchronize a server with a local folder and use a set of other
|
||||||
|
:doc:`programs <tutorials/index>` to change the local events and contacts.
|
||||||
|
Vdirsyncer can then synchronize those changes back to the server.
|
||||||
|
|
||||||
Table of Contents
|
However, vdirsyncer is not limited to synchronizing between clients and
|
||||||
=================
|
servers. It can also be used to synchronize calendars and/or addressbooks
|
||||||
|
between two servers directly.
|
||||||
|
|
||||||
|
It aims to be for calendars and contacts what `OfflineIMAP
|
||||||
|
<http://offlineimap.org/>`_ is for emails.
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
|
:caption: Users
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
|
when
|
||||||
|
installation
|
||||||
tutorial
|
tutorial
|
||||||
api
|
ssl-tutorial
|
||||||
keyring
|
keyring
|
||||||
server_support
|
partial-sync
|
||||||
troubleshooting
|
config
|
||||||
changelog
|
tutorials/index
|
||||||
|
problems
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:caption: Developers
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
contributing
|
||||||
vdir
|
vdir
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:caption: General
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
packaging
|
||||||
|
contact
|
||||||
|
changelog
|
||||||
license
|
license
|
||||||
|
donations
|
||||||
|
|
|
||||||
122
docs/installation.rst
Normal file
122
docs/installation.rst
Normal file
|
|
@ -0,0 +1,122 @@
|
||||||
|
.. _installation:
|
||||||
|
|
||||||
|
============
|
||||||
|
Installation
|
||||||
|
============
|
||||||
|
|
||||||
|
OS/distro packages
|
||||||
|
------------------
|
||||||
|
|
||||||
|
The following packages are community-contributed and were up-to-date at the
|
||||||
|
time of writing:
|
||||||
|
|
||||||
|
- `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_
|
||||||
|
- `Ubuntu and Debian, x86_64-only
|
||||||
|
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
|
||||||
|
in the official repositories but may be out of date)
|
||||||
|
- `GNU Guix <https://packages.guix.gnu.org/packages/vdirsyncer/>`_
|
||||||
|
- `macOS (homebrew) <https://formulae.brew.sh/formula/vdirsyncer>`_
|
||||||
|
- `NetBSD <https://ftp.netbsd.org/pub/pkgsrc/current/pkgsrc/time/py-vdirsyncer/index.html>`_
|
||||||
|
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
|
||||||
|
- `Slackware (SlackBuild at Slackbuilds.org) <https://slackbuilds.org/repository/15.0/network/vdirsyncer/>`_
|
||||||
|
|
||||||
|
We only support the latest version of vdirsyncer, which is at the time of this
|
||||||
|
writing |vdirsyncer_version|. Please **do not file bugs if you use an older
|
||||||
|
version**.
|
||||||
|
|
||||||
|
Some distributions have multiple release channels. Debian and Fedora for
|
||||||
|
example have a "stable" release channel that ships an older version of
|
||||||
|
vdirsyncer. Those versions aren't supported either.
|
||||||
|
|
||||||
|
If there is no suitable package for your distribution, you'll need to
|
||||||
|
:ref:`install vdirsyncer manually <manual-installation>`. There is an easy
|
||||||
|
command to copy-and-paste for this as well, but you should be aware of its
|
||||||
|
consequences.
|
||||||
|
|
||||||
|
.. _manual-installation:
|
||||||
|
|
||||||
|
Manual installation
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
If your distribution doesn't provide a package for vdirsyncer, you still can
|
||||||
|
use Python's package manager "pip". First, you'll have to check that the
|
||||||
|
following things are installed:
|
||||||
|
|
||||||
|
- Python 3.9 to 3.13 and pip.
|
||||||
|
- ``libxml`` and ``libxslt``
|
||||||
|
- ``zlib``
|
||||||
|
- Linux or macOS. **Windows is not supported**, see :gh:`535`.
|
||||||
|
|
||||||
|
On Linux systems, using the distro's package manager is the best
|
||||||
|
way to do this, for example, using Ubuntu::
|
||||||
|
|
||||||
|
sudo apt-get install libxml2 libxslt1.1 zlib1g python3
|
||||||
|
|
||||||
|
Then you have several options. The following text applies for most Python
|
||||||
|
software by the way.
|
||||||
|
|
||||||
|
pipx: The clean, easy way
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
pipx_ is a new package manager for Python-based software that automatically
|
||||||
|
sets up a virtual environment for each program it installs. Please note that
|
||||||
|
installing via pipx will not include manual pages nor systemd services.
|
||||||
|
|
||||||
|
pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``
|
||||||
|
|
||||||
|
Assuming that pipx is installed, vdirsyncer can be installed with::
|
||||||
|
|
||||||
|
pipx install vdirsyncer
|
||||||
|
|
||||||
|
It can later be updated to the latest version with::
|
||||||
|
|
||||||
|
pipx upgrade vdirsyncer
|
||||||
|
|
||||||
|
And can be uninstalled with::
|
||||||
|
|
||||||
|
pipx uninstall vdirsyncer
|
||||||
|
|
||||||
|
This last command will remove vdirsyncer and any dependencies installed into
|
||||||
|
the above location.
|
||||||
|
|
||||||
|
.. _pipx: https://github.com/pipxproject/pipx
|
||||||
|
|
||||||
|
The dirty, easy way
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
If pipx is not available on your distribution, the easiest way to install
|
||||||
|
vdirsyncer at this point would be to run::
|
||||||
|
|
||||||
|
pip install --ignore-installed vdirsyncer
|
||||||
|
|
||||||
|
- ``--ignore-installed`` is to work around Debian's potentially broken packages
|
||||||
|
(see :ref:`debian-urllib3`).
|
||||||
|
|
||||||
|
This method has a major flaw though: Pip doesn't keep track of the files it
|
||||||
|
installs. Vdirsyncer's files would be located somewhere in
|
||||||
|
``~/.local/lib/python*``, but you can't possibly know which packages were
|
||||||
|
installed as dependencies of vdirsyncer and which ones were not, should you
|
||||||
|
decide to uninstall it. In other words, using pip that way would pollute your
|
||||||
|
home directory.
|
||||||
|
|
||||||
|
The clean, hard way
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
There is a way to install Python software without scattering stuff across
|
||||||
|
your filesystem: virtualenv_. There are a lot of resources on how to use it,
|
||||||
|
the simplest possible way would look something like::
|
||||||
|
|
||||||
|
virtualenv ~/vdirsyncer_env
|
||||||
|
~/vdirsyncer_env/bin/pip install vdirsyncer
|
||||||
|
alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer"
|
||||||
|
|
||||||
|
You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``.
|
||||||
|
|
||||||
|
This method has two advantages:
|
||||||
|
|
||||||
|
- It separately installs all Python packages into ``~/vdirsyncer_env/``,
|
||||||
|
without relying on the system packages. This works around OS- or
|
||||||
|
distro-specific issues.
|
||||||
|
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
|
||||||
|
|
||||||
|
.. _virtualenv: https://virtualenv.readthedocs.io/
|
||||||
135
docs/keyring.rst
135
docs/keyring.rst
|
|
@ -1,55 +1,96 @@
|
||||||
===============
|
=================
|
||||||
Keyring Support
|
Storing passwords
|
||||||
|
=================
|
||||||
|
|
||||||
|
.. versionchanged:: 0.7.0
|
||||||
|
|
||||||
|
Password configuration got completely overhauled.
|
||||||
|
|
||||||
|
Vdirsyncer can fetch passwords from several sources other than the config file.
|
||||||
|
|
||||||
|
Command
|
||||||
|
=======
|
||||||
|
|
||||||
|
Say you have the following configuration::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "caldav"
|
||||||
|
url = ...
|
||||||
|
username = "foo"
|
||||||
|
password = "bar"
|
||||||
|
|
||||||
|
But it bugs you that the password is stored in cleartext in the config file.
|
||||||
|
You can do this::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "caldav"
|
||||||
|
url = ...
|
||||||
|
username = "foo"
|
||||||
|
password.fetch = ["command", "~/get-password.sh", "more", "args"]
|
||||||
|
|
||||||
|
You can fetch the username as well::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "caldav"
|
||||||
|
url = ...
|
||||||
|
username.fetch = ["command", "~/get-username.sh"]
|
||||||
|
password.fetch = ["command", "~/get-password.sh"]
|
||||||
|
|
||||||
|
Or really any kind of parameter in a storage section.
|
||||||
|
|
||||||
|
You can also pass the command as a string to be executed in a shell::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
...
|
||||||
|
password.fetch = ["shell", "~/.local/bin/get-my-password | head -n1"]
|
||||||
|
|
||||||
|
With pass_ for example, you might find yourself writing something like this in
|
||||||
|
your configuration file::
|
||||||
|
|
||||||
|
password.fetch = ["command", "pass", "caldav"]
|
||||||
|
|
||||||
|
.. _pass: https://www.passwordstore.org/
|
||||||
|
|
||||||
|
Accessing the system keyring
|
||||||
|
----------------------------
|
||||||
|
|
||||||
|
As shown above, you can use the ``command`` strategy to fetch your credentials
|
||||||
|
from arbitrary sources. A very common usecase is to fetch your password from
|
||||||
|
the system keyring.
|
||||||
|
|
||||||
|
The keyring_ Python package contains a command-line utility for fetching
|
||||||
|
passwords from the OS's password store. Installation::
|
||||||
|
|
||||||
|
pip install keyring
|
||||||
|
|
||||||
|
Basic usage::
|
||||||
|
|
||||||
|
password.fetch = ["command", "keyring", "get", "example.com", "foouser"]
|
||||||
|
|
||||||
|
.. _keyring: https://github.com/jaraco/keyring/
|
||||||
|
|
||||||
|
Password Prompt
|
||||||
===============
|
===============
|
||||||
|
|
||||||
*vdirsyncer* will try the following storages if no password (but a username) is
|
You can also simply prompt for the password::
|
||||||
set in your config. If that fails too, it will prompt for the password and
|
|
||||||
store the password in the system keyring (if possible and wished).
|
|
||||||
|
|
||||||
netrc
|
[storage foo]
|
||||||
=====
|
type = "caldav"
|
||||||
|
username = "myusername"
|
||||||
|
password.fetch = ["prompt", "Password for CalDAV"]
|
||||||
|
|
||||||
*vdirsyncer* can use ``~/.netrc`` for retrieving a password. An example
|
Environment variable
|
||||||
``.netrc`` looks like this::
|
====================
|
||||||
|
|
||||||
machine owncloud.example.com
|
To read the password from an environment variable::
|
||||||
login foouser
|
|
||||||
password foopass
|
|
||||||
|
|
||||||
System Keyring
|
[storage foo]
|
||||||
==============
|
type = "caldav"
|
||||||
|
username = "myusername"
|
||||||
|
password.fetch = ["command", "printenv", "DAV_PW"]
|
||||||
|
|
||||||
*vdirsyncer* can also use your system's password storage for saving password in
|
This is especially handy if you use the same password multiple times
|
||||||
a (more) secure way.
|
(say, for a CardDAV and a CalDAV storage).
|
||||||
|
On bash, you can read and export the password without printing::
|
||||||
|
|
||||||
To use it, you must install keyring_.
|
read -s -p "DAV Password: " DAV_PW && export DAV_PW
|
||||||
|
|
||||||
.. _keyring: https://bitbucket.org/kang/python-keyring-lib
|
|
||||||
|
|
||||||
*vdirsyncer* will use the full resource URL as the key when saving.
|
|
||||||
|
|
||||||
When retrieving the key, it will try to remove segments of the URL's path until
|
|
||||||
it finds a password. For example, if you save a password under the key
|
|
||||||
``vdirsyncer:http://example.com``, it will be used as a fallback for all
|
|
||||||
resources on ``example.com``. If you additionally save a password under the key
|
|
||||||
``vdirsyncer:http://example.com/special/``, that password will be used for all
|
|
||||||
resources on ``example.com`` whose path starts with ``/special/``.
|
|
||||||
|
|
||||||
*keyring* support these keyrings:
|
|
||||||
|
|
||||||
- **OSXKeychain:** The Keychain service in Mac OS X.
|
|
||||||
- **KDEKWallet:** The KDE's Kwallet service.
|
|
||||||
- **GnomeKeyring** For Gnome 2 environment.
|
|
||||||
- **SecretServiceKeyring:** For newer GNOME and KDE environments.
|
|
||||||
- **WinVaultKeyring:** The Windows Credential Vault
|
|
||||||
- **Win32CryptoKeyring:** for Windows 2k+.
|
|
||||||
- **CryptedFileKeyring:** A command line interface keyring base on PyCrypto.
|
|
||||||
- **UncryptedFileKeyring:** A keyring which leaves passwords directly in file.
|
|
||||||
|
|
||||||
Changing the Password
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
If your password on the server changed or you misspelled it you need to use
|
|
||||||
your system's password manager (e.g. seahorse for most Linux distributions) to
|
|
||||||
either delete or directly change it, *vdirsyncer* currently has no means to do
|
|
||||||
it for you.
|
|
||||||
|
|
|
||||||
|
|
@ -2,7 +2,7 @@
|
||||||
Credits and License
|
Credits and License
|
||||||
===================
|
===================
|
||||||
|
|
||||||
.. include:: ../CONTRIBUTORS.rst
|
.. include:: ../AUTHORS.rst
|
||||||
|
|
||||||
License
|
License
|
||||||
=======
|
=======
|
||||||
|
|
|
||||||
92
docs/packaging.rst
Normal file
92
docs/packaging.rst
Normal file
|
|
@ -0,0 +1,92 @@
|
||||||
|
====================
|
||||||
|
Packaging guidelines
|
||||||
|
====================
|
||||||
|
|
||||||
|
Thank you very much for packaging vdirsyncer! The following guidelines should
|
||||||
|
help you to avoid some common pitfalls.
|
||||||
|
|
||||||
|
If you find yourself needing to patch anything, or going in a different direction,
|
||||||
|
please open an issue so we can also address it in a way that works for everyone. Otherwise
|
||||||
|
we get bug reports for code or scenarios that don't exist in upstream vdirsyncer.
|
||||||
|
|
||||||
|
Obtaining the source code
|
||||||
|
=========================
|
||||||
|
|
||||||
|
The main distribution channel is `PyPI
|
||||||
|
<https://pypi.python.org/pypi/vdirsyncer>`_, and source tarballs can be
|
||||||
|
obtained there. We mirror the same package tarball and wheel as GitHub
|
||||||
|
releases. Please do not confuse these with the auto-generated GitHub "Source
|
||||||
|
Code" tarball. Those are missing some important metadata and your build will fail.
|
||||||
|
|
||||||
|
We give each release a tag in the git repo. If you want to get notified of new
|
||||||
|
releases, `GitHub's feed
|
||||||
|
<https://github.com/pimutils/vdirsyncer/releases.atom>`_ is a good way.
|
||||||
|
|
||||||
|
Tags will be signed by the maintainer who is doing the release (starting with
|
||||||
|
0.16.8), and generation of the tarball and wheel is done by CI. Hence, only the
|
||||||
|
tag itself is signed.
|
||||||
|
|
||||||
|
Dependency versions
|
||||||
|
===================
|
||||||
|
|
||||||
|
As with most Python packages, ``setup.py`` denotes the dependencies of
|
||||||
|
vdirsyncer. It also contains lower-bound versions of each dependency. Older
|
||||||
|
versions will be rejected by the testsuite.
|
||||||
|
|
||||||
|
Testing
|
||||||
|
=======
|
||||||
|
|
||||||
|
Everything testing-related goes through the ``Makefile`` in the root of the
|
||||||
|
repository or PyPI package. Trying to e.g. run ``pytest`` directly will
|
||||||
|
require a lot of environment variables to be set (for configuration) and you
|
||||||
|
probably don't want to deal with that.
|
||||||
|
|
||||||
|
You can install all the development dependencies with::
|
||||||
|
|
||||||
|
make install-dev
|
||||||
|
|
||||||
|
You probably don't want this since it will use pip to download the
|
||||||
|
dependencies. Alternatively test dependencies are listed as ``test`` optional
|
||||||
|
dependencies in ``pyproject.toml``, again with lower-bound version
|
||||||
|
requirements.
|
||||||
|
|
||||||
|
You also have to have vdirsyncer fully installed at this point. Merely
|
||||||
|
``cd``-ing into the tarball will not be sufficient.
|
||||||
|
|
||||||
|
Running the tests happens with::
|
||||||
|
|
||||||
|
pytest
|
||||||
|
|
||||||
|
Hypothesis will randomly generate test input. If you care about deterministic
|
||||||
|
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
|
||||||
|
|
||||||
|
make DETERMINISTIC_TESTS=true test
|
||||||
|
|
||||||
|
There are a lot of additional variables that allow you to test vdirsyncer
|
||||||
|
against a particular server. Those variables are not "stable" and may change
|
||||||
|
drastically between minor versions. Just don't use them, you are unlikely to
|
||||||
|
find bugs that vdirsyncer's CI hasn't found.
|
||||||
|
|
||||||
|
Documentation
|
||||||
|
=============
|
||||||
|
|
||||||
|
Using Sphinx_ you can generate the documentation you're reading right now in a
|
||||||
|
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
|
||||||
|
take care of the HTML docs' formatting.
|
||||||
|
|
||||||
|
You can find a list of dependencies in ``pyproject.toml``, in the
|
||||||
|
``project.optional-dependencies`` section as ``docs``. Again, you can install
|
||||||
|
those using pip with::
|
||||||
|
|
||||||
|
pip install '.[docs]'
|
||||||
|
|
||||||
|
Then change into the ``docs/`` directory and build whatever format you want
|
||||||
|
using the ``Makefile`` in there (run ``make`` for the formats you can build).
|
||||||
|
|
||||||
|
.. _Sphinx: https://www.sphinx-doc.org/
|
||||||
|
|
||||||
|
Contrib files
|
||||||
|
=============
|
||||||
|
|
||||||
|
Reference ``systemd.service`` and ``systemd.timer`` unit files are provided. It
|
||||||
|
is recommended to install this if your distribution is systemd-based.
|
||||||
72
docs/partial-sync.rst
Normal file
72
docs/partial-sync.rst
Normal file
|
|
@ -0,0 +1,72 @@
|
||||||
|
.. _partial_sync_tutorial:
|
||||||
|
|
||||||
|
===============================
|
||||||
|
Syncing with read-only storages
|
||||||
|
===============================
|
||||||
|
|
||||||
|
If you want to subscribe to a public, read-only `WebCAL
|
||||||
|
<https://en.wikipedia.org/wiki/Webcal>`_-calendar but neither your server nor
|
||||||
|
your calendar apps support that (or support it insufficiently), vdirsyncer can
|
||||||
|
be used to synchronize such a public calendar ``A`` with a new calendar ``B``
|
||||||
|
of your own and keep ``B`` updated.
|
||||||
|
|
||||||
|
Step 1: Create the target calendar
|
||||||
|
==================================
|
||||||
|
|
||||||
|
First you need to create the calendar you want to sync the WebCAL-calendar
|
||||||
|
with. Most servers offer a web interface for this. You then need to note the
|
||||||
|
CalDAV URL of your calendar. Note that this URL should directly point to the
|
||||||
|
calendar you just created, which means you would have one such URL for each
|
||||||
|
calendar you have.
|
||||||
|
|
||||||
|
Step 2: Creating the config
|
||||||
|
===========================
|
||||||
|
|
||||||
|
Paste this into your vdirsyncer config::
|
||||||
|
|
||||||
|
[pair holidays]
|
||||||
|
a = "holidays_public"
|
||||||
|
b = "holidays_private"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage holidays_public]
|
||||||
|
type = "http"
|
||||||
|
# The URL to your iCalendar file.
|
||||||
|
url = "..."
|
||||||
|
|
||||||
|
[storage holidays_private]
|
||||||
|
type = "caldav"
|
||||||
|
# The direct URL to your calendar.
|
||||||
|
url = "..."
|
||||||
|
# The credentials to your CalDAV server
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
Then run ``vdirsyncer discover holidays`` and ``vdirsyncer sync holidays``, and
|
||||||
|
your previously created calendar should be filled with events.
|
||||||
|
|
||||||
|
Step 3: The partial_sync parameter
|
||||||
|
==================================
|
||||||
|
|
||||||
|
.. versionadded:: 0.14
|
||||||
|
|
||||||
|
You may get into a situation where you want to hide or modify some events from
|
||||||
|
your ``holidays`` calendar. If you try to do that at this point, you'll notice
|
||||||
|
that vdirsyncer will revert any changes you've made after a few times of
|
||||||
|
running ``sync``. This is because vdirsyncer wants to keep everything in sync,
|
||||||
|
and it can't synchronize changes to the public holidays-calendar because it
|
||||||
|
doesn't have the rights to do so.
|
||||||
|
|
||||||
|
For such purposes you can set the ``partial_sync`` parameter to ``ignore``::
|
||||||
|
|
||||||
|
[pair holidays]
|
||||||
|
a = "holidays_public"
|
||||||
|
b = "holidays_private"
|
||||||
|
collections = null
|
||||||
|
partial_sync = "ignore"
|
||||||
|
|
||||||
|
See :ref:`the config docs <partial_sync_def>` for more information.
|
||||||
|
|
||||||
|
.. _nextCloud: https://nextcloud.com/
|
||||||
|
.. _Baikal: http://sabre.io/baikal/
|
||||||
|
.. _DAViCal: http://www.davical.org/
|
||||||
22
docs/problems.rst
Normal file
22
docs/problems.rst
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
==============
|
||||||
|
Known Problems
|
||||||
|
==============
|
||||||
|
|
||||||
|
For any unanswered questions or problems, see :doc:`contact`.
|
||||||
|
|
||||||
|
.. _debian-urllib3:
|
||||||
|
|
||||||
|
Requests-related ImportErrors
|
||||||
|
-----------------------------
|
||||||
|
|
||||||
|
ImportError: No module named packages.urllib3.poolmanager
|
||||||
|
|
||||||
|
ImportError: cannot import name iter_field_objects
|
||||||
|
|
||||||
|
Debian and nowadays even other distros make modifications to the ``requests``
|
||||||
|
package that don't play well with packages assuming a normal ``requests``. This
|
||||||
|
is due to stubbornness on both sides.
|
||||||
|
|
||||||
|
See :gh:`82` and :gh:`140` for past discussions. You have one option to work
|
||||||
|
around this, that is, to install vdirsyncer in a virtual environment, see
|
||||||
|
:ref:`manual-installation`.
|
||||||
|
|
@ -1,49 +0,0 @@
|
||||||
==============
|
|
||||||
Server Support
|
|
||||||
==============
|
|
||||||
|
|
||||||
vdirsyncer is currently regularly and automatically tested against the latest
|
|
||||||
versions of Radicale and ownCloud. In principle, vdirsyncer is supposed to run
|
|
||||||
correctly with any remotely popular CalDAV or CardDAV server.
|
|
||||||
|
|
||||||
vdirsyncer's synchronization works best if the items have ``UID`` properties.
|
|
||||||
Items which don't have this property still should be synchronized fine as of
|
|
||||||
version 1.5, but for performance reasons, such items should rather be the
|
|
||||||
exception than the rule. For a possible way to automatically fix such items,
|
|
||||||
take a look at `vfix <https://github.com/geier/vfix>`_.
|
|
||||||
|
|
||||||
Radicale
|
|
||||||
========
|
|
||||||
|
|
||||||
Vdirsyncer is tested against the git version and the latest PyPI release of
|
|
||||||
Radicale.
|
|
||||||
|
|
||||||
- Radicale doesn't `support time ranges in the calendar-query of CalDAV
|
|
||||||
<https://github.com/Kozea/Radicale/issues/146>`_, so setting ``start_date``
|
|
||||||
and ``end_date`` for :py:class:`vdirsyncer.storage.CaldavStorage` will have
|
|
||||||
no or unpredicted consequences.
|
|
||||||
|
|
||||||
- `Versions of Radicale older than 0.9b1 choke on RFC-conform queries for all
|
|
||||||
items of a collection <https://github.com/Kozea/Radicale/issues/143>`_.
|
|
||||||
|
|
||||||
Vdirsyncer's default value ``'VTODO, VEVENT'`` for
|
|
||||||
:py:class:`vdirsyncer.storage.CaldavStorage`'s ``item_types`` parameter will
|
|
||||||
work fine with these versions, and so will all values, except for the empty
|
|
||||||
one.
|
|
||||||
|
|
||||||
The empty value ``''`` will get vdirsyncer to send a single HTTP request to
|
|
||||||
fetch all items, instead of one HTTP request for each possible item type. As
|
|
||||||
the linked issue describes, old versions of Radicale expect a
|
|
||||||
non-RFC-compliant format for such queries, one which vdirsyncer doesn't
|
|
||||||
support.
|
|
||||||
|
|
||||||
ownCloud
|
|
||||||
========
|
|
||||||
|
|
||||||
Vdirsyncer is tested against the latest version of ownCloud.
|
|
||||||
|
|
||||||
- *Versions older than 7.0.0:* ownCloud uses SabreDAV, which had problems
|
|
||||||
detecting collisions and race-conditions. The problems were reported and are
|
|
||||||
fixed in SabreDAV's repo, and the corresponding fix is also in ownCloud since
|
|
||||||
7.0.0. See `Bug #16 <https://github.com/untitaker/vdirsyncer/issues/16>`_ for
|
|
||||||
more information.
|
|
||||||
72
docs/ssl-tutorial.rst
Normal file
72
docs/ssl-tutorial.rst
Normal file
|
|
@ -0,0 +1,72 @@
|
||||||
|
.. _ssl-tutorial:
|
||||||
|
|
||||||
|
==============================
|
||||||
|
SSL and certificate validation
|
||||||
|
==============================
|
||||||
|
|
||||||
|
All SSL configuration is done per-storage.
|
||||||
|
|
||||||
|
Pinning by fingerprint
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
To pin the certificate by fingerprint::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "caldav"
|
||||||
|
...
|
||||||
|
verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E"
|
||||||
|
|
||||||
|
SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported.
|
||||||
|
CA validation is disabled when pinning a fingerprint.
|
||||||
|
|
||||||
|
You can use the following command for obtaining a SHA256 fingerprint::
|
||||||
|
|
||||||
|
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256
|
||||||
|
|
||||||
|
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
|
||||||
|
that you can forget about all of that. It is easier to deploy a free
|
||||||
|
certificate from them than configuring all of your clients to accept the
|
||||||
|
self-signed certificate.
|
||||||
|
|
||||||
|
.. _ssl-cas:
|
||||||
|
|
||||||
|
Custom root CAs
|
||||||
|
---------------
|
||||||
|
|
||||||
|
To point vdirsyncer to a custom set of root CAs::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "caldav"
|
||||||
|
...
|
||||||
|
verify = "/path/to/cert.pem"
|
||||||
|
|
||||||
|
Vdirsyncer uses the aiohttp_ library, which uses the default `ssl.SSLContext
|
||||||
|
<https://docs.python.org/3/library/ssl.html#ssl.SSLContext>`_ by default.
|
||||||
|
|
||||||
|
There are cases where certificate validation fails even though you can access
|
||||||
|
the server fine through e.g. your browser. This usually indicates that your
|
||||||
|
installation of ``python`` or the ``aiohttp`` library is somehow broken. In
|
||||||
|
such cases, it makes sense to explicitly set ``verify`` or
|
||||||
|
``verify_fingerprint`` as shown above.
|
||||||
|
|
||||||
|
.. _aiohttp: https://docs.aiohttp.org/en/stable/index.html
|
||||||
|
|
||||||
|
.. _ssl-client-certs:
|
||||||
|
|
||||||
|
Client Certificates
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
Client certificates may be specified with the ``auth_cert`` parameter. If the
|
||||||
|
key and certificate are stored in the same file, it may be a string::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "caldav"
|
||||||
|
...
|
||||||
|
auth_cert = "/path/to/certificate.pem"
|
||||||
|
|
||||||
|
If the key and certificate are separate, a list may be used::
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "caldav"
|
||||||
|
...
|
||||||
|
auth_cert = ["/path/to/certificate.crt", "/path/to/key.key"]
|
||||||
|
|
@ -1,16 +0,0 @@
|
||||||
===============
|
|
||||||
Troubleshooting
|
|
||||||
===============
|
|
||||||
|
|
||||||
- **[Errno 185090050] _ssl.c:343: error:0B084002:x509 certificate
|
|
||||||
routines:X509_load_cert_crl_file:system lib**
|
|
||||||
|
|
||||||
vdirsyncer cannot find the path to your certificate bundle, you need to
|
|
||||||
supply it as a parameter to ``verify`` in your config file, e.g.::
|
|
||||||
|
|
||||||
verify = /usr/share/ca-certificates/cacert.org/cacert.org_root.crt
|
|
||||||
|
|
||||||
- **During sync an error occurs: TypeError: request() got an unexpected keyword
|
|
||||||
argument 'verify'**
|
|
||||||
|
|
||||||
You need to update your version of requests.
|
|
||||||
|
|
@ -2,33 +2,47 @@
|
||||||
Tutorial
|
Tutorial
|
||||||
========
|
========
|
||||||
|
|
||||||
|
Before starting, :doc:`consider if you actually need vdirsyncer <when>`. There
|
||||||
|
are better alternatives available for particular usecases.
|
||||||
|
|
||||||
Installation
|
Installation
|
||||||
============
|
============
|
||||||
|
|
||||||
- Make sure you have Python 2.7+ or Python 3.3+ installed.
|
See :ref:`installation`.
|
||||||
|
|
||||||
- ``pip install --user vdirsyncer``
|
|
||||||
|
|
||||||
- Check if the ``vdirsyncer`` command is available.
|
|
||||||
|
|
||||||
Configuration
|
Configuration
|
||||||
=============
|
=============
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
The `example.cfg from the repository
|
|
||||||
<https://github.com/untitaker/vdirsyncer/blob/master/example.cfg>`_
|
|
||||||
contains a very terse version of this.
|
|
||||||
|
|
||||||
By default, *vdirsyncer* looks for its configuration file at
|
- The `config.example from the repository
|
||||||
``~/.vdirsyncer/config``. You can use the ``VDIRSYNCER_CONFIG`` environment
|
<https://github.com/pimutils/vdirsyncer/blob/main/config.example>`_
|
||||||
variable to change this path.
|
contains a very terse version of this.
|
||||||
|
|
||||||
|
- In this example we set up contacts synchronization, but calendar sync
|
||||||
|
works almost the same. Just swap ``type = "carddav"``
|
||||||
|
for ``type = "caldav"`` and ``fileext = ".vcf"``
|
||||||
|
for ``fileext = ".ics"``.
|
||||||
|
|
||||||
|
- Take a look at the :doc:`problems` page if anything doesn't work like
|
||||||
|
planned.
|
||||||
|
|
||||||
|
By default, vdirsyncer looks for its configuration file in the following
|
||||||
|
locations:
|
||||||
|
|
||||||
|
- The file pointed to by the ``VDIRSYNCER_CONFIG`` environment variable.
|
||||||
|
- ``~/.vdirsyncer/config``.
|
||||||
|
- ``$XDG_CONFIG_HOME/vdirsyncer/config``, which is normally
|
||||||
|
``~/.config/vdirsyncer/config``. See the XDG-Basedir_ specification.
|
||||||
|
|
||||||
|
.. _XDG-Basedir: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html#variables
|
||||||
|
|
||||||
The config file should start with a :ref:`general section <general_config>`,
|
The config file should start with a :ref:`general section <general_config>`,
|
||||||
where the only required parameter is ``status_path``. The following is a
|
where the only required parameter is ``status_path``. The following is a
|
||||||
minimal example::
|
minimal example::
|
||||||
|
|
||||||
[general]
|
[general]
|
||||||
status_path = ~/.vdirsyncer/status/
|
status_path = "~/.vdirsyncer/status/"
|
||||||
|
|
||||||
After the general section, an arbitrary amount of *pair and storage sections*
|
After the general section, an arbitrary amount of *pair and storage sections*
|
||||||
might come.
|
might come.
|
||||||
|
|
@ -36,71 +50,236 @@ might come.
|
||||||
In vdirsyncer, synchronization is always done between two storages. Such
|
In vdirsyncer, synchronization is always done between two storages. Such
|
||||||
storages are defined in :ref:`storage sections <storage_config>`, and which
|
storages are defined in :ref:`storage sections <storage_config>`, and which
|
||||||
pairs of storages should actually be synchronized is defined in :ref:`pair
|
pairs of storages should actually be synchronized is defined in :ref:`pair
|
||||||
section <pair_config>`.
|
section <pair_config>`. This format is copied from OfflineIMAP, where storages
|
||||||
|
are called repositories and pairs are called accounts.
|
||||||
|
|
||||||
This format is copied from OfflineIMAP, where storages are called
|
The following example synchronizes ownCloud's addressbooks to ``~/.contacts/``::
|
||||||
repositories and pairs are called accounts.
|
|
||||||
|
|
||||||
The following example synchronizes a single CardDAV-addressbook to
|
|
||||||
``~/.contacts/``::
|
|
||||||
|
|
||||||
[pair my_contacts]
|
[pair my_contacts]
|
||||||
a = my_contacts_local
|
a = "my_contacts_local"
|
||||||
b = my_contacts_remote
|
b = "my_contacts_remote"
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
|
||||||
[storage my_contacts_local]
|
[storage my_contacts_local]
|
||||||
type = filesystem
|
type = "filesystem"
|
||||||
path = ~/.contacts/
|
path = "~/.contacts/"
|
||||||
fileext = .vcf
|
fileext = ".vcf"
|
||||||
|
|
||||||
[storage my_contacts_remote]
|
[storage my_contacts_remote]
|
||||||
type = carddav
|
type = "carddav"
|
||||||
url = https://owncloud.example.com/remote.php/carddav/addressbooks/bob/default/
|
|
||||||
username = bob
|
|
||||||
password = asdf
|
|
||||||
|
|
||||||
After running ``vdirsyncer sync``, ``~/.contacts/`` will contain a bunch of
|
# We can simplify this URL here as well. In theory it shouldn't matter.
|
||||||
``.vcf`` files which all contain a contact in ``VCARD`` format each. You can
|
url = "https://owncloud.example.com/remote.php/carddav/"
|
||||||
modify their content, add new ones and delete some, and your changes will be
|
username = "bob"
|
||||||
synchronized to the CalDAV server after you run ``vdirsyncer sync`` again. For
|
password = "asdf"
|
||||||
further reference, it uses the storages
|
|
||||||
:py:class:`vdirsyncer.storage.FilesystemStorage` and
|
|
||||||
:py:class:`vdirsyncer.storage.CarddavStorage`.
|
|
||||||
|
|
||||||
But what if we want to synchronize multiple addressbooks from the same server?
|
.. note::
|
||||||
Of course we could create new pairs and storages for each addressbook, but that
|
|
||||||
is very tedious to do. Instead we will use a shortcut:
|
|
||||||
|
|
||||||
- Remove the last segment from the URL, so that it ends with ``.../bob/``
|
Configuration for other servers can be found at :ref:`supported-servers`.
|
||||||
instead of ``.../bob/default/``.
|
|
||||||
|
|
||||||
- Add the following line to the *pair* section::
|
After running ``vdirsyncer discover`` and ``vdirsyncer sync``, ``~/.contacts/``
|
||||||
|
will contain subfolders for each addressbook, which in turn will contain a
|
||||||
|
bunch of ``.vcf`` files which all contain a contact in ``VCARD`` format each.
|
||||||
|
You can modify their contents, add new ones and delete some [1]_, and your
|
||||||
|
changes will be synchronized to the CalDAV server after you run ``vdirsyncer
|
||||||
|
sync`` again. For further reference, it uses the storages :storage:`filesystem`
|
||||||
|
and :storage:`carddav`.
|
||||||
|
|
||||||
[pair my_contacts]
|
However, if new collections are created on the server, it will not
|
||||||
...
|
automatically start synchronizing those [2]_. You need to run ``vdirsyncer
|
||||||
collections = default,work
|
discover`` again to re-fetch this list instead.
|
||||||
|
|
||||||
This will synchronize
|
.. [1] You'll want to :doc:`use a helper program for this <tutorials/index>`.
|
||||||
``https://owncloud.example.com/remote.php/carddav/addressbooks/bob/default/``
|
|
||||||
with ``~/.contacts/default/`` and
|
|
||||||
``https://owncloud.example.com/remote.php/carddav/addressbooks/bob/work/`` with
|
|
||||||
``~/.contacts/work/``. Under the hood, vdirsyncer also just copies the pairs
|
|
||||||
and storages for each collection and appends the collection name to the path or
|
|
||||||
URL.
|
|
||||||
|
|
||||||
It almost seems like it could work. But what if the same item is changed on
|
.. [2] Because collections are added rarely, and checking for this case before
|
||||||
both sides? What should vdirsyncer do? By default, it will show an ugly error
|
every synchronization isn't worth the overhead.
|
||||||
message, which is surely a way to avoid the problem. Another way to solve that
|
|
||||||
ambiguity is to add another line to the *pair* section::
|
More Configuration
|
||||||
|
==================
|
||||||
|
|
||||||
|
.. _conflict_resolution_tutorial:
|
||||||
|
|
||||||
|
Conflict resolution
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
What if the same item is changed on both sides? What should vdirsyncer
|
||||||
|
do? Three options are currently provided:
|
||||||
|
|
||||||
|
1. vdirsyncer displays an error message (the default);
|
||||||
|
2. vdirsyncer chooses one alternative version over the other;
|
||||||
|
3. vdirsyncer starts a command of your choice that is supposed to merge the two alternative versions.
|
||||||
|
|
||||||
|
Options 2 and 3 require adding a ``"conflict_resolution"``
|
||||||
|
parameter to the pair section. Option 2 requires giving either ``"a
|
||||||
|
wins"`` or ``"b wins"`` as value to the parameter::
|
||||||
|
|
||||||
[pair my_contacts]
|
[pair my_contacts]
|
||||||
...
|
...
|
||||||
conflict_resolution = b wins
|
conflict_resolution = "b wins"
|
||||||
|
|
||||||
Earlier we wrote that ``b = my_contacts_remote``, so when vdirsyncer encounters
|
Earlier we wrote that ``b = "my_contacts_remote"``, so when vdirsyncer encounters
|
||||||
the situation where an item changed on both sides, it will simply overwrite the
|
the situation where an item changed on both sides, it will simply overwrite the
|
||||||
local item with the one from the server. Of course ``a wins`` is also a valid
|
local item with the one from the server.
|
||||||
value.
|
|
||||||
|
|
||||||
Calendar sync works almost the same. Just swap ``type = carddav`` for ``type =
|
Option 3 requires specifying as value of ``"conflict_resolution"`` an
|
||||||
caldav`` and ``fileext = .vcf`` for ``fileext = .ics``.
|
array starting with ``"command"`` and containing paths and arguments
|
||||||
|
to a command. For example::
|
||||||
|
|
||||||
|
[pair my_contacts]
|
||||||
|
...
|
||||||
|
conflict_resolution = ["command", "vimdiff"]
|
||||||
|
|
||||||
|
In this example, ``vimdiff <a> <b>`` will be called with ``<a>`` and
|
||||||
|
``<b>`` being two temporary files containing the conflicting
|
||||||
|
files. The files need to be exactly the same when the command
|
||||||
|
returns. More arguments can be passed to the command by adding more
|
||||||
|
elements to the array.
|
||||||
|
|
||||||
|
See :ref:`pair_config` for the reference documentation.
|
||||||
|
|
||||||
|
.. _metasync_tutorial:
|
||||||
|
|
||||||
|
Metadata synchronization
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
Besides items, vdirsyncer can also synchronize metadata like the addressbook's
|
||||||
|
or calendar's "human-friendly" name (internally called "displayname") or the
|
||||||
|
color associated with a calendar. For the purpose of explaining this feature,
|
||||||
|
let's switch to a different base example. This time we'll synchronize calendars::
|
||||||
|
|
||||||
|
[pair my_calendars]
|
||||||
|
a = "my_calendars_local"
|
||||||
|
b = "my_calendars_remote"
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
metadata = ["color"]
|
||||||
|
|
||||||
|
[storage my_calendars_local]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "~/.calendars/"
|
||||||
|
fileext = ".ics"
|
||||||
|
|
||||||
|
[storage my_calendars_remote]
|
||||||
|
type = "caldav"
|
||||||
|
|
||||||
|
url = "https://owncloud.example.com/remote.php/caldav/"
|
||||||
|
username = "bob"
|
||||||
|
password = "asdf"
|
||||||
|
|
||||||
|
Run ``vdirsyncer discover`` for discovery. Then you can use ``vdirsyncer
|
||||||
|
metasync`` to synchronize the ``color`` property between your local calendars
|
||||||
|
in ``~/.calendars/`` and your ownCloud. Locally the color is just represented
|
||||||
|
as a file called ``color`` within the calendar folder.
|
||||||
|
|
||||||
|
.. _collections_tutorial:
|
||||||
|
|
||||||
|
More information about collections
|
||||||
|
----------------------------------
|
||||||
|
|
||||||
|
"Collection" is a collective term for addressbooks and calendars. A CardDAV or
|
||||||
|
CalDAV server can contain several "collections" which correspond to several
|
||||||
|
addressbooks or calendars.
|
||||||
|
|
||||||
|
Each collection from a storage has a "collection name", a unique identifier for each
|
||||||
|
collection. In the case of :storage:`filesystem`-storage, this is the name of the
|
||||||
|
directory that represents the collection, in the case of the DAV-storages this
|
||||||
|
is the last segment of the URL. We use this identifier in the ``collections``
|
||||||
|
parameter in the ``pair``-section.
|
||||||
|
|
||||||
|
This identifier doesn't change even if you rename your calendar in whatever UI
|
||||||
|
you have, because that only changes the so-called "displayname" property [3]_.
|
||||||
|
On some servers (iCloud, Google) this identifier is randomly generated and has
|
||||||
|
no correlation with the displayname you chose.
|
||||||
|
|
||||||
|
.. [3] Which you can also synchronize with ``metasync`` using ``metadata =
|
||||||
|
["displayname"]``.
|
||||||
|
|
||||||
|
There are three collection names that have a special meaning:
|
||||||
|
|
||||||
|
- ``"from a"``, ``"from b"``: A placeholder for all collections that can be
|
||||||
|
found on side A/B when running ``vdirsyncer discover``.
|
||||||
|
- ``null``: The parameters given to the storage are exact and require no discovery.
|
||||||
|
|
||||||
|
The last one requires a bit more explanation. Assume this config which
|
||||||
|
synchronizes two directories of addressbooks::
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".vcf"
|
||||||
|
path = "./contacts_foo/"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".vcf"
|
||||||
|
path = "./contacts_bar/"
|
||||||
|
|
||||||
|
As we saw previously this will synchronize all collections in
|
||||||
|
``./contacts_foo/`` with each same-named collection in ``./contacts_bar/``. If
|
||||||
|
there's a collection that exists on one side but not the other, vdirsyncer will
|
||||||
|
ask whether to create that folder on the other side.
|
||||||
|
|
||||||
|
If we set ``collections = null``, ``./contacts_foo/`` and ``./contacts_bar/``
|
||||||
|
are no longer treated as folders with collections, but as collections
|
||||||
|
themselves. This means that ``./contacts_foo/`` and ``./contacts_bar/`` will
|
||||||
|
contain ``.vcf``-files, not subfolders that contain ``.vcf``-files.
|
||||||
|
|
||||||
|
This is useful in situations where listing all collections fails because your
|
||||||
|
DAV-server doesn't support it, for example. In this case, you can set ``url``
|
||||||
|
of your :storage:`carddav`- or :storage:`caldav`-storage to a URL that points
|
||||||
|
to your CalDAV/CardDAV collection directly.
|
||||||
|
|
||||||
|
Note that not all storages support the ``null``-collection, for example
|
||||||
|
:storage:`google_contacts` and :storage:`google_calendar` don't.
|
||||||
|
|
||||||
|
Advanced collection configuration (server-to-server sync)
|
||||||
|
---------------------------------------------------------
|
||||||
|
|
||||||
|
The examples above are good enough if you want to synchronize a remote server
|
||||||
|
to a previously empty disk. However, even more trickery is required when you
|
||||||
|
have two servers with *already existing* collections which you want to
|
||||||
|
synchronize.
|
||||||
|
|
||||||
|
The core problem in this situation is that vdirsyncer pairs collections by
|
||||||
|
collection name by default (see definition in previous section, basically a
|
||||||
|
foldername or a remote UUID). When you have two servers, those collection names
|
||||||
|
may not line up as nicely. Suppose you created two calendars "Test", one on a
|
||||||
|
NextCloud server and one on iCloud, using their respective web interfaces. The
|
||||||
|
URLs look something like this::
|
||||||
|
|
||||||
|
NextCloud: https://example.com/remote.php/dav/calendars/user/test/
|
||||||
|
iCloud: https://p-XX.caldav.icloud.com/YYY/calendars/3b4c9995-5c67-4021-9fa0-be4633623e1c
|
||||||
|
|
||||||
|
Those are two DAV calendar collections. Their collection names will be ``test``
|
||||||
|
and ``3b4c9995-5c67-4021-9fa0-be4633623e1c`` respectively, so you don't have a
|
||||||
|
single name you can address them both with. You will need to manually "pair"
|
||||||
|
(no pun intended) those collections up like this::
|
||||||
|
|
||||||
|
[pair doublecloud]
|
||||||
|
a = "my_nextcloud"
|
||||||
|
b = "my_icloud"
|
||||||
|
collections = [["mytest", "test", "3b4c9995-5c67-4021-9fa0-be4633623e1c"]]
|
||||||
|
|
||||||
|
``mytest`` gives that combination of calendars a nice name you can use when
|
||||||
|
talking about it, so you would use ``vdirsyncer sync doublecloud/mytest`` to
|
||||||
|
say: "Only synchronize these two storages, nothing else that may be
|
||||||
|
configured".
|
||||||
|
|
||||||
|
.. note:: Why not use displaynames?
|
||||||
|
|
||||||
|
You may wonder why vdirsyncer just couldn't figure this out by itself. After
|
||||||
|
all, you did name both collections "Test" (which is called "the
|
||||||
|
displayname"), so why not pair collections by that value?
|
||||||
|
|
||||||
|
There are a few problems with this idea:
|
||||||
|
|
||||||
|
- Two calendars may have the same exact displayname.
|
||||||
|
- A calendar may not have a (non-empty) displayname.
|
||||||
|
- The displayname might change. Either you rename the calendar, or the
|
||||||
|
calendar renames itself because you change a language setting.
|
||||||
|
|
||||||
|
In the end, that property was never designed to be parsed by machines.
|
||||||
|
|
|
||||||
10
docs/tutorials/baikal.rst
Normal file
10
docs/tutorials/baikal.rst
Normal file
|
|
@ -0,0 +1,10 @@
|
||||||
|
======
|
||||||
|
Baikal
|
||||||
|
======
|
||||||
|
|
||||||
|
Vdirsyncer is continuously tested against the latest version of Baikal_.
|
||||||
|
|
||||||
|
- Baikal up to ``0.2.7`` also uses an old version of SabreDAV, with the same
|
||||||
|
issue as ownCloud, see :gh:`160`. This issue is fixed in later versions.
|
||||||
|
|
||||||
|
.. _Baikal: http://sabre.io/baikal/
|
||||||
95
docs/tutorials/claws-mail.rst
Normal file
95
docs/tutorials/claws-mail.rst
Normal file
|
|
@ -0,0 +1,95 @@
|
||||||
|
.. _claws-mail-tutorial:
|
||||||
|
|
||||||
|
Vdirsyncer with Claws Mail
|
||||||
|
==========================
|
||||||
|
|
||||||
|
First of all, Claws-Mail only supports **read-only** functions for vCards. It
|
||||||
|
can only read contacts, but there's no editor.
|
||||||
|
|
||||||
|
Preparation
|
||||||
|
-----------
|
||||||
|
|
||||||
|
We need to install vdirsyncer, for that look :doc:`here </installation>`. Then
|
||||||
|
we need to create some folders::
|
||||||
|
|
||||||
|
mkdir ~/.vdirsyncer
|
||||||
|
mkdir ~/.contacts
|
||||||
|
|
||||||
|
Configuration
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Now we create the configuration for vdirsyncer. Open
|
||||||
|
``~/.vdirsyncer/config`` with a text editor. The config should look like
|
||||||
|
this:
|
||||||
|
|
||||||
|
.. code:: ini
|
||||||
|
|
||||||
|
[general]
|
||||||
|
status_path = "~/.vdirsyncer/status/"
|
||||||
|
|
||||||
|
[storage local]
|
||||||
|
type = "singlefile"
|
||||||
|
path = "~/.contacts/%s.vcf"
|
||||||
|
|
||||||
|
[storage online]
|
||||||
|
type = "carddav"
|
||||||
|
url = "CARDDAV_LINK"
|
||||||
|
username = "USERNAME"
|
||||||
|
password = "PASSWORD"
|
||||||
|
read_only = true
|
||||||
|
|
||||||
|
[pair contacts]
|
||||||
|
a = "local"
|
||||||
|
b = "online"
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
conflict_resolution = "b wins"
|
||||||
|
|
||||||
|
- In the general section, we define the status folder path, for discovered
|
||||||
|
collections and generally stuff that needs to persist between syncs.
|
||||||
|
- In the local section we define that all contacts should be synced in a single
|
||||||
|
file and the path for the contacts.
|
||||||
|
- In the online section you must change the url, username and password to your
|
||||||
|
setup. We also set the storage to read-only such that no changes get
|
||||||
|
synchronized back. Claws-Mail should not be able to do any changes anyway,
|
||||||
|
but this is one extra safety step in case files get corrupted or vdirsyncer
|
||||||
|
behaves erratically. You can leave that part out if you want to be able to
|
||||||
|
edit those files locally.
|
||||||
|
- In the last section we configure that online contacts win in a conflict
|
||||||
|
situation. Configure this part however you like. A correct value depends on
|
||||||
|
which side is most likely to be up-to-date.
|
||||||
|
|
||||||
|
Sync
|
||||||
|
----
|
||||||
|
|
||||||
|
Now we discover and sync our contacts::
|
||||||
|
|
||||||
|
vdirsyncer discover contacts
|
||||||
|
vdirsyncer sync contacts
|
||||||
|
|
||||||
|
Claws Mail
|
||||||
|
----------
|
||||||
|
|
||||||
|
Open Claws-Mail. Go to **Tools** => **Addressbook**.
|
||||||
|
|
||||||
|
Click on **Addressbook** => **New vCard**. Choose a name for the book.
|
||||||
|
|
||||||
|
Then search for the vCard in the folder **~/.contacts/**. Click
|
||||||
|
ok, and you will see your contacts.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
Claws-Mail shows only contacts that have a mail address.
|
||||||
|
|
||||||
|
Crontab
|
||||||
|
-------
|
||||||
|
|
||||||
|
At the end we create a crontab entry, so that vdirsyncer automatically syncs
|
||||||
|
our contacts every 30 minutes::
|
||||||
|
|
||||||
|
crontab -e
|
||||||
|
|
||||||
|
At the end of that file, enter this line::
|
||||||
|
|
||||||
|
*/30 * * * * /usr/local/bin/vdirsyncer sync > /dev/null
|
||||||
|
|
||||||
|
And you're done!
|
||||||
41
docs/tutorials/davmail.rst
Normal file
41
docs/tutorials/davmail.rst
Normal file
|
|
@ -0,0 +1,41 @@
|
||||||
|
.. _davmail_setup:
|
||||||
|
|
||||||
|
===========================
|
||||||
|
DavMail (Exchange, Outlook)
|
||||||
|
===========================
|
||||||
|
|
||||||
|
DavMail_ is a proxy program that allows you to use Card- and CalDAV clients
|
||||||
|
with Outlook. That allows you to use vdirsyncer with Outlook.
|
||||||
|
|
||||||
|
In practice your success with DavMail may wildly vary. Depending on your
|
||||||
|
Exchange server you might get confronted with weird errors of all sorts
|
||||||
|
(including data-loss).
|
||||||
|
|
||||||
|
**Make absolutely sure you use the latest DavMail**::
|
||||||
|
|
||||||
|
[storage outlook]
|
||||||
|
type = "caldav"
|
||||||
|
url = "http://localhost:1080/users/user@example.com/calendar/"
|
||||||
|
username = "user@example.com"
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
- Older versions of DavMail handle URLs case-insensitively. See :gh:`144`.
|
||||||
|
- DavMail is handling malformed data on the Exchange server very poorly. In
|
||||||
|
such cases the `Calendar Checking Tool for Outlook
|
||||||
|
<https://www.microsoft.com/en-us/download/details.aspx?id=28786>`_ might
|
||||||
|
help.
|
||||||
|
- In some cases, you may see errors about duplicate events. It may look
|
||||||
|
something like this::
|
||||||
|
|
||||||
|
error: my_calendar/calendar: Storage "my_calendar_remote/calendar" contains multiple items with the same UID or even content. Vdirsyncer will now abort the synchronization of this collection, because the fix for this is not clear; It could be the result of a badly behaving server. You can try running:
|
||||||
|
error:
|
||||||
|
error: vdirsyncer repair my_calendar_remote/calendar
|
||||||
|
error:
|
||||||
|
error: But make sure to have a backup of your data in some form. The offending hrefs are:
|
||||||
|
[...]
|
||||||
|
|
||||||
|
In order to fix this, you can try the Remove-DuplicateAppointments.ps1_
|
||||||
|
PowerShell script that Microsoft has come up with in order to remove duplicates.
|
||||||
|
|
||||||
|
.. _DavMail: http://davmail.sourceforge.net/
|
||||||
|
.. _Remove-DuplicateAppointments.ps1: https://blogs.msdn.microsoft.com/emeamsgdev/2015/02/12/powershell-remove-duplicate-calendar-appointments/
|
||||||
23
docs/tutorials/fastmail.rst
Normal file
23
docs/tutorials/fastmail.rst
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
========
|
||||||
|
FastMail
|
||||||
|
========
|
||||||
|
|
||||||
|
Vdirsyncer is continuously tested against FastMail_, thanks to them for
|
||||||
|
providing a free account for this purpose. There are no known issues with it.
|
||||||
|
`FastMail's support pages
|
||||||
|
<https://www.fastmail.com/help/technical/servernamesandports.html>`_ provide
|
||||||
|
the settings to use::
|
||||||
|
|
||||||
|
[storage cal]
|
||||||
|
type = "caldav"
|
||||||
|
url = "https://caldav.fastmail.com/"
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
[storage card]
|
||||||
|
type = "carddav"
|
||||||
|
url = "https://carddav.fastmail.com/"
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
.. _FastMail: https://www.fastmail.com/
|
||||||
9
docs/tutorials/google.rst
Normal file
9
docs/tutorials/google.rst
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
======
|
||||||
|
Google
|
||||||
|
======
|
||||||
|
|
||||||
|
Using vdirsyncer with Google Calendar is possible as of 0.10, but it is not
|
||||||
|
tested frequently. You can use :storage:`google_contacts` and
|
||||||
|
:storage:`google_calendar`.
|
||||||
|
|
||||||
|
For more information see :gh:`202` and :gh:`8`.
|
||||||
35
docs/tutorials/icloud.rst
Normal file
35
docs/tutorials/icloud.rst
Normal file
|
|
@ -0,0 +1,35 @@
|
||||||
|
.. _icloud_setup:
|
||||||
|
|
||||||
|
======
|
||||||
|
iCloud
|
||||||
|
======
|
||||||
|
|
||||||
|
Vdirsyncer is regularly tested against iCloud_.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage cal]
|
||||||
|
type = "caldav"
|
||||||
|
url = "https://caldav.icloud.com/"
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
[storage card]
|
||||||
|
type = "carddav"
|
||||||
|
url = "https://contacts.icloud.com/"
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
Problems:
|
||||||
|
|
||||||
|
- Vdirsyncer can't do two-factor auth with iCloud (there doesn't seem to be a
|
||||||
|
way to do two-factor auth over the DAV APIs). You'll need to use `app-specific
|
||||||
|
passwords <https://support.apple.com/en-us/HT204397>`_ instead.
|
||||||
|
- iCloud has a few special requirements when creating collections. In principle
|
||||||
|
vdirsyncer can do it, but it is recommended to create them from an Apple
|
||||||
|
client (or the iCloud web interface).
|
||||||
|
|
||||||
|
- iCloud requires a minimum length of collection names.
|
||||||
|
- Calendars created by vdirsyncer cannot be used as tasklists.
|
||||||
|
|
||||||
|
.. _iCloud: https://www.icloud.com/
|
||||||
63
docs/tutorials/index.rst
Normal file
63
docs/tutorials/index.rst
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
===============
|
||||||
|
Other tutorials
|
||||||
|
===============
|
||||||
|
|
||||||
|
The following section contains tutorials not explicitly about any particular
|
||||||
|
core function of vdirsyncer. They usually show how to integrate vdirsyncer with
|
||||||
|
third-party software. Because of that, it may be that the information regarding
|
||||||
|
that other software only applies to specific versions of them.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
Please :doc:`contribute </contributing>` your own tutorials too! Pages are
|
||||||
|
often only stubs and are lacking full examples.
|
||||||
|
|
||||||
|
Client applications
|
||||||
|
===================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
claws-mail
|
||||||
|
systemd-timer
|
||||||
|
todoman
|
||||||
|
|
||||||
|
Further applications, with missing pages:
|
||||||
|
|
||||||
|
- khal_, a CLI calendar application supporting :doc:`vdir </vdir>`. You can use
|
||||||
|
:storage:`filesystem` with it.
|
||||||
|
- Many graphical calendar apps such as dayplanner_, Orage_ or rainlendar_ save
|
||||||
|
a calendar in a single ``.ics`` file. You can use :storage:`singlefile` with
|
||||||
|
those.
|
||||||
|
- khard_, a commandline addressbook supporting :doc:`vdir </vdir>`. You can use
|
||||||
|
:storage:`filesystem` with it.
|
||||||
|
- contactquery.c_, a small program explicitly written for querying vdirs from
|
||||||
|
mutt.
|
||||||
|
- mates_, a commandline addressbook supporting :doc:`vdir </vdir>`.
|
||||||
|
- vdirel_, access :doc:`vdir </vdir>` contacts from Emacs.
|
||||||
|
|
||||||
|
.. _khal: http://lostpackets.de/khal/
|
||||||
|
.. _dayplanner: http://www.day-planner.org/
|
||||||
|
.. _Orage: https://gitlab.xfce.org/apps/orage
|
||||||
|
.. _rainlendar: http://www.rainlendar.net/
|
||||||
|
.. _khard: https://github.com/scheibler/khard/
|
||||||
|
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c
|
||||||
|
.. _mates: https://github.com/pimutils/mates.rs
|
||||||
|
.. _vdirel: https://github.com/DamienCassou/vdirel
|
||||||
|
|
||||||
|
.. _supported-servers:
|
||||||
|
|
||||||
|
Servers
|
||||||
|
=======
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
baikal
|
||||||
|
davmail
|
||||||
|
fastmail
|
||||||
|
google
|
||||||
|
icloud
|
||||||
|
nextcloud
|
||||||
|
owncloud
|
||||||
|
radicale
|
||||||
|
xandikos
|
||||||
20
docs/tutorials/nextcloud.rst
Normal file
20
docs/tutorials/nextcloud.rst
Normal file
|
|
@ -0,0 +1,20 @@
|
||||||
|
=========
|
||||||
|
nextCloud
|
||||||
|
=========
|
||||||
|
|
||||||
|
Vdirsyncer is continuously tested against the latest version of nextCloud_::
|
||||||
|
|
||||||
|
[storage cal]
|
||||||
|
type = "caldav"
|
||||||
|
url = "https://nextcloud.example.com/"
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
[storage card]
|
||||||
|
type = "carddav"
|
||||||
|
url = "https://nextcloud.example.com/"
|
||||||
|
|
||||||
|
- WebCAL-subscriptions can't be discovered by vdirsyncer. See `this relevant
|
||||||
|
issue <https://github.com/nextcloud/calendar/issues/63>`_.
|
||||||
|
|
||||||
|
.. _nextCloud: https://nextcloud.com/
|
||||||
26
docs/tutorials/owncloud.rst
Normal file
26
docs/tutorials/owncloud.rst
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
.. _owncloud_setup:
|
||||||
|
|
||||||
|
========
|
||||||
|
ownCloud
|
||||||
|
========
|
||||||
|
|
||||||
|
Vdirsyncer is continuously tested against the latest version of ownCloud_::
|
||||||
|
|
||||||
|
[storage cal]
|
||||||
|
type = "caldav"
|
||||||
|
url = "https://example.com/remote.php/dav/"
|
||||||
|
username = ...
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
[storage card]
|
||||||
|
type = "carddav"
|
||||||
|
url = "https://example.com/remote.php/dav/"
|
||||||
|
username = ...
|
||||||
|
password = ...
|
||||||
|
|
||||||
|
- *Versions older than 7.0.0:* ownCloud uses SabreDAV, which had problems
|
||||||
|
detecting collisions and race-conditions. The problems were reported and are
|
||||||
|
fixed in SabreDAV's repo, and the corresponding fix is also in ownCloud since
|
||||||
|
7.0.0. See :gh:`16` for more information.
|
||||||
|
|
||||||
|
.. _ownCloud: https://owncloud.org/
|
||||||
26
docs/tutorials/radicale.rst
Normal file
26
docs/tutorials/radicale.rst
Normal file
|
|
@ -0,0 +1,26 @@
|
||||||
|
========
|
||||||
|
Radicale
|
||||||
|
========
|
||||||
|
|
||||||
|
Radicale_ is a very lightweight server, however, it intentionally doesn't
|
||||||
|
implement the CalDAV and CardDAV standards completely, which might lead to
|
||||||
|
issues even with very well-written clients. Apart from its non-conformity with
|
||||||
|
standards, there are multiple other problems with its code quality and the way
|
||||||
|
it is maintained. Consider using e.g. :doc:`xandikos` instead.
|
||||||
|
|
||||||
|
That said, vdirsyncer is continuously tested against the git version and the
|
||||||
|
latest PyPI release of Radicale.
|
||||||
|
|
||||||
|
- Vdirsyncer can't create collections on Radicale.
|
||||||
|
- Radicale doesn't `support time ranges in the calendar-query of CalDAV
|
||||||
|
<https://github.com/Kozea/Radicale/issues/146>`_, so setting ``start_date``
|
||||||
|
and ``end_date`` for :storage:`caldav` will have no or unpredicted
|
||||||
|
consequences.
|
||||||
|
|
||||||
|
- `Versions of Radicale older than 0.9b1 choke on RFC-conform queries for all
|
||||||
|
items of a collection <https://github.com/Kozea/Radicale/issues/143>`_.
|
||||||
|
|
||||||
|
You have to set ``item_types = ["VTODO", "VEVENT"]`` in
|
||||||
|
:storage:`caldav` for vdirsyncer to work with those versions.
|
||||||
|
|
||||||
|
.. _Radicale: http://radicale.org/
|
||||||
37
docs/tutorials/systemd-timer.rst
Normal file
37
docs/tutorials/systemd-timer.rst
Normal file
|
|
@ -0,0 +1,37 @@
|
||||||
|
.. _systemd_timer-tutorial:
|
||||||
|
|
||||||
|
Running as a systemd.timer
|
||||||
|
==========================
|
||||||
|
|
||||||
|
vdirsyncer includes unit files to run at an interval (by default every 15±5
|
||||||
|
minutes).
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
These are not installed when installing via pip, only via distribution
|
||||||
|
packages. If you installed via pip, or your distribution doesn't ship systemd
|
||||||
|
unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_
|
||||||
|
into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``.
|
||||||
|
|
||||||
|
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.service
|
||||||
|
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.timer
|
||||||
|
|
||||||
|
Activation
|
||||||
|
----------
|
||||||
|
|
||||||
|
To activate the timer, just run ``systemctl --user enable vdirsyncer.timer``.
|
||||||
|
To see logs of previous runs, use ``journalctl --user -u vdirsyncer``.
|
||||||
|
|
||||||
|
Configuration
|
||||||
|
-------------
|
||||||
|
|
||||||
|
It's quite possible that the default "every fifteen minutes" interval isn't to
|
||||||
|
your liking. No default will suit everybody, but this is configurable by simply
|
||||||
|
running::
|
||||||
|
|
||||||
|
systemctl --user edit vdirsyncer.timer
|
||||||
|
|
||||||
|
This will open a blank editor, where you can override the timer by including::
|
||||||
|
|
||||||
|
OnBootSec=5m # This is how long after boot the first run takes place.
|
||||||
|
OnUnitActiveSec=15m # This is how often subsequent runs take place.
|
||||||
69
docs/tutorials/todoman.rst
Normal file
69
docs/tutorials/todoman.rst
Normal file
|
|
@ -0,0 +1,69 @@
|
||||||
|
=======
|
||||||
|
Todoman
|
||||||
|
=======
|
||||||
|
|
||||||
|
The iCalendar format also supports saving tasks in form of ``VTODO``-entries,
|
||||||
|
with the same file extension as normal events: ``.ics``. Many CalDAV servers
|
||||||
|
support synchronizing tasks, vdirsyncer does too.
|
||||||
|
|
||||||
|
todoman_ is a CLI task manager supporting :doc:`vdir </vdir>`. Its interface is
|
||||||
|
similar to the ones of Taskwarrior or the todo.txt CLI app. You can use
|
||||||
|
:storage:`filesystem` with it.
|
||||||
|
|
||||||
|
.. _todoman: http://todoman.readthedocs.io/
|
||||||
|
|
||||||
|
Setting up vdirsyncer
|
||||||
|
=====================
|
||||||
|
|
||||||
|
For this tutorial we will use NextCloud.
|
||||||
|
|
||||||
|
Assuming a config like this::
|
||||||
|
|
||||||
|
[general]
|
||||||
|
status_path = "~/.vdirsyncer/status/"
|
||||||
|
|
||||||
|
[pair calendars]
|
||||||
|
conflict_resolution = "b wins"
|
||||||
|
a = "calendars_local"
|
||||||
|
b = "calendars_dav"
|
||||||
|
collections = ["from b"]
|
||||||
|
metadata = ["color", "displayname"]
|
||||||
|
|
||||||
|
[storage calendars_local]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "~/.calendars/"
|
||||||
|
fileext = ".ics"
|
||||||
|
|
||||||
|
[storage calendars_dav]
|
||||||
|
type = "caldav"
|
||||||
|
url = "https://nextcloud.example.net/"
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
``vdirsyncer sync`` will then synchronize the calendars of your NextCloud_
|
||||||
|
instance to subfolders of ``~/.calendars/``.
|
||||||
|
|
||||||
|
.. _NextCloud: https://nextcloud.com/
|
||||||
|
|
||||||
|
Setting up todoman
|
||||||
|
==================
|
||||||
|
|
||||||
|
Write this to ``~/.config/todoman/config.py``::
|
||||||
|
|
||||||
|
path = "~/.calendars/*"
|
||||||
|
|
||||||
|
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
|
||||||
|
which are exactly the tasklists we want. Now you can use ``todoman`` as
|
||||||
|
described in its documentation_ and run ``vdirsyncer sync`` to synchronize the changes to NextCloud.
|
||||||
|
|
||||||
|
.. _glob: https://en.wikipedia.org/wiki/Glob_(programming)
|
||||||
|
.. _documentation: http://todoman.readthedocs.io/
|
||||||
|
|
||||||
|
Other clients
|
||||||
|
=============
|
||||||
|
|
||||||
|
The following client applications also synchronize over CalDAV:
|
||||||
|
|
||||||
|
- The Tasks-app found on iOS
|
||||||
|
- `OpenTasks for Android <https://github.com/dmfs/opentasks>`_
|
||||||
|
- The `Tasks <https://apps.nextcloud.com/apps/tasks>`_-app for NextCloud's web UI
|
||||||
23
docs/tutorials/xandikos.rst
Normal file
23
docs/tutorials/xandikos.rst
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
========
|
||||||
|
Xandikos
|
||||||
|
========
|
||||||
|
|
||||||
|
Xandikos_ is a lightweight, yet complete CalDAV and CardDAV server, backed by
|
||||||
|
git. Vdirsyncer is continuously tested against its latest version.
|
||||||
|
|
||||||
|
After running ``./bin/xandikos --defaults -d $HOME/dav``, you should be able to
|
||||||
|
point vdirsyncer against the root of Xandikos like this::
|
||||||
|
|
||||||
|
[storage cal]
|
||||||
|
type = "caldav"
|
||||||
|
url = "https://xandikos.example.com/"
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
[storage card]
|
||||||
|
type = "carddav"
|
||||||
|
url = "https://xandikos.example.com/"
|
||||||
|
username = "..."
|
||||||
|
password = "..."
|
||||||
|
|
||||||
|
.. _Xandikos: https://github.com/jelmer/xandikos
|
||||||
|
|
@ -1,10 +1,14 @@
|
||||||
=======================
|
=======================
|
||||||
The vdir Storage Format
|
The Vdir Storage Format
|
||||||
=======================
|
=======================
|
||||||
|
|
||||||
This document describes a standard for storing calendars and contacts on a
|
This document describes a standard for storing calendars and contacts on a
|
||||||
filesystem, with the main goal of being easy to implement.
|
filesystem, with the main goal of being easy to implement.
|
||||||
|
|
||||||
|
Vdirsyncer synchronizes to vdirs via :storage:`filesystem`. Each vdir
|
||||||
|
(basically just a directory with some files in it) represents a calendar or
|
||||||
|
addressbook.
|
||||||
|
|
||||||
Basic Structure
|
Basic Structure
|
||||||
===============
|
===============
|
||||||
|
|
||||||
|
|
@ -18,32 +22,50 @@ An item is:
|
||||||
- An iCalendar_ file, in which case the file extension *must* be `.ics`.
|
- An iCalendar_ file, in which case the file extension *must* be `.ics`.
|
||||||
|
|
||||||
An item *should* contain a ``UID`` property as described by the vCard and
|
An item *should* contain a ``UID`` property as described by the vCard and
|
||||||
iCalendar standards.
|
iCalendar standards. If it contains more than one ``UID`` property, the values
|
||||||
|
of those *must* not differ.
|
||||||
|
|
||||||
The filename *must* consist of the ``ident``, followed by the file extension.
|
The file *must* contain exactly one event, task or contact. In most cases this
|
||||||
The ``ident`` is either the ``UID``, if the item has one, else a string with
|
also implies only one ``VEVENT``/``VTODO``/``VCARD`` component per file, but
|
||||||
similar properties as the ``UID``:
|
e.g. recurrence exceptions would require multiple ``VEVENT`` components per
|
||||||
|
event.
|
||||||
|
|
||||||
Type name: UID
|
The filename should have similar properties as the ``UID`` of the file content.
|
||||||
|
However, there is no requirement for these two to be the same. Programs may
|
||||||
Type purpose: To specify a value that represents a globally unique
|
choose to store additional metadata in that filename, however, at the same time
|
||||||
identifier corresponding to the individual or resource associated
|
they *must not* assume that the metadata they included will be preserved by
|
||||||
with the vCard.
|
other programs.
|
||||||
|
|
||||||
-- The vCard_ RFC
|
|
||||||
|
|
||||||
One reason this format was chosen is due to its compatibility with the CardDAV_
|
|
||||||
and CalDAV_ standards.
|
|
||||||
|
|
||||||
.. _vCard: https://tools.ietf.org/html/rfc6350
|
.. _vCard: https://tools.ietf.org/html/rfc6350
|
||||||
.. _iCalendar: https://tools.ietf.org/html/rfc5545
|
.. _iCalendar: https://tools.ietf.org/html/rfc5545
|
||||||
.. _CardDAV: http://tools.ietf.org/html/rfc6352
|
.. _CardDAV: http://tools.ietf.org/html/rfc6352
|
||||||
.. _CalDAV: http://tools.ietf.org/search/rfc4791
|
.. _CalDAV: http://tools.ietf.org/search/rfc4791
|
||||||
|
|
||||||
|
Metadata
|
||||||
|
========
|
||||||
|
|
||||||
|
Any of the below metadata files may be absent. None of the files listed below
|
||||||
|
have any file extensions.
|
||||||
|
|
||||||
|
- A file called ``color`` inside the vdir indicates the vdir's color, a
|
||||||
|
property that is only relevant in UI design.
|
||||||
|
|
||||||
|
Its content is an ASCII-encoded hex-RGB value of the form ``#RRGGBB``. For
|
||||||
|
example, a file content of ``#FF0000`` indicates that the vdir has a red
|
||||||
|
(user-visible) color. No short forms or informal values such as ``red`` (as
|
||||||
|
known from CSS, for example) are allowed. The prefixing ``#`` must be
|
||||||
|
present.
|
||||||
|
|
||||||
|
- Files called ``displayname`` and ``description`` contain a UTF-8 encoded label/
|
||||||
|
description, that may be used to represent the vdir in UIs.
|
||||||
|
|
||||||
|
- A file called ``order`` inside the vdir includes the relative order
|
||||||
|
of the calendar, a property that is only relevant in UI design.
|
||||||
|
|
||||||
Writing to vdirs
|
Writing to vdirs
|
||||||
================
|
================
|
||||||
|
|
||||||
Creating and modifying items *should* happen atomically_.
|
Creating and modifying items or metadata files *should* happen atomically_.
|
||||||
|
|
||||||
Writing to a temporary file on the same physical device, and then moving it to
|
Writing to a temporary file on the same physical device, and then moving it to
|
||||||
the appropriate location is usually a very effective solution. For this
|
the appropriate location is usually a very effective solution. For this
|
||||||
|
|
@ -62,8 +84,11 @@ Reading from vdirs
|
||||||
- The ``ident`` part of the filename *should not* be parsed to improve the
|
- The ``ident`` part of the filename *should not* be parsed to improve the
|
||||||
speed of item lookup.
|
speed of item lookup.
|
||||||
|
|
||||||
Rationale
|
Considerations
|
||||||
=========
|
==============
|
||||||
|
|
||||||
|
The primary reason this format was chosen is due to its compatibility with the
|
||||||
|
CardDAV_ and CalDAV_ standards.
|
||||||
|
|
||||||
Performance
|
Performance
|
||||||
-----------
|
-----------
|
||||||
|
|
@ -74,7 +99,7 @@ collections for faster search and lookup.
|
||||||
|
|
||||||
The reason items' filenames don't contain any extra information is simple: The
|
The reason items' filenames don't contain any extra information is simple: The
|
||||||
solutions presented induced duplication of data, where one duplicate might
|
solutions presented induced duplication of data, where one duplicate might
|
||||||
become out of date because of bad implementations. As it stands right now, a
|
become out of date because of bad implementations. As it stands right now, an
|
||||||
index format could be formalized separately though.
|
index format could be formalized separately though.
|
||||||
|
|
||||||
vdirsyncer doesn't really have to bother about efficient item lookup, because
|
vdirsyncer doesn't really have to bother about efficient item lookup, because
|
||||||
|
|
|
||||||
55
docs/when.rst
Normal file
55
docs/when.rst
Normal file
|
|
@ -0,0 +1,55 @@
|
||||||
|
==========================
|
||||||
|
When do I need Vdirsyncer?
|
||||||
|
==========================
|
||||||
|
|
||||||
|
Why not Dropbox + todo.txt?
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
Projects like `todo.txt <http://todotxt.com/>`_ criticize the complexity of
|
||||||
|
modern productivity apps, and that rightfully. So they set out to create a new,
|
||||||
|
super-simple, human-readable format, such that vim suffices for viewing the raw
|
||||||
|
data. However, when they're faced with the question how to synchronize that
|
||||||
|
data across multiple devices, they seemed to have reached the dead end with
|
||||||
|
their novel idea: "Let's just use Dropbox".
|
||||||
|
|
||||||
|
What does file sync software do if both files have changed since the last sync?
|
||||||
|
The answer is to ignore the question, just sync as often as possible, and hope
|
||||||
|
for the best. Because if it comes to a sync conflict, most sync services are
|
||||||
|
not daring to merge files, and create two copies on each computer instead.
|
||||||
|
Merging the two task lists is left to the user.
|
||||||
|
|
||||||
|
A better idea would've been to use ``git`` to synchronize the ``todo.txt``
|
||||||
|
file, which is at least able to resolve some basic conflicts.
|
||||||
|
|
||||||
|
Why not file sync (Dropbox, git, ...) + vdir?
|
||||||
|
---------------------------------------------
|
||||||
|
|
||||||
|
Since :doc:`vdirs <vdir>` are just a bunch of files, it is obvious to try *file
|
||||||
|
synchronization* for synchronizing your data between multiple computers, such
|
||||||
|
as:
|
||||||
|
|
||||||
|
* `Syncthing <https://syncthing.net/>`_
|
||||||
|
* `Dropbox <https://dropbox.com/>`_ or one of the gajillion services like it
|
||||||
|
* `unison <https://www.cis.upenn.edu/~bcpierce/unison/>`_
|
||||||
|
* Just ``git`` with a ``sshd``.
|
||||||
|
|
||||||
|
The disadvantages of those solutions largely depend on the exact file sync
|
||||||
|
program chosen:
|
||||||
|
|
||||||
|
* Like with ``todo.txt``, Dropbox and friends are obviously agnostic/unaware of
|
||||||
|
the files' contents. If a file has changed on both sides, Dropbox just copies
|
||||||
|
both versions to both sides.
|
||||||
|
|
||||||
|
This is a good idea if the user is directly interfacing with the file system
|
||||||
|
and is able to resolve conflicts themselves. Here it might lead to
|
||||||
|
erroneous behavior with e.g. ``khal``, since there are now two events with
|
||||||
|
the same UID.
|
||||||
|
|
||||||
|
This point doesn't apply to git: It has very good merging capabilities,
|
||||||
|
better than what vdirsyncer currently has.
|
||||||
|
|
||||||
|
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
|
||||||
|
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
|
||||||
|
e.g. DAVx⁵_ or other apps bundled with smartphones.
|
||||||
|
|
||||||
|
.. _DAVx⁵: https://www.davx5.com/
|
||||||
78
example.cfg
78
example.cfg
|
|
@ -1,78 +0,0 @@
|
||||||
# An example configuration for vdirsyncer.
|
|
||||||
# Optional parameters are commented out.
|
|
||||||
|
|
||||||
[general]
|
|
||||||
# A folder where vdirsyncer can store some metadata about each pair.
|
|
||||||
status_path = ~/.vdirsyncer/status/
|
|
||||||
|
|
||||||
# CARDDAV
|
|
||||||
[pair bob_contacts]
|
|
||||||
# A `[pair <name>]` block defines two storages `a` and `b` that should be
|
|
||||||
# synchronized. The definition of these storages follows in `[storage <name>]`
|
|
||||||
# blocks. This is similar to accounts in OfflineIMAP.
|
|
||||||
a = bob_contacts_local
|
|
||||||
b = bob_contacts_remote
|
|
||||||
|
|
||||||
# If you want to synchronize several addressbooks, calendars etc that share
|
|
||||||
# the same storage location and differ only in a suffix to this location
|
|
||||||
# (i.e., a subdirectory) you can use collections. The comma-separated values
|
|
||||||
# in this parameter represent these subdirectories and are added as URL
|
|
||||||
# segments or similar.
|
|
||||||
|
|
||||||
# Together with the definition of the following two `[storage]` blocks below
|
|
||||||
# in this example it means that
|
|
||||||
# - https://owncloud.example.com/remote.php/carddav/addressbooks/bob/default/
|
|
||||||
# will get synced with ~/.contacts/default/
|
|
||||||
# - https://owncloud.example.com/remote.php/carddav/addressbooks/bob/work/
|
|
||||||
# will get synced with ~/.contacts/work/
|
|
||||||
|
|
||||||
# Omitting this parameter implies that the given path and URL in the
|
|
||||||
# corresponding `[storage <name>]` blocks are already pointing to a
|
|
||||||
# collection each.
|
|
||||||
collections = default,work
|
|
||||||
|
|
||||||
# To resolve a conflict the following values are possible:
|
|
||||||
# `None` - abort when collisions occur (default)
|
|
||||||
# `a wins` - assume a's items to be more up-to-date
|
|
||||||
# `b wins` - assume b's items to be more up-to-date
|
|
||||||
#conflict_resolution = None
|
|
||||||
|
|
||||||
[storage bob_contacts_local]
|
|
||||||
# A storage references actual data on a remote server or on the local disk.
|
|
||||||
# Similar to repositories in OfflineIMAP.
|
|
||||||
type = filesystem
|
|
||||||
path = ~/.contacts/
|
|
||||||
fileext = .vcf
|
|
||||||
# Create the directory if it doesn't exist: `True` or `False`
|
|
||||||
#create = True
|
|
||||||
#encoding = utf-8
|
|
||||||
|
|
||||||
[storage bob_contacts_remote]
|
|
||||||
type = carddav
|
|
||||||
url = https://owncloud.example.com/remote.php/carddav/addressbooks/bob/
|
|
||||||
# Auth types. If you know yours, set it explicitly for performance.
|
|
||||||
# - basic
|
|
||||||
# - digest
|
|
||||||
# - guess (default)
|
|
||||||
#auth = guess
|
|
||||||
#username =
|
|
||||||
# The password can also be fetched from the system password storage or netrc
|
|
||||||
#password =
|
|
||||||
|
|
||||||
# CALDAV
|
|
||||||
[pair bob_calendar]
|
|
||||||
a = bob_calendar_local
|
|
||||||
b = bob_calendar_remote
|
|
||||||
collections = private,work
|
|
||||||
|
|
||||||
[storage bob_calendar_local]
|
|
||||||
type = filesystem
|
|
||||||
path = ~/.calendars/
|
|
||||||
fileext = .ics
|
|
||||||
|
|
||||||
[storage bob_calendar_remote]
|
|
||||||
type = caldav
|
|
||||||
url = https://owncloud.example.com/remote.php/caldav/calendars/bob/
|
|
||||||
#auth = guess
|
|
||||||
#username =
|
|
||||||
#password =
|
|
||||||
29
publish-release.yaml
Normal file
29
publish-release.yaml
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
# Push new version to PyPI.
|
||||||
|
#
|
||||||
|
# Usage: hut builds submit publish-release.yaml --follow
|
||||||
|
|
||||||
|
image: alpine/edge
|
||||||
|
packages:
|
||||||
|
- py3-build
|
||||||
|
- py3-pip
|
||||||
|
- py3-setuptools
|
||||||
|
- py3-setuptools_scm
|
||||||
|
- py3-wheel
|
||||||
|
- twine
|
||||||
|
sources:
|
||||||
|
- https://github.com/pimutils/vdirsyncer
|
||||||
|
secrets:
|
||||||
|
- a36c8ba3-fba0-4338-b402-6aea0fbe771e # PyPI token.
|
||||||
|
environment:
|
||||||
|
CI: true
|
||||||
|
tasks:
|
||||||
|
- check-tag: |
|
||||||
|
cd vdirsyncer
|
||||||
|
git fetch --tags
|
||||||
|
|
||||||
|
# Stop here unless this is a tag.
|
||||||
|
git describe --exact-match --tags || complete-build
|
||||||
|
- publish: |
|
||||||
|
cd vdirsyncer
|
||||||
|
python -m build --no-isolation
|
||||||
|
twine upload --non-interactive dist/*
|
||||||
114
pyproject.toml
Normal file
114
pyproject.toml
Normal file
|
|
@ -0,0 +1,114 @@
|
||||||
|
# Vdirsyncer synchronizes calendars and contacts.
|
||||||
|
#
|
||||||
|
# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
|
||||||
|
# how to package vdirsyncer.
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=64", "setuptools_scm>=8"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "vdirsyncer"
|
||||||
|
authors = [
|
||||||
|
{name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
|
||||||
|
]
|
||||||
|
description = "Synchronize calendars and contacts"
|
||||||
|
readme = "README.rst"
|
||||||
|
requires-python = ">=3.9"
|
||||||
|
keywords = ["todo", "task", "icalendar", "cli"]
|
||||||
|
license = "BSD-3-Clause"
|
||||||
|
license-files = ["LICENSE"]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 4 - Beta",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Operating System :: POSIX",
|
||||||
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: 3.13",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Topic :: Internet",
|
||||||
|
"Topic :: Office/Business :: Scheduling",
|
||||||
|
"Topic :: Utilities",
|
||||||
|
]
|
||||||
|
dependencies = [
|
||||||
|
"click>=5.0,<9.0",
|
||||||
|
"click-log>=0.3.0,<0.5.0",
|
||||||
|
"requests>=2.20.0",
|
||||||
|
"aiohttp>=3.8.2,<4.0.0",
|
||||||
|
"aiostream>=0.4.3,<0.8.0",
|
||||||
|
"tenacity>=9.0.0",
|
||||||
|
]
|
||||||
|
dynamic = ["version"]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
google = ["aiohttp-oauthlib"]
|
||||||
|
test = [
|
||||||
|
"hypothesis>=6.72.0,<7.0.0",
|
||||||
|
"pytest",
|
||||||
|
"pytest-cov",
|
||||||
|
"pytest-httpserver",
|
||||||
|
"trustme",
|
||||||
|
"pytest-asyncio",
|
||||||
|
"aioresponses",
|
||||||
|
]
|
||||||
|
docs = [
|
||||||
|
"sphinx!=1.4.7",
|
||||||
|
"sphinx_rtd_theme",
|
||||||
|
"setuptools_scm",
|
||||||
|
]
|
||||||
|
check = [
|
||||||
|
"mypy",
|
||||||
|
"ruff",
|
||||||
|
"types-docutils",
|
||||||
|
"types-requests",
|
||||||
|
"types-setuptools",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
vdirsyncer = "vdirsyncer.cli:app"
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
|
extend-select = [
|
||||||
|
"B0",
|
||||||
|
"C4",
|
||||||
|
"E",
|
||||||
|
"I",
|
||||||
|
"RSE",
|
||||||
|
"SIM",
|
||||||
|
"TID",
|
||||||
|
"UP",
|
||||||
|
"W",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.lint.isort]
|
||||||
|
force-single-line = true
|
||||||
|
required-imports = ["from __future__ import annotations"]
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = """
|
||||||
|
--tb=short
|
||||||
|
--cov-config .coveragerc
|
||||||
|
--cov=vdirsyncer
|
||||||
|
--cov-report=term-missing:skip-covered
|
||||||
|
--no-cov-on-fail
|
||||||
|
--color=yes
|
||||||
|
"""
|
||||||
|
# filterwarnings=error
|
||||||
|
asyncio_default_fixture_loop_scope = "function"
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
ignore_missing_imports = true
|
||||||
|
|
||||||
|
[tool.coverage.report]
|
||||||
|
exclude_lines = [
|
||||||
|
"if TYPE_CHECKING:",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.setuptools.packages.find]
|
||||||
|
include = ["vdirsyncer*"]
|
||||||
|
|
||||||
|
[tool.setuptools_scm]
|
||||||
|
write_to = "vdirsyncer/version.py"
|
||||||
|
version_scheme = "no-guess-dev"
|
||||||
49
scripts/_build_deb_in_container.bash
Normal file
49
scripts/_build_deb_in_container.bash
Normal file
|
|
@ -0,0 +1,49 @@
|
||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# This script is mean to be run inside a dedicated container,
|
||||||
|
# and not interatively.
|
||||||
|
|
||||||
|
set -ex
|
||||||
|
|
||||||
|
export DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y build-essential fakeroot debhelper git
|
||||||
|
apt-get install -y python3-all python3-pip python3-venv
|
||||||
|
apt-get install -y ruby ruby-dev
|
||||||
|
|
||||||
|
pip3 install virtualenv virtualenv-tools3
|
||||||
|
virtualenv -p python3 /vdirsyncer/env/
|
||||||
|
|
||||||
|
gem install fpm
|
||||||
|
|
||||||
|
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
|
||||||
|
pip3 uninstall -y virtualenv
|
||||||
|
echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
|
||||||
|
chmod +x /usr/local/bin/virtualenv
|
||||||
|
|
||||||
|
cp -r /source/ /vdirsyncer/vdirsyncer/
|
||||||
|
cd /vdirsyncer/vdirsyncer/ || exit 2
|
||||||
|
mkdir /vdirsyncer/pkgs/
|
||||||
|
|
||||||
|
basename -- *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
|
||||||
|
# XXX: Do I really not want google support included?
|
||||||
|
(echo -n *.tar.gz; echo '[google]') | tee requirements.txt
|
||||||
|
fpm --verbose \
|
||||||
|
--input-type virtualenv \
|
||||||
|
--output-type deb \
|
||||||
|
--name "vdirsyncer-latest" \
|
||||||
|
--version "$(cat version)" \
|
||||||
|
--prefix /opt/venvs/vdirsyncer-latest \
|
||||||
|
--depends python3 \
|
||||||
|
requirements.txt
|
||||||
|
|
||||||
|
mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
|
||||||
|
|
||||||
|
cd /vdirsyncer/pkgs/
|
||||||
|
dpkg -i -- *.deb
|
||||||
|
|
||||||
|
# Check that it works:
|
||||||
|
LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
|
||||||
|
|
||||||
|
cp -- *.deb /source/
|
||||||
56
scripts/release-deb.sh
Normal file
56
scripts/release-deb.sh
Normal file
|
|
@ -0,0 +1,56 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
set -xeu
|
||||||
|
|
||||||
|
SCRIPT_PATH=$(realpath "$0")
|
||||||
|
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
|
||||||
|
|
||||||
|
# E.g.: debian, ubuntu
|
||||||
|
DISTRO=${DISTRO:1}
|
||||||
|
# E.g.: bullseye, bookwork
|
||||||
|
DISTROVER=${DISTROVER:2}
|
||||||
|
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
|
||||||
|
CONTEXT="$(mktemp -d)"
|
||||||
|
|
||||||
|
DEST_DIR="$SCRIPT_DIR/../$DISTRO-$DISTROVER"
|
||||||
|
|
||||||
|
cleanup() {
|
||||||
|
rm -rf "$CONTEXT"
|
||||||
|
}
|
||||||
|
trap cleanup EXIT
|
||||||
|
|
||||||
|
# Prepare files.
|
||||||
|
cp scripts/_build_deb_in_container.bash "$CONTEXT"
|
||||||
|
python setup.py sdist -d "$CONTEXT"
|
||||||
|
|
||||||
|
docker run -it \
|
||||||
|
--name "$CONTAINER_NAME" \
|
||||||
|
--volume "$CONTEXT:/source" \
|
||||||
|
"$DISTRO:$DISTROVER" \
|
||||||
|
bash /source/_build_deb_in_container.bash
|
||||||
|
|
||||||
|
# Keep around the package filename.
|
||||||
|
PACKAGE=$(ls "$CONTEXT"/*.deb)
|
||||||
|
PACKAGE=$(basename "$PACKAGE")
|
||||||
|
|
||||||
|
# Save the build deb files.
|
||||||
|
mkdir -p "$DEST_DIR"
|
||||||
|
cp "$CONTEXT"/*.deb "$DEST_DIR"
|
||||||
|
|
||||||
|
echo Build complete! 🤖
|
||||||
|
|
||||||
|
# Packagecloud uses some internal IDs for each distro.
|
||||||
|
# Extract the one for the distro we're publishing.
|
||||||
|
DISTRO_ID=$(
|
||||||
|
curl -s \
|
||||||
|
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/distributions.json | \
|
||||||
|
jq '.deb | .[] | select(.index_name=="'"$DISTRO"'") | .versions | .[] | select(.index_name=="'"$DISTROVER"'") | .id'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Actually push the package.
|
||||||
|
curl \
|
||||||
|
-F "package[distro_version_id]=$DISTRO_ID" \
|
||||||
|
-F "package[package_file]=@$DEST_DIR/$PACKAGE" \
|
||||||
|
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/repos/pimutils/vdirsyncer/packages.json
|
||||||
|
|
||||||
|
echo Done! ✨
|
||||||
|
|
@ -1,5 +0,0 @@
|
||||||
[wheel]
|
|
||||||
universal = 1
|
|
||||||
|
|
||||||
[pytest]
|
|
||||||
norecursedirs = tests/storage/dav/servers/*
|
|
||||||
48
setup.py
48
setup.py
|
|
@ -1,48 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
'''
|
|
||||||
vdirsyncer
|
|
||||||
~~~~~~~~~~
|
|
||||||
|
|
||||||
vdirsyncer is a synchronization tool for vdir. See the README for more
|
|
||||||
details.
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
import ast
|
|
||||||
import re
|
|
||||||
|
|
||||||
from setuptools import find_packages, setup
|
|
||||||
|
|
||||||
|
|
||||||
_version_re = re.compile(r'__version__\s+=\s+(.*)')
|
|
||||||
|
|
||||||
|
|
||||||
with open('vdirsyncer/__init__.py', 'rb') as f:
|
|
||||||
version = str(ast.literal_eval(_version_re.search(
|
|
||||||
f.read().decode('utf-8')).group(1)))
|
|
||||||
|
|
||||||
|
|
||||||
setup(
|
|
||||||
name='vdirsyncer',
|
|
||||||
version=version,
|
|
||||||
author='Markus Unterwaditzer',
|
|
||||||
author_email='markus@unterwaditzer.net',
|
|
||||||
url='https://github.com/untitaker/vdirsyncer',
|
|
||||||
description='A synchronization tool for vdir',
|
|
||||||
license='MIT',
|
|
||||||
long_description=open('README.rst').read(),
|
|
||||||
packages=find_packages(exclude=['tests.*', 'tests']),
|
|
||||||
include_package_data=True,
|
|
||||||
entry_points={
|
|
||||||
'console_scripts': ['vdirsyncer = vdirsyncer.cli:main']
|
|
||||||
},
|
|
||||||
install_requires=[
|
|
||||||
'click>=2.0',
|
|
||||||
'requests',
|
|
||||||
'lxml',
|
|
||||||
'icalendar>=3.6',
|
|
||||||
'requests_toolbelt>=0.3.0'
|
|
||||||
],
|
|
||||||
extras_require={'keyring': ['keyring']}
|
|
||||||
)
|
|
||||||
|
|
@ -1,37 +1,29 @@
|
||||||
# -*- coding: utf-8 -*-
|
"""
|
||||||
'''
|
Test suite for vdirsyncer.
|
||||||
tests
|
"""
|
||||||
~~~~~
|
|
||||||
|
|
||||||
Test suite for vdirsyncer.
|
from __future__ import annotations
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
import hypothesis.strategies as st
|
||||||
:license: MIT, see LICENSE for more details.
|
import urllib3.exceptions
|
||||||
'''
|
|
||||||
|
|
||||||
from vdirsyncer.utils.compat import text_type
|
from vdirsyncer.vobject import normalize_item
|
||||||
from vdirsyncer.utils.vobject import normalize_item as _normalize_item
|
|
||||||
|
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||||
|
|
||||||
|
|
||||||
def blow_up(*a, **kw):
|
def blow_up(*a, **kw):
|
||||||
raise AssertionError('Did not expect to be called.')
|
raise AssertionError("Did not expect to be called.")
|
||||||
|
|
||||||
|
|
||||||
def normalize_item(item):
|
|
||||||
if not isinstance(item, text_type):
|
|
||||||
item = item.raw
|
|
||||||
return tuple(sorted(_normalize_item(
|
|
||||||
item, use_icalendar=False).splitlines()))
|
|
||||||
|
|
||||||
|
|
||||||
def assert_item_equals(a, b):
|
def assert_item_equals(a, b):
|
||||||
assert normalize_item(a) == normalize_item(b)
|
assert normalize_item(a) == normalize_item(b)
|
||||||
|
|
||||||
|
|
||||||
VCARD_TEMPLATE = u'''BEGIN:VCARD
|
VCARD_TEMPLATE = """BEGIN:VCARD
|
||||||
VERSION:3.0
|
VERSION:3.0
|
||||||
FN:Cyrus Daboo
|
FN:Cyrus Daboo
|
||||||
N:Daboo;Cyrus
|
N:Daboo;Cyrus;;;
|
||||||
ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA
|
ADR;TYPE=POSTAL:;2822 Email HQ;Suite 2821;RFCVille;PA;15213;USA
|
||||||
EMAIL;TYPE=PREF:cyrus@example.com
|
EMAIL;TYPE=PREF:cyrus@example.com
|
||||||
NICKNAME:me
|
NICKNAME:me
|
||||||
|
|
@ -39,12 +31,12 @@ NOTE:Example VCard.
|
||||||
ORG:Self Employed
|
ORG:Self Employed
|
||||||
TEL;TYPE=VOICE:412 605 0499
|
TEL;TYPE=VOICE:412 605 0499
|
||||||
TEL;TYPE=FAX:412 605 0705
|
TEL;TYPE=FAX:412 605 0705
|
||||||
URL:http://www.example.com
|
URL;VALUE=URI:http://www.example.com
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{r}
|
UID:{uid}
|
||||||
END:VCARD'''
|
END:VCARD"""
|
||||||
|
|
||||||
TASK_TEMPLATE = u'''BEGIN:VCALENDAR
|
TASK_TEMPLATE = """BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//dmfs.org//mimedir.icalendar//EN
|
PRODID:-//dmfs.org//mimedir.icalendar//EN
|
||||||
BEGIN:VTODO
|
BEGIN:VTODO
|
||||||
|
|
@ -54,29 +46,65 @@ LAST-MODIFIED;VALUE=DATE-TIME:20140122T151338Z
|
||||||
SEQUENCE:2
|
SEQUENCE:2
|
||||||
SUMMARY:Book: Kowlani - Tödlicher Staub
|
SUMMARY:Book: Kowlani - Tödlicher Staub
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{r}
|
UID:{uid}
|
||||||
END:VTODO
|
END:VTODO
|
||||||
END:VCALENDAR'''
|
END:VCALENDAR"""
|
||||||
|
|
||||||
|
|
||||||
BARE_EVENT_TEMPLATE = u'''BEGIN:VEVENT
|
BARE_EVENT_TEMPLATE = """BEGIN:VEVENT
|
||||||
DTSTART:19970714T170000Z
|
DTSTART:19970714T170000Z
|
||||||
DTEND:19970715T035959Z
|
DTEND:19970715T035959Z
|
||||||
SUMMARY:Bastille Day Party
|
SUMMARY:Bastille Day Party
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{r}
|
UID:{uid}
|
||||||
END:VEVENT'''
|
END:VEVENT"""
|
||||||
|
|
||||||
|
|
||||||
EVENT_TEMPLATE = u'''BEGIN:VCALENDAR
|
EVENT_TEMPLATE = (
|
||||||
|
"""BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
''' + BARE_EVENT_TEMPLATE + u'''
|
"""
|
||||||
END:VCALENDAR'''
|
+ BARE_EVENT_TEMPLATE
|
||||||
|
+ """
|
||||||
|
END:VCALENDAR"""
|
||||||
|
)
|
||||||
|
|
||||||
|
EVENT_WITH_TIMEZONE_TEMPLATE = (
|
||||||
|
"""BEGIN:VCALENDAR
|
||||||
|
BEGIN:VTIMEZONE
|
||||||
|
TZID:Europe/Rome
|
||||||
|
X-LIC-LOCATION:Europe/Rome
|
||||||
|
BEGIN:DAYLIGHT
|
||||||
|
TZOFFSETFROM:+0100
|
||||||
|
TZOFFSETTO:+0200
|
||||||
|
TZNAME:CEST
|
||||||
|
DTSTART:19700329T020000
|
||||||
|
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
|
||||||
|
END:DAYLIGHT
|
||||||
|
BEGIN:STANDARD
|
||||||
|
TZOFFSETFROM:+0200
|
||||||
|
TZOFFSETTO:+0100
|
||||||
|
TZNAME:CET
|
||||||
|
DTSTART:19701025T030000
|
||||||
|
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
||||||
|
END:STANDARD
|
||||||
|
END:VTIMEZONE
|
||||||
|
"""
|
||||||
|
+ BARE_EVENT_TEMPLATE
|
||||||
|
+ """
|
||||||
|
END:VCALENDAR"""
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
SIMPLE_TEMPLATE = u'''BEGIN:FOO
|
SIMPLE_TEMPLATE = """BEGIN:FOO
|
||||||
UID:{r}
|
UID:{uid}
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
HAHA:YES
|
HAHA:YES
|
||||||
END:FOO'''
|
END:FOO"""
|
||||||
|
|
||||||
|
printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs")))
|
||||||
|
|
||||||
|
uid_strategy = st.text(
|
||||||
|
st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
|
||||||
|
).filter(lambda x: x.strip() == x)
|
||||||
|
|
|
||||||
|
|
@ -1,19 +1,70 @@
|
||||||
# -*- coding: utf-8 -*-
|
"""
|
||||||
'''
|
General-purpose fixtures for vdirsyncer's testsuite.
|
||||||
tests.conftest
|
"""
|
||||||
~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
General-purpose fixtures for vdirsyncer's testsuite.
|
from __future__ import annotations
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
import logging
|
||||||
:license: MIT, see LICENSE for more details.
|
import os
|
||||||
'''
|
|
||||||
|
import aiohttp
|
||||||
|
import click_log
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
import vdirsyncer.log
|
from hypothesis import HealthCheck
|
||||||
|
from hypothesis import Verbosity
|
||||||
|
from hypothesis import settings
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
def setup_logging():
|
def setup_logging():
|
||||||
vdirsyncer.log.set_level(vdirsyncer.log.logging.DEBUG)
|
click_log.basic_config("vdirsyncer").setLevel(logging.DEBUG)
|
||||||
vdirsyncer.log.add_handler(vdirsyncer.log.stdout_handler)
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import pytest_benchmark
|
||||||
|
except ImportError:
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def benchmark():
|
||||||
|
return lambda x: x()
|
||||||
|
|
||||||
|
else:
|
||||||
|
del pytest_benchmark
|
||||||
|
|
||||||
|
|
||||||
|
settings.register_profile(
|
||||||
|
"ci",
|
||||||
|
settings(
|
||||||
|
max_examples=1000,
|
||||||
|
verbosity=Verbosity.verbose,
|
||||||
|
suppress_health_check=[HealthCheck.too_slow],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
settings.register_profile(
|
||||||
|
"deterministic",
|
||||||
|
settings(
|
||||||
|
derandomize=True,
|
||||||
|
suppress_health_check=list(HealthCheck),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
|
||||||
|
|
||||||
|
if os.environ.get("DETERMINISTIC_TESTS", "false").lower() == "true":
|
||||||
|
settings.load_profile("deterministic")
|
||||||
|
elif os.environ.get("CI", "false").lower() == "true":
|
||||||
|
settings.load_profile("ci")
|
||||||
|
else:
|
||||||
|
settings.load_profile("dev")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
|
||||||
|
async def aio_session():
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
yield session
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
|
||||||
|
async def aio_connector():
|
||||||
|
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
||||||
|
yield conn
|
||||||
|
|
|
||||||
|
|
@ -1,188 +1,428 @@
|
||||||
# -*- coding: utf-8 -*-
|
from __future__ import annotations
|
||||||
'''
|
|
||||||
tests.storage
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
import random
|
import random
|
||||||
|
import textwrap
|
||||||
|
import uuid
|
||||||
|
from urllib.parse import quote as urlquote
|
||||||
|
from urllib.parse import unquote as urlunquote
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
|
||||||
import vdirsyncer.exceptions as exceptions
|
from tests import EVENT_TEMPLATE
|
||||||
from vdirsyncer.storage.base import Item
|
from tests import TASK_TEMPLATE
|
||||||
from vdirsyncer.utils.compat import iteritems, text_type
|
from tests import VCARD_TEMPLATE
|
||||||
|
from tests import assert_item_equals
|
||||||
from .. import SIMPLE_TEMPLATE, assert_item_equals
|
from tests import normalize_item
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.storage.base import normalize_meta_value
|
||||||
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
|
|
||||||
class BaseStorageTests(object):
|
def get_server_mixin(server_name):
|
||||||
item_template = SIMPLE_TEMPLATE
|
from . import __name__ as base
|
||||||
|
|
||||||
|
x = __import__(f"{base}.servers.{server_name}", fromlist=[""])
|
||||||
|
return x.ServerMixin
|
||||||
|
|
||||||
|
|
||||||
|
def format_item(item_template, uid=None):
|
||||||
|
# assert that special chars are handled correctly.
|
||||||
|
r = random.random()
|
||||||
|
return Item(item_template.format(r=r, uid=uid or r))
|
||||||
|
|
||||||
|
|
||||||
|
class StorageTests:
|
||||||
|
storage_class = None
|
||||||
|
supports_collections = True
|
||||||
|
supports_metadata = True
|
||||||
|
|
||||||
|
@pytest.fixture(params=["VEVENT", "VTODO", "VCARD"])
|
||||||
|
def item_type(self, request):
|
||||||
|
"""Parametrize with all supported item types."""
|
||||||
|
return request.param
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def storage_args(self):
|
def get_storage_args(self):
|
||||||
return self.get_storage_args
|
"""
|
||||||
|
Return a function with the following properties:
|
||||||
|
|
||||||
def get_storage_args(self, collection=None):
|
:param collection: The name of the collection to create and use.
|
||||||
raise NotImplementedError()
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
|
||||||
|
async def s(self, get_storage_args):
|
||||||
|
rv = self.storage_class(**await get_storage_args())
|
||||||
|
return rv
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def storage(self, storage_args):
|
def get_item(self, item_type):
|
||||||
def inner(**kw):
|
template = {
|
||||||
return self.storage_class(**storage_args(**kw))
|
"VEVENT": EVENT_TEMPLATE,
|
||||||
|
"VTODO": TASK_TEMPLATE,
|
||||||
|
"VCARD": VCARD_TEMPLATE,
|
||||||
|
}[item_type]
|
||||||
|
|
||||||
return inner
|
return lambda **kw: format_item(template, **kw)
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def s(self, storage):
|
def requires_collections(self):
|
||||||
return storage()
|
if not self.supports_collections:
|
||||||
|
pytest.skip("This storage does not support collections.")
|
||||||
|
|
||||||
def _create_bogus_item(self, item_template=None):
|
@pytest.fixture
|
||||||
r = random.random()
|
def requires_metadata(self):
|
||||||
item_template = item_template or self.item_template
|
if not self.supports_metadata:
|
||||||
return Item(item_template.format(r=r))
|
pytest.skip("This storage does not support metadata.")
|
||||||
|
|
||||||
def test_generic(self, s):
|
@pytest.mark.asyncio
|
||||||
items = [self._create_bogus_item() for i in range(1, 10)]
|
async def test_generic(self, s, get_item):
|
||||||
|
items = [get_item() for i in range(1, 10)]
|
||||||
hrefs = []
|
hrefs = []
|
||||||
for item in items:
|
for item in items:
|
||||||
hrefs.append(s.upload(item))
|
href, etag = await s.upload(item)
|
||||||
|
if etag is None:
|
||||||
|
_, etag = await s.get(href)
|
||||||
|
hrefs.append((href, etag))
|
||||||
hrefs.sort()
|
hrefs.sort()
|
||||||
assert hrefs == sorted(s.list())
|
assert hrefs == sorted(await aiostream.stream.list(s.list()))
|
||||||
for href, etag in hrefs:
|
for href, etag in hrefs:
|
||||||
assert isinstance(href, (text_type, bytes))
|
assert isinstance(href, (str, bytes))
|
||||||
assert isinstance(etag, (text_type, bytes))
|
assert isinstance(etag, (str, bytes))
|
||||||
assert s.has(href)
|
assert await s.has(href)
|
||||||
item, etag2 = s.get(href)
|
item, etag2 = await s.get(href)
|
||||||
assert etag == etag2
|
assert etag == etag2
|
||||||
|
|
||||||
def test_empty_get_multi(self, s):
|
@pytest.mark.asyncio
|
||||||
assert list(s.get_multi([])) == []
|
async def test_empty_get_multi(self, s):
|
||||||
|
assert await aiostream.stream.list(s.get_multi([])) == []
|
||||||
|
|
||||||
def test_upload_already_existing(self, s):
|
@pytest.mark.asyncio
|
||||||
item = self._create_bogus_item()
|
async def test_get_multi_duplicates(self, s, get_item):
|
||||||
s.upload(item)
|
href, etag = await s.upload(get_item())
|
||||||
|
if etag is None:
|
||||||
|
_, etag = await s.get(href)
|
||||||
|
((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
|
||||||
|
assert href2 == href
|
||||||
|
assert etag2 == etag
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_upload_already_existing(self, s, get_item):
|
||||||
|
item = get_item()
|
||||||
|
await s.upload(item)
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.upload(item)
|
await s.upload(item)
|
||||||
|
|
||||||
def test_upload(self, s):
|
@pytest.mark.asyncio
|
||||||
item = self._create_bogus_item()
|
async def test_upload(self, s, get_item):
|
||||||
href, etag = s.upload(item)
|
item = get_item()
|
||||||
assert_item_equals(s.get(href)[0], item)
|
href, _etag = await s.upload(item)
|
||||||
|
assert_item_equals((await s.get(href))[0], item)
|
||||||
|
|
||||||
def test_update(self, s):
|
@pytest.mark.asyncio
|
||||||
item = self._create_bogus_item()
|
async def test_update(self, s, get_item):
|
||||||
href, etag = s.upload(item)
|
item = get_item()
|
||||||
assert_item_equals(s.get(href)[0], item)
|
href, etag = await s.upload(item)
|
||||||
|
if etag is None:
|
||||||
|
_, etag = await s.get(href)
|
||||||
|
assert_item_equals((await s.get(href))[0], item)
|
||||||
|
|
||||||
new_item = self._create_bogus_item()
|
new_item = get_item(uid=item.uid)
|
||||||
new_etag = s.update(href, new_item, etag)
|
new_etag = await s.update(href, new_item, etag)
|
||||||
# See https://github.com/untitaker/vdirsyncer/issues/48
|
if new_etag is None:
|
||||||
assert isinstance(new_etag, (bytes, text_type))
|
_, new_etag = await s.get(href)
|
||||||
assert_item_equals(s.get(href)[0], new_item)
|
# See https://github.com/pimutils/vdirsyncer/issues/48
|
||||||
|
assert isinstance(new_etag, (bytes, str))
|
||||||
|
assert_item_equals((await s.get(href))[0], new_item)
|
||||||
|
|
||||||
def test_update_nonexisting(self, s):
|
@pytest.mark.asyncio
|
||||||
item = self._create_bogus_item()
|
async def test_update_nonexisting(self, s, get_item):
|
||||||
|
item = get_item()
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.update(s._get_href(item), item, '"123"')
|
await s.update("huehue", item, '"123"')
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_wrong_etag(self, s, get_item):
|
||||||
|
item = get_item()
|
||||||
|
href, _etag = await s.upload(item)
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.update('huehue', item, '"123"')
|
await s.update(href, item, '"lolnope"')
|
||||||
|
|
||||||
def test_wrong_etag(self, s):
|
|
||||||
item = self._create_bogus_item()
|
|
||||||
href, etag = s.upload(item)
|
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.update(href, item, '"lolnope"')
|
await s.delete(href, '"lolnope"')
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_delete(self, s, get_item):
|
||||||
|
href, etag = await s.upload(get_item())
|
||||||
|
await s.delete(href, etag)
|
||||||
|
assert not await aiostream.stream.list(s.list())
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_delete_nonexisting(self, s, get_item):
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
s.delete(href, '"lolnope"')
|
await s.delete("1", '"123"')
|
||||||
|
|
||||||
def test_delete(self, s):
|
@pytest.mark.asyncio
|
||||||
href, etag = s.upload(self._create_bogus_item())
|
async def test_list(self, s, get_item):
|
||||||
s.delete(href, etag)
|
assert not await aiostream.stream.list(s.list())
|
||||||
assert not list(s.list())
|
href, etag = await s.upload(get_item())
|
||||||
|
if etag is None:
|
||||||
|
_, etag = await s.get(href)
|
||||||
|
assert await aiostream.stream.list(s.list()) == [(href, etag)]
|
||||||
|
|
||||||
def test_delete_nonexisting(self, s):
|
@pytest.mark.asyncio
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
async def test_has(self, s, get_item):
|
||||||
s.delete('1', '"123"')
|
assert not await s.has("asd")
|
||||||
|
href, etag = await s.upload(get_item())
|
||||||
|
assert await s.has(href)
|
||||||
|
assert not await s.has("asd")
|
||||||
|
await s.delete(href, etag)
|
||||||
|
assert not await s.has(href)
|
||||||
|
|
||||||
def test_list(self, s):
|
@pytest.mark.asyncio
|
||||||
assert not list(s.list())
|
async def test_update_others_stay_the_same(self, s, get_item):
|
||||||
s.upload(self._create_bogus_item())
|
info = {}
|
||||||
assert list(s.list())
|
for _ in range(4):
|
||||||
|
href, etag = await s.upload(get_item())
|
||||||
|
if etag is None:
|
||||||
|
_, etag = await s.get(href)
|
||||||
|
info[href] = etag
|
||||||
|
|
||||||
def test_has(self, s):
|
items = await aiostream.stream.list(
|
||||||
assert not s.has('asd')
|
s.get_multi(href for href, etag in info.items())
|
||||||
href, etag = s.upload(self._create_bogus_item())
|
)
|
||||||
assert s.has(href)
|
assert {href: etag for href, item, etag in items} == info
|
||||||
assert not s.has('asd')
|
|
||||||
|
|
||||||
def test_update_others_stay_the_same(self, s):
|
|
||||||
info = dict([
|
|
||||||
s.upload(self._create_bogus_item()),
|
|
||||||
s.upload(self._create_bogus_item()),
|
|
||||||
s.upload(self._create_bogus_item()),
|
|
||||||
s.upload(self._create_bogus_item())
|
|
||||||
])
|
|
||||||
|
|
||||||
assert dict(
|
|
||||||
(href, etag) for href, item, etag
|
|
||||||
in s.get_multi(href for href, etag in iteritems(info))
|
|
||||||
) == info
|
|
||||||
|
|
||||||
def test_repr(self, s):
|
def test_repr(self, s):
|
||||||
assert self.storage_class.__name__ in repr(s)
|
assert self.storage_class.__name__ in repr(s)
|
||||||
|
assert s.instance_name is None
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
class SupportsCollections(object):
|
async def test_discover(
|
||||||
|
self,
|
||||||
def test_discover(self, storage_args):
|
requires_collections,
|
||||||
|
get_storage_args,
|
||||||
|
get_item,
|
||||||
|
aio_connector,
|
||||||
|
):
|
||||||
collections = set()
|
collections = set()
|
||||||
|
for i in range(1, 5):
|
||||||
|
collection = f"test{i}"
|
||||||
|
s = self.storage_class(**await get_storage_args(collection=collection))
|
||||||
|
assert not await aiostream.stream.list(s.list())
|
||||||
|
await s.upload(get_item())
|
||||||
|
collections.add(s.collection)
|
||||||
|
|
||||||
def main():
|
discovered = await aiostream.stream.list(
|
||||||
for i in range(1, 5):
|
self.storage_class.discover(**await get_storage_args(collection=None))
|
||||||
collection = 'test{}'.format(i)
|
)
|
||||||
# Create collections on-the-fly for most storages
|
actual = {c["collection"] for c in discovered}
|
||||||
# Except ownCloud, which already has all of them, and more
|
|
||||||
i += 1
|
|
||||||
s = self.storage_class(**storage_args(collection=collection))
|
|
||||||
|
|
||||||
# radicale ignores empty collections during discovery
|
assert actual >= collections
|
||||||
item = self._create_bogus_item()
|
|
||||||
s.upload(item)
|
|
||||||
|
|
||||||
collections.add(s.collection)
|
@pytest.mark.asyncio
|
||||||
main() # remove leftover variables from loop for safety
|
async def test_create_collection(
|
||||||
|
self,
|
||||||
|
requires_collections,
|
||||||
|
get_storage_args,
|
||||||
|
get_item,
|
||||||
|
):
|
||||||
|
if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
|
||||||
|
pytest.skip("Manual cleanup would be necessary.")
|
||||||
|
if getattr(self, "dav_server", "") == "radicale":
|
||||||
|
pytest.skip("Radicale does not support collection creation")
|
||||||
|
|
||||||
d = self.storage_class.discover(
|
args = await get_storage_args(collection=None)
|
||||||
**storage_args(collection=None))
|
args["collection"] = "test"
|
||||||
|
|
||||||
def main():
|
s = self.storage_class(**await self.storage_class.create_collection(**args))
|
||||||
for s in d:
|
|
||||||
if s.collection not in collections:
|
|
||||||
# ownCloud has many more collections, as on-the-fly
|
|
||||||
# creation doesn't really work there. Skip those
|
|
||||||
# collections, as they are not relevant to us.
|
|
||||||
print('Skipping {}'.format(s.collection))
|
|
||||||
continue
|
|
||||||
collections.remove(s.collection)
|
|
||||||
main()
|
|
||||||
|
|
||||||
assert not collections
|
href = (await s.upload(get_item()))[0]
|
||||||
|
assert href in await aiostream.stream.list(
|
||||||
|
(href async for href, etag in s.list())
|
||||||
|
)
|
||||||
|
|
||||||
def test_discover_collection_arg(self, storage_args):
|
@pytest.mark.asyncio
|
||||||
args = storage_args(collection='test2')
|
async def test_discover_collection_arg(
|
||||||
|
self, requires_collections, get_storage_args
|
||||||
|
):
|
||||||
|
args = await get_storage_args(collection="test2")
|
||||||
with pytest.raises(TypeError) as excinfo:
|
with pytest.raises(TypeError) as excinfo:
|
||||||
list(self.storage_class.discover(**args))
|
await aiostream.stream.list(self.storage_class.discover(**args))
|
||||||
|
|
||||||
assert 'collection argument must not be given' in str(excinfo.value)
|
assert "collection argument must not be given" in str(excinfo.value)
|
||||||
|
|
||||||
def test_collection_arg(self, storage):
|
@pytest.mark.asyncio
|
||||||
s = storage(collection='test2')
|
async def test_collection_arg(self, get_storage_args):
|
||||||
# Can't do stronger assertion because of radicale, which needs a
|
if self.supports_collections:
|
||||||
# fileextension to guess the collection type.
|
s = self.storage_class(**await get_storage_args(collection="test2"))
|
||||||
assert 'test2' in s.collection
|
# Can't do stronger assertion because of radicale, which needs a
|
||||||
|
# fileextension to guess the collection type.
|
||||||
|
assert "test2" in s.collection
|
||||||
|
else:
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
self.storage_class(collection="ayy", **await get_storage_args())
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_case_sensitive_uids(self, s, get_item):
|
||||||
|
if s.storage_name == "filesystem":
|
||||||
|
pytest.skip("Behavior depends on the filesystem.")
|
||||||
|
|
||||||
class StorageTests(BaseStorageTests, SupportsCollections):
|
uid = str(uuid.uuid4())
|
||||||
pass
|
await s.upload(get_item(uid=uid.upper()))
|
||||||
|
await s.upload(get_item(uid=uid.lower()))
|
||||||
|
items = [href async for href, etag in s.list()]
|
||||||
|
assert len(items) == 2
|
||||||
|
assert len(set(items)) == 2
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_specialchars(
|
||||||
|
self, monkeypatch, requires_collections, get_storage_args, get_item
|
||||||
|
):
|
||||||
|
if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
|
||||||
|
pytest.skip("iCloud and FastMail reject this name.")
|
||||||
|
|
||||||
|
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
||||||
|
|
||||||
|
uid = "test @ foo ät bar град сатану"
|
||||||
|
collection = "test @ foo ät bar"
|
||||||
|
|
||||||
|
s = self.storage_class(**await get_storage_args(collection=collection))
|
||||||
|
item = get_item(uid=uid)
|
||||||
|
|
||||||
|
href, etag = await s.upload(item)
|
||||||
|
item2, etag2 = await s.get(href)
|
||||||
|
if etag is not None:
|
||||||
|
assert etag2 == etag
|
||||||
|
assert_item_equals(item2, item)
|
||||||
|
|
||||||
|
((_, etag3),) = await aiostream.stream.list(s.list())
|
||||||
|
assert etag2 == etag3
|
||||||
|
|
||||||
|
assert collection in urlunquote(s.collection)
|
||||||
|
if self.storage_class.storage_name.endswith("dav"):
|
||||||
|
assert urlquote(uid, "/@:") in href
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_newline_in_uid(
|
||||||
|
self, monkeypatch, requires_collections, get_storage_args, get_item
|
||||||
|
):
|
||||||
|
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
||||||
|
|
||||||
|
uid = "UID:20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%0A.ics"
|
||||||
|
|
||||||
|
s = self.storage_class(**await get_storage_args())
|
||||||
|
item = get_item(uid=uid)
|
||||||
|
|
||||||
|
href, etag = await s.upload(item)
|
||||||
|
item2, etag2 = await s.get(href)
|
||||||
|
if etag is not None:
|
||||||
|
assert etag2 == etag
|
||||||
|
assert_item_equals(item2, item)
|
||||||
|
|
||||||
|
((_, etag3),) = await aiostream.stream.list(s.list())
|
||||||
|
assert etag2 == etag3
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_empty_metadata(self, requires_metadata, s):
|
||||||
|
if getattr(self, "dav_server", ""):
|
||||||
|
pytest.skip()
|
||||||
|
|
||||||
|
assert await s.get_meta("color") is None
|
||||||
|
assert await s.get_meta("displayname") is None
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_metadata(self, requires_metadata, s):
|
||||||
|
if getattr(self, "dav_server", "") == "xandikos":
|
||||||
|
pytest.skip("xandikos does not support removing metadata.")
|
||||||
|
|
||||||
|
try:
|
||||||
|
await s.set_meta("color", None)
|
||||||
|
assert await s.get_meta("color") is None
|
||||||
|
await s.set_meta("color", "#ff0000")
|
||||||
|
assert await s.get_meta("color") == "#ff0000"
|
||||||
|
except exceptions.UnsupportedMetadataError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_encoding_metadata(self, requires_metadata, s):
|
||||||
|
for x in ("hello world", "hello wörld"):
|
||||||
|
await s.set_meta("displayname", x)
|
||||||
|
rv = await s.get_meta("displayname")
|
||||||
|
assert rv == x
|
||||||
|
assert isinstance(rv, str)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"value",
|
||||||
|
[
|
||||||
|
None,
|
||||||
|
"",
|
||||||
|
"Hello there!",
|
||||||
|
"Österreich",
|
||||||
|
"中国",
|
||||||
|
"한글",
|
||||||
|
"42a4ec99-b1c2-4859-b142-759112f2ca50",
|
||||||
|
"فلسطين",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_metadata_normalization(self, requires_metadata, s, value):
|
||||||
|
x = await s.get_meta("displayname")
|
||||||
|
assert x == normalize_meta_value(x)
|
||||||
|
|
||||||
|
if not getattr(self, "dav_server", None):
|
||||||
|
# ownCloud replaces "" with "unnamed"
|
||||||
|
await s.set_meta("displayname", value)
|
||||||
|
assert await s.get_meta("displayname") == normalize_meta_value(value)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_recurring_events(self, s, item_type):
|
||||||
|
if item_type != "VEVENT":
|
||||||
|
pytest.skip("This storage instance doesn't support iCalendar.")
|
||||||
|
|
||||||
|
uid = str(uuid.uuid4())
|
||||||
|
item = Item(
|
||||||
|
textwrap.dedent(
|
||||||
|
f"""
|
||||||
|
BEGIN:VCALENDAR
|
||||||
|
VERSION:2.0
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTART;TZID=UTC:20140325T084000Z
|
||||||
|
DTEND;TZID=UTC:20140325T101000Z
|
||||||
|
DTSTAMP:20140327T060506Z
|
||||||
|
UID:{uid}
|
||||||
|
RECURRENCE-ID;TZID=UTC:20140325T083000Z
|
||||||
|
CREATED:20131216T033331Z
|
||||||
|
DESCRIPTION:
|
||||||
|
LAST-MODIFIED:20140327T060215Z
|
||||||
|
LOCATION:
|
||||||
|
SEQUENCE:1
|
||||||
|
STATUS:CONFIRMED
|
||||||
|
SUMMARY:test Event
|
||||||
|
TRANSP:OPAQUE
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTART;TZID=UTC:20140128T083000Z
|
||||||
|
DTEND;TZID=UTC:20140128T100000Z
|
||||||
|
RRULE:FREQ=WEEKLY;BYDAY=TU;UNTIL=20141208T213000Z
|
||||||
|
DTSTAMP:20140327T060506Z
|
||||||
|
UID:{uid}
|
||||||
|
CREATED:20131216T033331Z
|
||||||
|
DESCRIPTION:
|
||||||
|
LAST-MODIFIED:20140222T101012Z
|
||||||
|
LOCATION:
|
||||||
|
SEQUENCE:0
|
||||||
|
STATUS:CONFIRMED
|
||||||
|
SUMMARY:Test event
|
||||||
|
TRANSP:OPAQUE
|
||||||
|
END:VEVENT
|
||||||
|
END:VCALENDAR
|
||||||
|
"""
|
||||||
|
).strip()
|
||||||
|
)
|
||||||
|
|
||||||
|
href, _etag = await s.upload(item)
|
||||||
|
|
||||||
|
item2, _etag2 = await s.get(href)
|
||||||
|
assert normalize_item(item) == normalize_item(item2)
|
||||||
|
|
|
||||||
116
tests/storage/conftest.py
Normal file
116
tests/storage/conftest.py
Normal file
|
|
@ -0,0 +1,116 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import contextlib
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import aiostream
|
||||||
|
import pytest
|
||||||
|
import pytest_asyncio
|
||||||
|
import requests
|
||||||
|
|
||||||
|
|
||||||
|
def wait_for_container(url):
|
||||||
|
"""Wait for a container to initialise.
|
||||||
|
|
||||||
|
Polls a URL every 100ms until the server responds.
|
||||||
|
"""
|
||||||
|
# give the server 5 seconds to settle
|
||||||
|
for _ in range(50):
|
||||||
|
print(_)
|
||||||
|
|
||||||
|
try:
|
||||||
|
response = requests.get(url)
|
||||||
|
response.raise_for_status()
|
||||||
|
except requests.ConnectionError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
return
|
||||||
|
|
||||||
|
time.sleep(0.1)
|
||||||
|
|
||||||
|
pytest.exit(
|
||||||
|
"Server did not initialise in 5 seconds.\n"
|
||||||
|
"WARNING: There may be a stale docker container still running."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def dockerised_server(name, container_port, exposed_port):
|
||||||
|
"""Run a dockerised DAV server as a contenxt manager."""
|
||||||
|
container_id = None
|
||||||
|
url = f"http://127.0.0.1:{exposed_port}/"
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Hint: This will block while the pull happends, and only return once
|
||||||
|
# the container has actually started.
|
||||||
|
output = subprocess.check_output(
|
||||||
|
[
|
||||||
|
"docker",
|
||||||
|
"run",
|
||||||
|
"--rm",
|
||||||
|
"--detach",
|
||||||
|
"--publish",
|
||||||
|
f"{exposed_port}:{container_port}",
|
||||||
|
f"whynothugo/vdirsyncer-devkit-{name}",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
container_id = output.decode().strip()
|
||||||
|
wait_for_container(url)
|
||||||
|
|
||||||
|
yield url
|
||||||
|
finally:
|
||||||
|
if container_id:
|
||||||
|
subprocess.check_output(["docker", "kill", container_id])
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def baikal_server():
|
||||||
|
with dockerised_server("baikal", "80", "8002"):
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def radicale_server():
|
||||||
|
with dockerised_server("radicale", "8001", "8001"):
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def xandikos_server():
|
||||||
|
with dockerised_server("xandikos", "8000", "8000"):
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest_asyncio.fixture
|
||||||
|
async def slow_create_collection(request, aio_connector):
|
||||||
|
# We need to properly clean up because otherwise we might run into
|
||||||
|
# storage limits.
|
||||||
|
to_delete = []
|
||||||
|
|
||||||
|
async def inner(cls: type, args: dict, collection_name: str) -> dict:
|
||||||
|
"""Create a collection
|
||||||
|
|
||||||
|
Returns args necessary to create a Storage instance pointing to it.
|
||||||
|
"""
|
||||||
|
assert collection_name.startswith("test")
|
||||||
|
|
||||||
|
# Make each name unique
|
||||||
|
collection_name = f"{collection_name}-vdirsyncer-ci-{uuid.uuid4()}"
|
||||||
|
|
||||||
|
# Create the collection:
|
||||||
|
args = await cls.create_collection(collection_name, **args)
|
||||||
|
collection = cls(**args)
|
||||||
|
|
||||||
|
# Keep collection in a list to be deleted once tests end:
|
||||||
|
to_delete.append(collection)
|
||||||
|
|
||||||
|
assert not await aiostream.stream.list(collection.list())
|
||||||
|
return args
|
||||||
|
|
||||||
|
yield inner
|
||||||
|
|
||||||
|
await asyncio.gather(*(c.session.request("DELETE", "") for c in to_delete))
|
||||||
|
|
@ -1,8 +1,53 @@
|
||||||
# -*- coding: utf-8 -*-
|
from __future__ import annotations
|
||||||
'''
|
|
||||||
tests.storage.dav
|
|
||||||
~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
import os
|
||||||
:license: MIT, see LICENSE for more details.
|
import uuid
|
||||||
'''
|
|
||||||
|
import aiohttp
|
||||||
|
import aiostream
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from tests import assert_item_equals
|
||||||
|
from tests.storage import StorageTests
|
||||||
|
from tests.storage import get_server_mixin
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
|
dav_server = os.environ.get("DAV_SERVER", "skip")
|
||||||
|
ServerMixin = get_server_mixin(dav_server)
|
||||||
|
|
||||||
|
|
||||||
|
class DAVStorageTests(ServerMixin, StorageTests):
|
||||||
|
dav_server = dav_server
|
||||||
|
|
||||||
|
@pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dav_broken_item(self, s):
|
||||||
|
item = Item("HAHA:YES")
|
||||||
|
with pytest.raises((exceptions.Error, aiohttp.ClientResponseError)):
|
||||||
|
await s.upload(item)
|
||||||
|
assert not await aiostream.stream.list(s.list())
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
||||||
|
def breakdown(*a, **kw):
|
||||||
|
raise AssertionError("Expected not to be called.")
|
||||||
|
|
||||||
|
monkeypatch.setattr("requests.sessions.Session.request", breakdown)
|
||||||
|
|
||||||
|
try:
|
||||||
|
assert list(await aiostream.stream.list(s.get_multi([]))) == []
|
||||||
|
finally:
|
||||||
|
# Make sure monkeypatch doesn't interfere with DAV server teardown
|
||||||
|
monkeypatch.undo()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_dav_unicode_href(self, s, get_item, monkeypatch):
|
||||||
|
if self.dav_server == "radicale":
|
||||||
|
pytest.skip("Radicale is unable to deal with unicode hrefs")
|
||||||
|
|
||||||
|
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
|
||||||
|
item = get_item(uid="град сатану" + str(uuid.uuid4()))
|
||||||
|
href, _etag = await s.upload(item)
|
||||||
|
item2, _etag2 = await s.get(href)
|
||||||
|
assert_item_equals(item, item2)
|
||||||
|
|
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
'''
|
|
||||||
tests.storage.dav.servers
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
|
|
@ -1,108 +0,0 @@
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
import wsgi_intercept
|
|
||||||
import wsgi_intercept.requests_intercept
|
|
||||||
|
|
||||||
wsgi_intercept.requests_intercept.install()
|
|
||||||
|
|
||||||
|
|
||||||
RADICALE_SCHEMA = '''
|
|
||||||
create table collection (
|
|
||||||
path varchar(200) not null,
|
|
||||||
parent_path varchar(200) references collection (path),
|
|
||||||
primary key (path));
|
|
||||||
|
|
||||||
create table item (
|
|
||||||
name varchar(200) not null,
|
|
||||||
tag text not null,
|
|
||||||
collection_path varchar(200) references collection (path),
|
|
||||||
primary key (name));
|
|
||||||
|
|
||||||
create table header (
|
|
||||||
name varchar(200) not null,
|
|
||||||
value text not null,
|
|
||||||
collection_path varchar(200) references collection (path),
|
|
||||||
primary key (name, collection_path));
|
|
||||||
|
|
||||||
create table line (
|
|
||||||
name text not null,
|
|
||||||
value text not null,
|
|
||||||
item_name varchar(200) references item (name),
|
|
||||||
timestamp bigint not null,
|
|
||||||
primary key (timestamp));
|
|
||||||
|
|
||||||
create table property (
|
|
||||||
name varchar(200) not null,
|
|
||||||
value text not null,
|
|
||||||
collection_path varchar(200) references collection (path),
|
|
||||||
primary key (name, collection_path));
|
|
||||||
'''.split(';')
|
|
||||||
|
|
||||||
storage_backend = os.environ.get('RADICALE_BACKEND', '') or 'filesystem'
|
|
||||||
|
|
||||||
|
|
||||||
def do_the_radicale_dance(tmpdir):
|
|
||||||
# All of radicale is already global state, the cleanliness of the code and
|
|
||||||
# all hope is already lost. This function runs before every test.
|
|
||||||
|
|
||||||
# This wipes out the radicale modules, to reset all of its state.
|
|
||||||
for module in list(sys.modules):
|
|
||||||
if module.startswith('radicale'):
|
|
||||||
del sys.modules[module]
|
|
||||||
|
|
||||||
# radicale.config looks for this envvar. We have to delete it before it
|
|
||||||
# tries to load a config file.
|
|
||||||
os.environ['RADICALE_CONFIG'] = ''
|
|
||||||
import radicale.config
|
|
||||||
|
|
||||||
# Now we can set some basic configuration.
|
|
||||||
radicale.config.set('rights', 'type', 'owner_only')
|
|
||||||
radicale.config.set('auth', 'type', 'http')
|
|
||||||
|
|
||||||
import radicale.auth.http
|
|
||||||
|
|
||||||
def is_authenticated(user, password):
|
|
||||||
return user == 'bob' and password == 'bob'
|
|
||||||
radicale.auth.http.is_authenticated = is_authenticated
|
|
||||||
|
|
||||||
if storage_backend == 'filesystem':
|
|
||||||
radicale.config.set('storage', 'type', 'filesystem')
|
|
||||||
radicale.config.set('storage', 'filesystem_folder', tmpdir)
|
|
||||||
elif storage_backend == 'database':
|
|
||||||
radicale.config.set('storage', 'type', 'database')
|
|
||||||
radicale.config.set('storage', 'database_url', 'sqlite://')
|
|
||||||
from radicale.storage import database
|
|
||||||
|
|
||||||
s = database.Session()
|
|
||||||
for line in RADICALE_SCHEMA:
|
|
||||||
s.execute(line)
|
|
||||||
s.commit()
|
|
||||||
else:
|
|
||||||
raise RuntimeError()
|
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin(object):
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
|
||||||
def setup(self, request, tmpdir):
|
|
||||||
do_the_radicale_dance(str(tmpdir))
|
|
||||||
from radicale import Application
|
|
||||||
|
|
||||||
wsgi_intercept.add_wsgi_intercept('127.0.0.1', 80, Application)
|
|
||||||
|
|
||||||
def teardown():
|
|
||||||
wsgi_intercept.remove_wsgi_intercept('127.0.0.1', 80)
|
|
||||||
request.addfinalizer(teardown)
|
|
||||||
|
|
||||||
def get_storage_args(self, collection='test'):
|
|
||||||
url = 'http://127.0.0.1/bob/'
|
|
||||||
if collection is not None:
|
|
||||||
collection += self.storage_class.fileext
|
|
||||||
|
|
||||||
return {'url': url, 'username': 'bob', 'password': 'bob',
|
|
||||||
'collection': collection}
|
|
||||||
|
|
@ -1,18 +0,0 @@
|
||||||
#!/bin/sh
|
|
||||||
set -e
|
|
||||||
[ -n "$REQUIREMENTS" ] || export REQUIREMENTS=release
|
|
||||||
[ -n "$RADICALE_BACKEND" ] || export RADICALE_BACKEND=filesystem
|
|
||||||
|
|
||||||
if [ "$REQUIREMENTS" = "release" ]; then
|
|
||||||
radicale_pkg="radicale"
|
|
||||||
elif [ "$REQUIREMENTS" = "devel" ]; then
|
|
||||||
radicale_pkg="git+https://github.com/Kozea/Radicale.git"
|
|
||||||
else
|
|
||||||
echo "Invalid requirements envvar"
|
|
||||||
false
|
|
||||||
fi
|
|
||||||
pip install wsgi_intercept $radicale_pkg
|
|
||||||
|
|
||||||
if [ "$RADICALE_BACKEND" = "database" ]; then
|
|
||||||
pip install sqlalchemy
|
|
||||||
fi
|
|
||||||
174
tests/storage/dav/test_caldav.py
Normal file
174
tests/storage/dav/test_caldav.py
Normal file
|
|
@ -0,0 +1,174 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
import datetime
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
import aiostream
|
||||||
|
import pytest
|
||||||
|
from aioresponses import aioresponses
|
||||||
|
|
||||||
|
from tests import EVENT_TEMPLATE
|
||||||
|
from tests import TASK_TEMPLATE
|
||||||
|
from tests import VCARD_TEMPLATE
|
||||||
|
from tests.storage import format_item
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.storage.dav import CalDAVStorage
|
||||||
|
|
||||||
|
from . import DAVStorageTests
|
||||||
|
from . import dav_server
|
||||||
|
|
||||||
|
|
||||||
|
class TestCalDAVStorage(DAVStorageTests):
|
||||||
|
storage_class = CalDAVStorage
|
||||||
|
|
||||||
|
@pytest.fixture(params=["VTODO", "VEVENT"])
|
||||||
|
def item_type(self, request):
|
||||||
|
return request.param
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
||||||
|
s = self.storage_class(item_types=(item_type,), **await get_storage_args())
|
||||||
|
|
||||||
|
# Most storages hard-fail, but xandikos doesn't.
|
||||||
|
with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError):
|
||||||
|
await s.upload(format_item(VCARD_TEMPLATE))
|
||||||
|
|
||||||
|
assert not await aiostream.stream.list(s.list())
|
||||||
|
|
||||||
|
# The `arg` param is not named `item_types` because that would hit
|
||||||
|
# https://bitbucket.org/pytest-dev/pytest/issue/745/
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
("arg", "calls_num"),
|
||||||
|
[
|
||||||
|
(("VTODO",), 1),
|
||||||
|
(("VEVENT",), 1),
|
||||||
|
(("VTODO", "VEVENT"), 2),
|
||||||
|
(("VTODO", "VEVENT", "VJOURNAL"), 3),
|
||||||
|
((), 1),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_item_types_performance(
|
||||||
|
self, get_storage_args, arg, calls_num, monkeypatch
|
||||||
|
):
|
||||||
|
s = self.storage_class(item_types=arg, **await get_storage_args())
|
||||||
|
old_parse = s._parse_prop_responses
|
||||||
|
calls = []
|
||||||
|
|
||||||
|
def new_parse(*a, **kw):
|
||||||
|
calls.append(None)
|
||||||
|
return old_parse(*a, **kw)
|
||||||
|
|
||||||
|
monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
|
||||||
|
await aiostream.stream.list(s.list())
|
||||||
|
assert len(calls) == calls_num
|
||||||
|
|
||||||
|
@pytest.mark.xfail(
|
||||||
|
dav_server == "radicale", reason="Radicale doesn't support timeranges."
|
||||||
|
)
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_timerange_correctness(self, get_storage_args):
|
||||||
|
start_date = datetime.datetime(2013, 9, 10)
|
||||||
|
end_date = datetime.datetime(2013, 9, 13)
|
||||||
|
s = self.storage_class(
|
||||||
|
start_date=start_date, end_date=end_date, **await get_storage_args()
|
||||||
|
)
|
||||||
|
|
||||||
|
too_old_item = format_item(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
BEGIN:VCALENDAR
|
||||||
|
VERSION:2.0
|
||||||
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTART:19970714T170000Z
|
||||||
|
DTEND:19970715T035959Z
|
||||||
|
SUMMARY:Bastille Day Party
|
||||||
|
X-SOMETHING:{r}
|
||||||
|
UID:{r}
|
||||||
|
END:VEVENT
|
||||||
|
END:VCALENDAR
|
||||||
|
"""
|
||||||
|
).strip()
|
||||||
|
)
|
||||||
|
|
||||||
|
too_new_item = format_item(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
BEGIN:VCALENDAR
|
||||||
|
VERSION:2.0
|
||||||
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTART:20150714T170000Z
|
||||||
|
DTEND:20150715T035959Z
|
||||||
|
SUMMARY:Another Bastille Day Party
|
||||||
|
X-SOMETHING:{r}
|
||||||
|
UID:{r}
|
||||||
|
END:VEVENT
|
||||||
|
END:VCALENDAR
|
||||||
|
"""
|
||||||
|
).strip()
|
||||||
|
)
|
||||||
|
|
||||||
|
good_item = format_item(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
BEGIN:VCALENDAR
|
||||||
|
VERSION:2.0
|
||||||
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
BEGIN:VEVENT
|
||||||
|
DTSTART:20130911T170000Z
|
||||||
|
DTEND:20130912T035959Z
|
||||||
|
SUMMARY:What's with all these Bastille Day Partys
|
||||||
|
X-SOMETHING:{r}
|
||||||
|
UID:{r}
|
||||||
|
END:VEVENT
|
||||||
|
END:VCALENDAR
|
||||||
|
"""
|
||||||
|
).strip()
|
||||||
|
)
|
||||||
|
|
||||||
|
await s.upload(too_old_item)
|
||||||
|
await s.upload(too_new_item)
|
||||||
|
expected_href, _ = await s.upload(good_item)
|
||||||
|
|
||||||
|
((actual_href, _),) = await aiostream.stream.list(s.list())
|
||||||
|
assert actual_href == expected_href
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_invalid_resource(self, monkeypatch, get_storage_args):
|
||||||
|
args = await get_storage_args(collection=None)
|
||||||
|
|
||||||
|
with aioresponses() as m:
|
||||||
|
m.add(args["url"], method="PROPFIND", status=200, body="Hello world")
|
||||||
|
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
s = self.storage_class(**args)
|
||||||
|
await aiostream.stream.list(s.list())
|
||||||
|
|
||||||
|
assert len(m.requests) == 1
|
||||||
|
|
||||||
|
@pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
|
||||||
|
@pytest.mark.skipif(
|
||||||
|
dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs."
|
||||||
|
)
|
||||||
|
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_item_types_general(self, s):
|
||||||
|
event = (await s.upload(format_item(EVENT_TEMPLATE)))[0]
|
||||||
|
task = (await s.upload(format_item(TASK_TEMPLATE)))[0]
|
||||||
|
s.item_types = ("VTODO", "VEVENT")
|
||||||
|
|
||||||
|
async def hrefs():
|
||||||
|
return {href async for href, etag in s.list()}
|
||||||
|
|
||||||
|
assert await hrefs() == {event, task}
|
||||||
|
s.item_types = ("VTODO",)
|
||||||
|
assert await hrefs() == {task}
|
||||||
|
s.item_types = ("VEVENT",)
|
||||||
|
assert await hrefs() == {event}
|
||||||
|
s.item_types = ()
|
||||||
|
assert await hrefs() == {event, task}
|
||||||
15
tests/storage/dav/test_carddav.py
Normal file
15
tests/storage/dav/test_carddav.py
Normal file
|
|
@ -0,0 +1,15 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer.storage.dav import CardDAVStorage
|
||||||
|
|
||||||
|
from . import DAVStorageTests
|
||||||
|
|
||||||
|
|
||||||
|
class TestCardDAVStorage(DAVStorageTests):
|
||||||
|
storage_class = CardDAVStorage
|
||||||
|
|
||||||
|
@pytest.fixture(params=["VCARD"])
|
||||||
|
def item_type(self, request):
|
||||||
|
return request.param
|
||||||
|
|
@ -1,217 +1,59 @@
|
||||||
# -*- coding: utf-8 -*-
|
from __future__ import annotations
|
||||||
'''
|
|
||||||
tests.storage.dav.test_main
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import os
|
|
||||||
from textwrap import dedent
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import requests
|
from vdirsyncer.storage.dav import _BAD_XML_CHARS
|
||||||
import requests.exceptions
|
from vdirsyncer.storage.dav import _merge_xml
|
||||||
|
from vdirsyncer.storage.dav import _normalize_href
|
||||||
from tests import EVENT_TEMPLATE, TASK_TEMPLATE, VCARD_TEMPLATE
|
from vdirsyncer.storage.dav import _parse_xml
|
||||||
|
|
||||||
import vdirsyncer.exceptions as exceptions
|
|
||||||
from vdirsyncer.storage.base import Item
|
|
||||||
from vdirsyncer.storage.dav import CaldavStorage, CarddavStorage
|
|
||||||
|
|
||||||
from .. import StorageTests
|
|
||||||
|
|
||||||
|
|
||||||
dav_server = os.environ.get('DAV_SERVER', '').strip() or 'radicale'
|
def test_xml_utilities():
|
||||||
|
x = _parse_xml(
|
||||||
|
b"""<?xml version="1.0" encoding="UTF-8" ?>
|
||||||
|
<multistatus xmlns="DAV:">
|
||||||
|
<response>
|
||||||
|
<propstat>
|
||||||
|
<status>HTTP/1.1 404 Not Found</status>
|
||||||
|
<prop>
|
||||||
|
<getcontenttype/>
|
||||||
|
</prop>
|
||||||
|
</propstat>
|
||||||
|
<propstat>
|
||||||
|
<prop>
|
||||||
|
<resourcetype>
|
||||||
|
<collection/>
|
||||||
|
</resourcetype>
|
||||||
|
</prop>
|
||||||
|
</propstat>
|
||||||
|
</response>
|
||||||
|
</multistatus>
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
response = x.find("{DAV:}response")
|
||||||
|
props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
|
||||||
|
assert props.find("{DAV:}resourcetype/{DAV:}collection") is not None
|
||||||
|
assert props.find("{DAV:}getcontenttype") is not None
|
||||||
|
|
||||||
|
|
||||||
def _get_server_mixin(server_name):
|
@pytest.mark.parametrize("char", range(32))
|
||||||
from . import __name__ as base
|
def test_xml_specialchars(char):
|
||||||
x = __import__('{}.servers.{}'.format(base, server_name), fromlist=[''])
|
x = _parse_xml(
|
||||||
return x.ServerMixin
|
'<?xml version="1.0" encoding="UTF-8" ?>'
|
||||||
|
f"<foo>ye{chr(char)}s\r\n"
|
||||||
|
"hello</foo>".encode("ascii")
|
||||||
|
)
|
||||||
|
|
||||||
ServerMixin = _get_server_mixin(dav_server)
|
if char in _BAD_XML_CHARS:
|
||||||
|
assert x.text == "yes\nhello"
|
||||||
|
|
||||||
|
|
||||||
templates = {
|
@pytest.mark.parametrize(
|
||||||
'VCARD': VCARD_TEMPLATE,
|
"href",
|
||||||
'VEVENT': EVENT_TEMPLATE,
|
[
|
||||||
'VTODO': TASK_TEMPLATE
|
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
|
||||||
}
|
],
|
||||||
|
)
|
||||||
|
def test_normalize_href(href):
|
||||||
class DavStorageTests(ServerMixin, StorageTests):
|
assert href == _normalize_href("https://example.com", href)
|
||||||
def test_dav_broken_item(self, s):
|
|
||||||
item = Item(u'HAHA:YES')
|
|
||||||
try:
|
|
||||||
s.upload(item)
|
|
||||||
except (exceptions.Error, requests.exceptions.HTTPError):
|
|
||||||
pass
|
|
||||||
assert not list(s.list())
|
|
||||||
|
|
||||||
def test_wrong_etag(self, s):
|
|
||||||
super(DavStorageTests, self).test_wrong_etag(s)
|
|
||||||
|
|
||||||
def test_update_nonexisting(self, s):
|
|
||||||
super(DavStorageTests, self).test_update_nonexisting(s)
|
|
||||||
|
|
||||||
def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
|
||||||
def breakdown(*a, **kw):
|
|
||||||
raise AssertionError('Expected not to be called.')
|
|
||||||
|
|
||||||
monkeypatch.setattr('requests.sessions.Session.request', breakdown)
|
|
||||||
|
|
||||||
assert list(s.get_multi([])) == []
|
|
||||||
|
|
||||||
|
|
||||||
class TestCaldavStorage(DavStorageTests):
|
|
||||||
storage_class = CaldavStorage
|
|
||||||
|
|
||||||
item_template = TASK_TEMPLATE
|
|
||||||
|
|
||||||
def test_both_vtodo_and_vevent(self, s):
|
|
||||||
task = self._create_bogus_item(item_template=TASK_TEMPLATE)
|
|
||||||
event = self._create_bogus_item(item_template=EVENT_TEMPLATE)
|
|
||||||
href_etag_task = s.upload(task)
|
|
||||||
href_etag_event = s.upload(event)
|
|
||||||
assert set(s.list()) == set([
|
|
||||||
href_etag_task,
|
|
||||||
href_etag_event
|
|
||||||
])
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('item_type', ['VTODO', 'VEVENT'])
|
|
||||||
def test_item_types_correctness(self, item_type, storage_args):
|
|
||||||
other_item_type = 'VTODO' if item_type == 'VEVENT' else 'VEVENT'
|
|
||||||
s = self.storage_class(item_types=(item_type,), **storage_args())
|
|
||||||
try:
|
|
||||||
s.upload(self._create_bogus_item(
|
|
||||||
item_template=templates[other_item_type]))
|
|
||||||
s.upload(self._create_bogus_item(
|
|
||||||
item_template=templates[other_item_type]))
|
|
||||||
except (exceptions.Error, requests.exceptions.HTTPError):
|
|
||||||
pass
|
|
||||||
href, etag = \
|
|
||||||
s.upload(self._create_bogus_item(
|
|
||||||
item_template=templates[item_type]))
|
|
||||||
((href2, etag2),) = s.list()
|
|
||||||
assert href2 == href
|
|
||||||
assert etag2 == etag
|
|
||||||
|
|
||||||
@pytest.mark.parametrize('item_types', [
|
|
||||||
('VTODO',),
|
|
||||||
('VEVENT',),
|
|
||||||
('VTODO', 'VEVENT'),
|
|
||||||
('VTODO', 'VEVENT', 'VJOURNAL'),
|
|
||||||
()
|
|
||||||
])
|
|
||||||
def test_item_types_performance(self, storage_args, item_types,
|
|
||||||
monkeypatch):
|
|
||||||
s = self.storage_class(item_types=item_types, **storage_args())
|
|
||||||
item = self._create_bogus_item()
|
|
||||||
href, etag = s.upload(item)
|
|
||||||
|
|
||||||
old_dav_query = s._dav_query
|
|
||||||
calls = []
|
|
||||||
|
|
||||||
def _dav_query(*a, **kw):
|
|
||||||
calls.append(None)
|
|
||||||
return old_dav_query(*a, **kw)
|
|
||||||
|
|
||||||
monkeypatch.setattr(s, '_dav_query', _dav_query)
|
|
||||||
|
|
||||||
rv = list(s.list())
|
|
||||||
if (dav_server != 'radicale' and not s.item_types) \
|
|
||||||
or item.parsed.name in s.item_types:
|
|
||||||
assert rv == [(href, etag)]
|
|
||||||
assert len(calls) == (len(item_types) or 1)
|
|
||||||
|
|
||||||
@pytest.mark.xfail(dav_server == 'radicale',
|
|
||||||
reason='Radicale doesn\'t support timeranges.')
|
|
||||||
def test_timerange_correctness(self, storage_args):
|
|
||||||
start_date = datetime.datetime(2013, 9, 10)
|
|
||||||
end_date = datetime.datetime(2013, 9, 13)
|
|
||||||
s = self.storage_class(start_date=start_date, end_date=end_date,
|
|
||||||
**storage_args())
|
|
||||||
|
|
||||||
too_old_item = self._create_bogus_item(item_template=dedent(u'''
|
|
||||||
BEGIN:VCALENDAR
|
|
||||||
VERSION:2.0
|
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:19970714T170000Z
|
|
||||||
DTEND:19970715T035959Z
|
|
||||||
SUMMARY:Bastille Day Party
|
|
||||||
X-SOMETHING:{r}
|
|
||||||
UID:{r}
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
''').strip())
|
|
||||||
|
|
||||||
too_new_item = self._create_bogus_item(item_template=dedent(u'''
|
|
||||||
BEGIN:VCALENDAR
|
|
||||||
VERSION:2.0
|
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20150714T170000Z
|
|
||||||
DTEND:20150715T035959Z
|
|
||||||
SUMMARY:Another Bastille Day Party
|
|
||||||
X-SOMETHING:{r}
|
|
||||||
UID:{r}
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
''').strip())
|
|
||||||
|
|
||||||
good_item = self._create_bogus_item(item_template=dedent(u'''
|
|
||||||
BEGIN:VCALENDAR
|
|
||||||
VERSION:2.0
|
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
|
||||||
BEGIN:VEVENT
|
|
||||||
DTSTART:20130911T170000Z
|
|
||||||
DTEND:20130912T035959Z
|
|
||||||
SUMMARY:What's with all these Bastille Day Partys
|
|
||||||
X-SOMETHING:{r}
|
|
||||||
UID:{r}
|
|
||||||
END:VEVENT
|
|
||||||
END:VCALENDAR
|
|
||||||
''').strip())
|
|
||||||
|
|
||||||
s.upload(too_old_item)
|
|
||||||
s.upload(too_new_item)
|
|
||||||
href, etag = s.upload(good_item)
|
|
||||||
|
|
||||||
assert list(s.list()) == [(href, etag)]
|
|
||||||
|
|
||||||
def test_item_types_passed_as_string(self, storage_args):
|
|
||||||
kw = storage_args()
|
|
||||||
a = self.storage_class(item_types='VTODO,VEVENT', **kw)
|
|
||||||
b = self.storage_class(item_types=('VTODO', 'VEVENT'), **kw)
|
|
||||||
assert a.item_types == b.item_types == ('VTODO', 'VEVENT')
|
|
||||||
|
|
||||||
def test_invalid_resource(self, monkeypatch, storage_args):
|
|
||||||
calls = []
|
|
||||||
args = storage_args(collection=None)
|
|
||||||
|
|
||||||
def request(session, method, url, data=None, headers=None, auth=None,
|
|
||||||
verify=None):
|
|
||||||
assert url == args['url']
|
|
||||||
calls.append(None)
|
|
||||||
|
|
||||||
r = requests.Response()
|
|
||||||
r.status_code = 200
|
|
||||||
r._content = 'Hello World.'
|
|
||||||
return r
|
|
||||||
|
|
||||||
monkeypatch.setattr('requests.sessions.Session.request', request)
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
s = self.storage_class(**args)
|
|
||||||
list(s.list())
|
|
||||||
assert len(calls) == 1
|
|
||||||
|
|
||||||
|
|
||||||
class TestCarddavStorage(DavStorageTests):
|
|
||||||
storage_class = CarddavStorage
|
|
||||||
item_template = VCARD_TEMPLATE
|
|
||||||
|
|
|
||||||
0
tests/storage/servers/__init__.py
Normal file
0
tests/storage/servers/__init__.py
Normal file
38
tests/storage/servers/baikal/__init__.py
Normal file
38
tests/storage/servers/baikal/__init__.py
Normal file
|
|
@ -0,0 +1,38 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin:
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(
|
||||||
|
self,
|
||||||
|
request,
|
||||||
|
tmpdir,
|
||||||
|
slow_create_collection,
|
||||||
|
baikal_server,
|
||||||
|
aio_connector,
|
||||||
|
):
|
||||||
|
async def inner(collection="test"):
|
||||||
|
base_url = "http://127.0.0.1:8002/"
|
||||||
|
args = {
|
||||||
|
"url": base_url,
|
||||||
|
"username": "baikal",
|
||||||
|
"password": "baikal",
|
||||||
|
"connector": aio_connector,
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.storage_class.fileext == ".vcf":
|
||||||
|
args["url"] = base_url + "card.php/"
|
||||||
|
else:
|
||||||
|
args["url"] = base_url + "cal.php/"
|
||||||
|
|
||||||
|
if collection is not None:
|
||||||
|
args = await slow_create_collection(
|
||||||
|
self.storage_class,
|
||||||
|
args,
|
||||||
|
collection,
|
||||||
|
)
|
||||||
|
return args
|
||||||
|
|
||||||
|
return inner
|
||||||
50
tests/storage/servers/davical/__init__.py
Normal file
50
tests/storage/servers/davical/__init__.py
Normal file
|
|
@ -0,0 +1,50 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
try:
|
||||||
|
caldav_args = {
|
||||||
|
# Those credentials are configured through the Travis UI
|
||||||
|
"username": os.environ["DAVICAL_USERNAME"].strip(),
|
||||||
|
"password": os.environ["DAVICAL_PASSWORD"].strip(),
|
||||||
|
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
|
||||||
|
}
|
||||||
|
except KeyError as e:
|
||||||
|
pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.flaky(reruns=5)
|
||||||
|
class ServerMixin:
|
||||||
|
@pytest.fixture
|
||||||
|
def davical_args(self):
|
||||||
|
if self.storage_class.fileext == ".ics":
|
||||||
|
return dict(caldav_args)
|
||||||
|
elif self.storage_class.fileext == ".vcf":
|
||||||
|
pytest.skip("No carddav")
|
||||||
|
else:
|
||||||
|
raise RuntimeError
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(self, davical_args, request):
|
||||||
|
async def inner(collection="test"):
|
||||||
|
if collection is None:
|
||||||
|
return davical_args
|
||||||
|
|
||||||
|
assert collection.startswith("test")
|
||||||
|
|
||||||
|
for _ in range(4):
|
||||||
|
args = self.storage_class.create_collection(
|
||||||
|
collection + str(uuid.uuid4()), **davical_args
|
||||||
|
)
|
||||||
|
s = self.storage_class(**args)
|
||||||
|
if not list(s.list()):
|
||||||
|
# See: https://stackoverflow.com/a/33984811
|
||||||
|
request.addfinalizer(lambda x=s: x.session.request("DELETE", ""))
|
||||||
|
return args
|
||||||
|
|
||||||
|
raise RuntimeError("Failed to find free collection.")
|
||||||
|
|
||||||
|
return inner
|
||||||
1
tests/storage/servers/davical/install.sh
Normal file
1
tests/storage/servers/davical/install.sh
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
pip install pytest-rerunfailures
|
||||||
42
tests/storage/servers/fastmail/__init__.py
Normal file
42
tests/storage/servers/fastmail/__init__.py
Normal file
|
|
@ -0,0 +1,42 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin:
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(self, slow_create_collection, aio_connector, request):
|
||||||
|
if (
|
||||||
|
"item_type" in request.fixturenames
|
||||||
|
and request.getfixturevalue("item_type") == "VTODO"
|
||||||
|
):
|
||||||
|
# Fastmail has non-standard support for TODOs
|
||||||
|
# See https://github.com/pimutils/vdirsyncer/issues/824
|
||||||
|
pytest.skip("Fastmail has non-standard VTODO support.")
|
||||||
|
|
||||||
|
async def inner(collection="test"):
|
||||||
|
args = {
|
||||||
|
"username": os.environ["FASTMAIL_USERNAME"],
|
||||||
|
"password": os.environ["FASTMAIL_PASSWORD"],
|
||||||
|
"connector": aio_connector,
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.storage_class.fileext == ".ics":
|
||||||
|
args["url"] = "https://caldav.fastmail.com/"
|
||||||
|
elif self.storage_class.fileext == ".vcf":
|
||||||
|
args["url"] = "https://carddav.fastmail.com/"
|
||||||
|
else:
|
||||||
|
raise RuntimeError
|
||||||
|
|
||||||
|
if collection is not None:
|
||||||
|
args = await slow_create_collection(
|
||||||
|
self.storage_class,
|
||||||
|
args,
|
||||||
|
collection,
|
||||||
|
)
|
||||||
|
|
||||||
|
return args
|
||||||
|
|
||||||
|
return inner
|
||||||
33
tests/storage/servers/icloud/__init__.py
Normal file
33
tests/storage/servers/icloud/__init__.py
Normal file
|
|
@ -0,0 +1,33 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin:
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(self, item_type, slow_create_collection):
|
||||||
|
if item_type != "VEVENT":
|
||||||
|
# iCloud collections can either be calendars or task lists.
|
||||||
|
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
|
||||||
|
pytest.skip("iCloud doesn't support anything else than VEVENT")
|
||||||
|
|
||||||
|
async def inner(collection="test"):
|
||||||
|
args = {
|
||||||
|
"username": os.environ["ICLOUD_USERNAME"],
|
||||||
|
"password": os.environ["ICLOUD_PASSWORD"],
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.storage_class.fileext == ".ics":
|
||||||
|
args["url"] = "https://caldav.icloud.com/"
|
||||||
|
elif self.storage_class.fileext == ".vcf":
|
||||||
|
args["url"] = "https://contacts.icloud.com/"
|
||||||
|
else:
|
||||||
|
raise RuntimeError
|
||||||
|
|
||||||
|
if collection is not None:
|
||||||
|
args = slow_create_collection(self.storage_class, args, collection)
|
||||||
|
return args
|
||||||
|
|
||||||
|
return inner
|
||||||
33
tests/storage/servers/radicale/__init__.py
Normal file
33
tests/storage/servers/radicale/__init__.py
Normal file
|
|
@ -0,0 +1,33 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin:
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(
|
||||||
|
self,
|
||||||
|
request,
|
||||||
|
tmpdir,
|
||||||
|
slow_create_collection,
|
||||||
|
radicale_server,
|
||||||
|
aio_connector,
|
||||||
|
):
|
||||||
|
async def inner(collection="test"):
|
||||||
|
url = "http://127.0.0.1:8001/"
|
||||||
|
args = {
|
||||||
|
"url": url,
|
||||||
|
"username": "radicale",
|
||||||
|
"password": "radicale",
|
||||||
|
"connector": aio_connector,
|
||||||
|
}
|
||||||
|
|
||||||
|
if collection is not None:
|
||||||
|
args = await slow_create_collection(
|
||||||
|
self.storage_class,
|
||||||
|
args,
|
||||||
|
collection,
|
||||||
|
)
|
||||||
|
return args
|
||||||
|
|
||||||
|
return inner
|
||||||
9
tests/storage/servers/skip/__init__.py
Normal file
9
tests/storage/servers/skip/__init__.py
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin:
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(self):
|
||||||
|
pytest.skip("DAV tests disabled.")
|
||||||
29
tests/storage/servers/xandikos/__init__.py
Normal file
29
tests/storage/servers/xandikos/__init__.py
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin:
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(
|
||||||
|
self,
|
||||||
|
request,
|
||||||
|
tmpdir,
|
||||||
|
slow_create_collection,
|
||||||
|
xandikos_server,
|
||||||
|
aio_connector,
|
||||||
|
):
|
||||||
|
async def inner(collection="test"):
|
||||||
|
url = "http://127.0.0.1:8000/"
|
||||||
|
args = {"url": url, "connector": aio_connector}
|
||||||
|
|
||||||
|
if collection is not None:
|
||||||
|
args = await slow_create_collection(
|
||||||
|
self.storage_class,
|
||||||
|
args,
|
||||||
|
collection,
|
||||||
|
)
|
||||||
|
|
||||||
|
return args
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
@ -1,18 +1,12 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
# -*- coding: utf-8 -*-
|
import subprocess
|
||||||
'''
|
|
||||||
tests.storage.filesystem
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vdirsyncer.storage.filesystem import FilesystemStorage
|
from vdirsyncer.storage.filesystem import FilesystemStorage
|
||||||
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
from . import StorageTests
|
from . import StorageTests
|
||||||
|
|
||||||
|
|
@ -20,38 +14,119 @@ from . import StorageTests
|
||||||
class TestFilesystemStorage(StorageTests):
|
class TestFilesystemStorage(StorageTests):
|
||||||
storage_class = FilesystemStorage
|
storage_class = FilesystemStorage
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture
|
||||||
def setup(self, tmpdir):
|
def get_storage_args(self, tmpdir):
|
||||||
self.tmpdir = str(tmpdir)
|
async def inner(collection="test"):
|
||||||
|
rv = {"path": str(tmpdir), "fileext": ".txt", "collection": collection}
|
||||||
|
if collection is not None:
|
||||||
|
rv = await self.storage_class.create_collection(**rv)
|
||||||
|
return rv
|
||||||
|
|
||||||
def get_storage_args(self, collection=None):
|
return inner
|
||||||
path = self.tmpdir
|
|
||||||
if collection is not None:
|
|
||||||
os.makedirs(os.path.join(path, collection))
|
|
||||||
return {'path': path, 'fileext': '.txt', 'collection': collection}
|
|
||||||
|
|
||||||
def test_create_is_false(self, tmpdir):
|
|
||||||
with pytest.raises(IOError):
|
|
||||||
self.storage_class(str(tmpdir), '.txt', collection='lol',
|
|
||||||
create=False)
|
|
||||||
|
|
||||||
def test_is_not_directory(self, tmpdir):
|
def test_is_not_directory(self, tmpdir):
|
||||||
with pytest.raises(IOError):
|
with pytest.raises(OSError):
|
||||||
f = tmpdir.join('hue')
|
f = tmpdir.join("hue")
|
||||||
f.write('stub')
|
f.write("stub")
|
||||||
self.storage_class(str(tmpdir), '.txt', collection='hue')
|
self.storage_class(str(tmpdir) + "/hue", ".txt")
|
||||||
|
|
||||||
def test_create_is_true(self, tmpdir):
|
@pytest.mark.asyncio
|
||||||
self.storage_class(str(tmpdir), '.txt', collection='asd')
|
async def test_broken_data(self, tmpdir):
|
||||||
assert tmpdir.listdir() == [tmpdir.join('asd')]
|
s = self.storage_class(str(tmpdir), ".txt")
|
||||||
|
|
||||||
def test_broken_data(self, tmpdir):
|
class BrokenItem:
|
||||||
s = self.storage_class(str(tmpdir), '.txt')
|
raw = "Ц, Ш, Л, ж, Д, З, Ю".encode()
|
||||||
|
uid = "jeezus"
|
||||||
class BrokenItem(object):
|
|
||||||
raw = u'Ц, Ш, Л, ж, Д, З, Ю'.encode('utf-8')
|
|
||||||
uid = 'jeezus'
|
|
||||||
ident = uid
|
ident = uid
|
||||||
|
|
||||||
with pytest.raises(TypeError):
|
with pytest.raises(TypeError):
|
||||||
s.upload(BrokenItem)
|
await s.upload(BrokenItem)
|
||||||
assert not tmpdir.listdir()
|
assert not tmpdir.listdir()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_ident_with_slash(self, tmpdir):
|
||||||
|
s = self.storage_class(str(tmpdir), ".txt")
|
||||||
|
await s.upload(Item("UID:a/b/c"))
|
||||||
|
(item_file,) = tmpdir.listdir()
|
||||||
|
assert "/" not in item_file.basename
|
||||||
|
assert item_file.isfile()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_ignore_tmp_files(self, tmpdir):
|
||||||
|
"""Test that files with .tmp suffix beside .ics files are ignored."""
|
||||||
|
s = self.storage_class(str(tmpdir), ".ics")
|
||||||
|
await s.upload(Item("UID:xyzxyz"))
|
||||||
|
(item_file,) = tmpdir.listdir()
|
||||||
|
item_file.copy(item_file.new(ext="tmp"))
|
||||||
|
assert len(tmpdir.listdir()) == 2
|
||||||
|
assert len(await aiostream.stream.list(s.list())) == 1
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_ignore_tmp_files_empty_fileext(self, tmpdir):
|
||||||
|
"""Test that files with .tmp suffix are ignored with empty fileext."""
|
||||||
|
s = self.storage_class(str(tmpdir), "")
|
||||||
|
await s.upload(Item("UID:xyzxyz"))
|
||||||
|
(item_file,) = tmpdir.listdir()
|
||||||
|
item_file.copy(item_file.new(ext="tmp"))
|
||||||
|
assert len(tmpdir.listdir()) == 2
|
||||||
|
# assert False, tmpdir.listdir() # enable to see the created filename
|
||||||
|
assert len(await aiostream.stream.list(s.list())) == 1
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_ignore_files_typical_backup(self, tmpdir):
|
||||||
|
"""Test file-name ignorance with typical backup ending ~."""
|
||||||
|
ignorext = "~" # without dot
|
||||||
|
|
||||||
|
storage = self.storage_class(str(tmpdir), "", fileignoreext=ignorext)
|
||||||
|
await storage.upload(Item("UID:xyzxyz"))
|
||||||
|
(item_file,) = tmpdir.listdir()
|
||||||
|
item_file.copy(item_file.new(basename=item_file.basename + ignorext))
|
||||||
|
|
||||||
|
assert len(tmpdir.listdir()) == 2
|
||||||
|
assert len(await aiostream.stream.list(storage.list())) == 1
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_too_long_uid(self, tmpdir):
|
||||||
|
storage = self.storage_class(str(tmpdir), ".txt")
|
||||||
|
item = Item("UID:" + "hue" * 600)
|
||||||
|
|
||||||
|
href, _etag = await storage.upload(item)
|
||||||
|
assert item.uid not in href
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_post_hook_inactive(self, tmpdir, monkeypatch):
|
||||||
|
def check_call_mock(*args, **kwargs):
|
||||||
|
raise AssertionError
|
||||||
|
|
||||||
|
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
||||||
|
|
||||||
|
s = self.storage_class(str(tmpdir), ".txt", post_hook=None)
|
||||||
|
await s.upload(Item("UID:a/b/c"))
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_post_hook_active(self, tmpdir, monkeypatch):
|
||||||
|
calls = []
|
||||||
|
exe = "foo"
|
||||||
|
|
||||||
|
def check_call_mock(call, *args, **kwargs):
|
||||||
|
calls.append(True)
|
||||||
|
assert len(call) == 2
|
||||||
|
assert call[0] == exe
|
||||||
|
|
||||||
|
monkeypatch.setattr(subprocess, "call", check_call_mock)
|
||||||
|
|
||||||
|
s = self.storage_class(str(tmpdir), ".txt", post_hook=exe)
|
||||||
|
await s.upload(Item("UID:a/b/c"))
|
||||||
|
assert calls
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_ignore_git_dirs(self, tmpdir):
|
||||||
|
tmpdir.mkdir(".git").mkdir("foo")
|
||||||
|
tmpdir.mkdir("a")
|
||||||
|
tmpdir.mkdir("b")
|
||||||
|
|
||||||
|
expected = {"a", "b"}
|
||||||
|
actual = {
|
||||||
|
c["collection"] async for c in self.storage_class.discover(str(tmpdir))
|
||||||
|
}
|
||||||
|
assert actual == expected
|
||||||
|
|
|
||||||
|
|
@ -1,86 +1,163 @@
|
||||||
# -*- coding: utf-8 -*-
|
from __future__ import annotations
|
||||||
'''
|
|
||||||
tests.storage.test_http
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
import pytest
|
import pytest
|
||||||
|
from aioresponses import CallbackResult
|
||||||
from requests import Response
|
from aioresponses import aioresponses
|
||||||
|
|
||||||
from tests import normalize_item
|
from tests import normalize_item
|
||||||
|
from vdirsyncer.exceptions import UserError
|
||||||
|
from vdirsyncer.http import BasicAuthMethod
|
||||||
|
from vdirsyncer.http import DigestAuthMethod
|
||||||
|
from vdirsyncer.http import UsageLimitReached
|
||||||
|
from vdirsyncer.http import request
|
||||||
from vdirsyncer.storage.http import HttpStorage
|
from vdirsyncer.storage.http import HttpStorage
|
||||||
|
from vdirsyncer.storage.http import prepare_auth
|
||||||
|
|
||||||
|
|
||||||
def test_list(monkeypatch):
|
@pytest.mark.asyncio
|
||||||
collection_url = 'http://127.0.0.1/calendar/collection.ics'
|
async def test_list(aio_connector):
|
||||||
|
collection_url = "http://127.0.0.1/calendar/collection.ics"
|
||||||
|
|
||||||
items = [
|
items = [
|
||||||
(u'BEGIN:VEVENT\n'
|
(
|
||||||
u'SUMMARY:Eine Kurzinfo\n'
|
"BEGIN:VEVENT\n"
|
||||||
u'DESCRIPTION:Beschreibung des Termines\n'
|
"SUMMARY:Eine Kurzinfo\n"
|
||||||
u'END:VEVENT'),
|
"DESCRIPTION:Beschreibung des Termines\n"
|
||||||
(u'BEGIN:VEVENT\n'
|
"END:VEVENT"
|
||||||
u'SUMMARY:Eine zweite Küèrzinfo\n'
|
),
|
||||||
u'DESCRIPTION:Beschreibung des anderen Termines\n'
|
(
|
||||||
u'BEGIN:VALARM\n'
|
"BEGIN:VEVENT\n"
|
||||||
u'ACTION:AUDIO\n'
|
"SUMMARY:Eine zweite Küèrzinfo\n"
|
||||||
u'TRIGGER:19980403T120000\n'
|
"DESCRIPTION:Beschreibung des anderen Termines\n"
|
||||||
u'ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n'
|
"BEGIN:VALARM\n"
|
||||||
u'REPEAT:4\n'
|
"ACTION:AUDIO\n"
|
||||||
u'DURATION:PT1H\n'
|
"TRIGGER:19980403T120000\n"
|
||||||
u'END:VALARM\n'
|
"ATTACH;FMTTYPE=audio/basic:http://host.com/pub/ssbanner.aud\n"
|
||||||
u'END:VEVENT')
|
"REPEAT:4\n"
|
||||||
|
"DURATION:PT1H\n"
|
||||||
|
"END:VALARM\n"
|
||||||
|
"END:VEVENT"
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
responses = [
|
responses = ["\n".join(["BEGIN:VCALENDAR", *items, "END:VCALENDAR"])] * 2
|
||||||
u'\n'.join([u'BEGIN:VCALENDAR'] + items + [u'END:VCALENDAR'])
|
|
||||||
] * 2
|
|
||||||
|
|
||||||
def get(method, url, *a, **kw):
|
def callback(url, headers, **kwargs):
|
||||||
assert method == 'GET'
|
assert headers["User-Agent"].startswith("vdirsyncer/")
|
||||||
assert url == collection_url
|
|
||||||
r = Response()
|
|
||||||
r.status_code = 200
|
|
||||||
assert responses
|
assert responses
|
||||||
r._content = responses.pop().encode('utf-8')
|
|
||||||
r.headers['Content-Type'] = 'text/icalendar'
|
|
||||||
r.encoding = 'ISO-8859-1'
|
|
||||||
return r
|
|
||||||
|
|
||||||
monkeypatch.setattr('requests.request', get)
|
return CallbackResult(
|
||||||
|
status=200,
|
||||||
|
body=responses.pop().encode("utf-8"),
|
||||||
|
headers={"Content-Type": "text/calendar; charset=iso-8859-1"},
|
||||||
|
)
|
||||||
|
|
||||||
s = HttpStorage(url=collection_url)
|
with aioresponses() as m:
|
||||||
|
m.get(collection_url, callback=callback, repeat=True)
|
||||||
|
|
||||||
found_items = {}
|
s = HttpStorage(url=collection_url, connector=aio_connector)
|
||||||
|
|
||||||
for href, etag in s.list():
|
found_items = {}
|
||||||
item, etag2 = s.get(href)
|
|
||||||
assert item.uid is None
|
|
||||||
assert etag2 == etag
|
|
||||||
found_items[normalize_item(item)] = href
|
|
||||||
|
|
||||||
expected = set(normalize_item(u'BEGIN:VCALENDAR\n' + x + '\nEND:VCALENDAR')
|
async for href, etag in s.list():
|
||||||
for x in items)
|
item, etag2 = await s.get(href)
|
||||||
|
assert item.uid is not None
|
||||||
|
assert etag2 == etag
|
||||||
|
found_items[normalize_item(item)] = href
|
||||||
|
|
||||||
assert set(found_items) == expected
|
expected = {
|
||||||
|
normalize_item("BEGIN:VCALENDAR\n" + x + "\nEND:VCALENDAR") for x in items
|
||||||
|
}
|
||||||
|
|
||||||
for href, etag in s.list():
|
assert set(found_items) == expected
|
||||||
item, etag2 = s.get(href)
|
|
||||||
assert item.uid is None
|
async for href, etag in s.list():
|
||||||
assert etag2 == etag
|
item, etag2 = await s.get(href)
|
||||||
assert found_items[normalize_item(item)] == href
|
assert item.uid is not None
|
||||||
|
assert etag2 == etag
|
||||||
|
assert found_items[normalize_item(item)] == href
|
||||||
|
|
||||||
|
|
||||||
def test_readonly_param():
|
def test_readonly_param(aio_connector):
|
||||||
url = u'http://example.com/'
|
"""The ``readonly`` param cannot be ``False``."""
|
||||||
|
|
||||||
|
url = "http://example.com/"
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
HttpStorage(url=url, read_only=False)
|
HttpStorage(url=url, read_only=False, connector=aio_connector)
|
||||||
|
|
||||||
a = HttpStorage(url=url, read_only=True).read_only
|
a = HttpStorage(url=url, read_only=True, connector=aio_connector)
|
||||||
b = HttpStorage(url=url, read_only=None).read_only
|
b = HttpStorage(url=url, read_only=None, connector=aio_connector)
|
||||||
assert a is b is True
|
|
||||||
|
assert a.read_only is b.read_only is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_prepare_auth():
|
||||||
|
assert prepare_auth(None, "", "") is None
|
||||||
|
|
||||||
|
assert prepare_auth(None, "user", "pwd") == BasicAuthMethod("user", "pwd")
|
||||||
|
assert prepare_auth("basic", "user", "pwd") == BasicAuthMethod("user", "pwd")
|
||||||
|
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
assert prepare_auth("basic", "", "pwd")
|
||||||
|
assert "you need to specify username and password" in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
assert isinstance(prepare_auth("digest", "user", "pwd"), DigestAuthMethod)
|
||||||
|
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
prepare_auth("ladida", "user", "pwd")
|
||||||
|
|
||||||
|
assert "unknown authentication method" in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_prepare_auth_guess():
|
||||||
|
# guess auth is currently not supported
|
||||||
|
with pytest.raises(UserError) as excinfo:
|
||||||
|
prepare_auth("guess", "usr", "pwd")
|
||||||
|
|
||||||
|
assert "not supported" in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_verify_false_disallowed(aio_connector):
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
HttpStorage(url="http://example.com", verify=False, connector=aio_connector)
|
||||||
|
|
||||||
|
assert "must be a path to a pem-file." in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_403_usage_limit_exceeded(aio_connector):
|
||||||
|
url = "http://127.0.0.1/test_403"
|
||||||
|
error_body = {
|
||||||
|
"error": {
|
||||||
|
"errors": [
|
||||||
|
{
|
||||||
|
"domain": "usageLimits",
|
||||||
|
"message": "Calendar usage limits exceeded.",
|
||||||
|
"reason": "quotaExceeded",
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"code": 403,
|
||||||
|
"message": "Calendar usage limits exceeded.",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession(connector=aio_connector) as session:
|
||||||
|
with aioresponses() as m:
|
||||||
|
m.get(url, status=403, payload=error_body, repeat=True)
|
||||||
|
with pytest.raises(UsageLimitReached):
|
||||||
|
await request("GET", url, session)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_403_without_usage_limits_domain(aio_connector):
|
||||||
|
"""A 403 JSON error without the Google 'usageLimits' domain should not be
|
||||||
|
treated as UsageLimitReached and should surface as ClientResponseError.
|
||||||
|
"""
|
||||||
|
url = "http://127.0.0.1/test_403_no_usage_limits"
|
||||||
|
|
||||||
|
async with aiohttp.ClientSession(connector=aio_connector) as session:
|
||||||
|
with aioresponses() as m:
|
||||||
|
m.get(url, status=403, repeat=True)
|
||||||
|
with pytest.raises(aiohttp.ClientResponseError):
|
||||||
|
await request("GET", url, session)
|
||||||
|
|
|
||||||
|
|
@ -1,93 +1,93 @@
|
||||||
# -*- coding: utf-8 -*-
|
from __future__ import annotations
|
||||||
'''
|
|
||||||
tests.storage.test_http_with_singlefile
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
|
|
||||||
|
import aiostream
|
||||||
import pytest
|
import pytest
|
||||||
|
from aioresponses import CallbackResult
|
||||||
|
from aioresponses import aioresponses
|
||||||
|
|
||||||
from requests import Response
|
|
||||||
|
|
||||||
from vdirsyncer.storage.base import Storage
|
|
||||||
import vdirsyncer.storage.http
|
import vdirsyncer.storage.http
|
||||||
|
from vdirsyncer.storage.base import Storage
|
||||||
from vdirsyncer.storage.singlefile import SingleFileStorage
|
from vdirsyncer.storage.singlefile import SingleFileStorage
|
||||||
|
|
||||||
from . import BaseStorageTests
|
from . import StorageTests
|
||||||
from .. import EVENT_TEMPLATE, assert_item_equals
|
|
||||||
|
|
||||||
|
|
||||||
class CombinedStorage(Storage):
|
class CombinedStorage(Storage):
|
||||||
'''A subclass of HttpStorage to make testing easier. It supports writes via
|
"""A subclass of HttpStorage to make testing easier. It supports writes via
|
||||||
SingleFileStorage.'''
|
SingleFileStorage."""
|
||||||
_repr_attributes = ('url', 'path')
|
|
||||||
|
|
||||||
def __init__(self, url, path, **kwargs):
|
_repr_attributes = ("url", "path")
|
||||||
super(CombinedStorage, self).__init__(**kwargs)
|
storage_name = "http_and_singlefile"
|
||||||
|
|
||||||
|
def __init__(self, url, path, *, connector, **kwargs):
|
||||||
|
if kwargs.get("collection") is not None:
|
||||||
|
raise ValueError
|
||||||
|
|
||||||
|
super().__init__(**kwargs)
|
||||||
self.url = url
|
self.url = url
|
||||||
self.path = path
|
self.path = path
|
||||||
self._reader = vdirsyncer.storage.http.HttpStorage(url=url)
|
self._reader = vdirsyncer.storage.http.HttpStorage(url=url, connector=connector)
|
||||||
|
self._reader._ignore_uids = False
|
||||||
self._writer = SingleFileStorage(path=path)
|
self._writer = SingleFileStorage(path=path)
|
||||||
|
|
||||||
def list(self, *a, **kw):
|
async def list(self, *a, **kw):
|
||||||
return self._reader.list(*a, **kw)
|
async for item in self._reader.list(*a, **kw):
|
||||||
|
yield item
|
||||||
|
|
||||||
def get(self, *a, **kw):
|
async def get(self, *a, **kw):
|
||||||
self.list()
|
await aiostream.stream.list(self.list())
|
||||||
return self._reader.get(*a, **kw)
|
return await self._reader.get(*a, **kw)
|
||||||
|
|
||||||
def upload(self, *a, **kw):
|
async def upload(self, *a, **kw):
|
||||||
return self._writer.upload(*a, **kw)
|
return await self._writer.upload(*a, **kw)
|
||||||
|
|
||||||
def update(self, *a, **kw):
|
async def update(self, *a, **kw):
|
||||||
return self._writer.update(*a, **kw)
|
return await self._writer.update(*a, **kw)
|
||||||
|
|
||||||
def delete(self, *a, **kw):
|
async def delete(self, *a, **kw):
|
||||||
return self._writer.delete(*a, **kw)
|
return await self._writer.delete(*a, **kw)
|
||||||
|
|
||||||
|
|
||||||
class TestHttpStorage(BaseStorageTests):
|
class TestHttpStorage(StorageTests):
|
||||||
storage_class = CombinedStorage
|
storage_class = CombinedStorage
|
||||||
item_template = EVENT_TEMPLATE
|
supports_collections = False
|
||||||
|
supports_metadata = False
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
def setup_tmpdir(self, tmpdir, monkeypatch):
|
def setup_tmpdir(self, tmpdir, monkeypatch):
|
||||||
self.tmpfile = str(tmpdir.join('collection.txt'))
|
self.tmpfile = str(tmpdir.ensure("collection.txt"))
|
||||||
|
|
||||||
def _request(method, url, *args, **kwargs):
|
def callback(url, headers, **kwargs):
|
||||||
assert method == 'GET'
|
"""Read our tmpfile at request time.
|
||||||
assert url == 'http://localhost:123/collection.txt'
|
|
||||||
r = Response()
|
|
||||||
r.status_code = 200
|
|
||||||
try:
|
|
||||||
with open(self.tmpfile, 'rb') as f:
|
|
||||||
r._content = f.read()
|
|
||||||
except IOError:
|
|
||||||
r._content = b''
|
|
||||||
|
|
||||||
r.headers['Content-Type'] = 'text/icalendar'
|
We can't just read this during test setup since the file get written to
|
||||||
r.encoding = 'ISO-8859-1'
|
during test execution.
|
||||||
return r
|
|
||||||
|
|
||||||
monkeypatch.setattr(vdirsyncer.storage.http, 'request', _request)
|
It might make sense to actually run a server serving the local file.
|
||||||
|
"""
|
||||||
|
assert headers["User-Agent"].startswith("vdirsyncer/")
|
||||||
|
|
||||||
def get_storage_args(self, collection=None):
|
with open(self.tmpfile) as f:
|
||||||
assert collection is None
|
body = f.read()
|
||||||
return {'url': 'http://localhost:123/collection.txt',
|
|
||||||
'path': self.tmpfile}
|
|
||||||
|
|
||||||
def test_update(self, s):
|
return CallbackResult(
|
||||||
'''The original testcase tries to fetch with the old href. But this
|
status=200,
|
||||||
storage doesn't have real hrefs, so the href might change if the
|
body=body,
|
||||||
underlying UID changes. '''
|
headers={"Content-Type": "text/calendar; charset=utf-8"},
|
||||||
|
)
|
||||||
|
|
||||||
item = self._create_bogus_item()
|
with aioresponses() as m:
|
||||||
href, etag = s.upload(item)
|
m.get("http://localhost:123/collection.txt", callback=callback, repeat=True)
|
||||||
assert_item_equals(s.get(href)[0], item)
|
yield
|
||||||
|
|
||||||
new_item = self._create_bogus_item()
|
@pytest.fixture
|
||||||
s.update(href, new_item, etag)
|
def get_storage_args(self, aio_connector):
|
||||||
((new_href, new_etag),) = s.list()
|
async def inner(collection=None):
|
||||||
assert_item_equals(s.get(new_href)[0], new_item)
|
assert collection is None
|
||||||
|
return {
|
||||||
|
"url": "http://localhost:123/collection.txt",
|
||||||
|
"path": self.tmpfile,
|
||||||
|
"connector": aio_connector,
|
||||||
|
}
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,21 +1,19 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
# -*- coding: utf-8 -*-
|
import pytest
|
||||||
'''
|
|
||||||
tests.storage.test_memory
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
|
|
||||||
from vdirsyncer.storage.memory import MemoryStorage
|
from vdirsyncer.storage.memory import MemoryStorage
|
||||||
|
|
||||||
from . import BaseStorageTests
|
from . import StorageTests
|
||||||
|
|
||||||
|
|
||||||
class TestMemoryStorage(BaseStorageTests):
|
class TestMemoryStorage(StorageTests):
|
||||||
|
|
||||||
storage_class = MemoryStorage
|
storage_class = MemoryStorage
|
||||||
|
supports_collections = False
|
||||||
|
|
||||||
def get_storage_args(self, **kwargs):
|
@pytest.fixture
|
||||||
return kwargs
|
def get_storage_args(self):
|
||||||
|
async def inner(**args):
|
||||||
|
return args
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,57 +1,22 @@
|
||||||
# -*- coding: utf-8 -*-
|
from __future__ import annotations
|
||||||
'''
|
|
||||||
tests.storage.test_singlefile
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
:copyright: (c) 2014 Markus Unterwaditzer & contributors
|
|
||||||
:license: MIT, see LICENSE for more details.
|
|
||||||
'''
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vdirsyncer.storage.singlefile import SingleFileStorage
|
from vdirsyncer.storage.singlefile import SingleFileStorage
|
||||||
|
|
||||||
from . import BaseStorageTests
|
from . import StorageTests
|
||||||
from .. import EVENT_TEMPLATE, assert_item_equals
|
|
||||||
|
|
||||||
|
|
||||||
class TestSingleFileStorage(BaseStorageTests):
|
class TestSingleFileStorage(StorageTests):
|
||||||
|
|
||||||
storage_class = SingleFileStorage
|
storage_class = SingleFileStorage
|
||||||
item_template = EVENT_TEMPLATE
|
supports_metadata = False
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture
|
||||||
def setup(self, tmpdir):
|
def get_storage_args(self, tmpdir):
|
||||||
self._path = str(tmpdir.join('test.txt'))
|
async def inner(collection="test"):
|
||||||
|
rv = {"path": str(tmpdir.join("%s.txt")), "collection": collection}
|
||||||
|
if collection is not None:
|
||||||
|
rv = await self.storage_class.create_collection(**rv)
|
||||||
|
return rv
|
||||||
|
|
||||||
def get_storage_args(self, **kwargs):
|
return inner
|
||||||
return dict(path=self._path)
|
|
||||||
|
|
||||||
def test_collection_arg(self, tmpdir):
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
self.storage_class(str(tmpdir.join('foo.ics')), collection='ha')
|
|
||||||
|
|
||||||
def test_create_arg(self, tmpdir):
|
|
||||||
s = self.storage_class(str(tmpdir) + '/foo.ics')
|
|
||||||
assert not s.list()
|
|
||||||
|
|
||||||
s.create = False
|
|
||||||
with pytest.raises(IOError):
|
|
||||||
s.list()
|
|
||||||
|
|
||||||
with pytest.raises(IOError):
|
|
||||||
s = self.storage_class(str(tmpdir) + '/foo.ics', create=False)
|
|
||||||
|
|
||||||
def test_update(self, s):
|
|
||||||
'''The original testcase tries to fetch with the old href. But this
|
|
||||||
storage doesn't have real hrefs, so the href might change if the
|
|
||||||
underlying UID changes. '''
|
|
||||||
|
|
||||||
item = self._create_bogus_item()
|
|
||||||
href, etag = s.upload(item)
|
|
||||||
assert_item_equals(s.get(href)[0], item)
|
|
||||||
|
|
||||||
new_item = self._create_bogus_item()
|
|
||||||
s.update(href, new_item, etag)
|
|
||||||
((new_href, new_etag),) = s.list()
|
|
||||||
assert_item_equals(s.get(new_href)[0], new_item)
|
|
||||||
|
|
|
||||||
0
tests/system/cli/__init__.py
Normal file
0
tests/system/cli/__init__.py
Normal file
36
tests/system/cli/conftest.py
Normal file
36
tests/system/cli/conftest.py
Normal file
|
|
@ -0,0 +1,36 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from click.testing import CliRunner
|
||||||
|
|
||||||
|
import vdirsyncer.cli as cli
|
||||||
|
|
||||||
|
|
||||||
|
class _CustomRunner:
|
||||||
|
def __init__(self, tmpdir):
|
||||||
|
self.tmpdir = tmpdir
|
||||||
|
self.cfg = tmpdir.join("config")
|
||||||
|
self.runner = CliRunner()
|
||||||
|
|
||||||
|
def invoke(self, args, env=None, **kwargs):
|
||||||
|
env = env or {}
|
||||||
|
env.setdefault("VDIRSYNCER_CONFIG", str(self.cfg))
|
||||||
|
return self.runner.invoke(cli.app, args, env=env, **kwargs)
|
||||||
|
|
||||||
|
def write_with_general(self, data):
|
||||||
|
self.cfg.write(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "{}/status/"
|
||||||
|
"""
|
||||||
|
).format(str(self.tmpdir))
|
||||||
|
)
|
||||||
|
self.cfg.write(data, mode="a")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def runner(tmpdir):
|
||||||
|
return _CustomRunner(tmpdir)
|
||||||
283
tests/system/cli/test_config.py
Normal file
283
tests/system/cli/test_config.py
Normal file
|
|
@ -0,0 +1,283 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import io
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer import cli
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.cli.config import Config
|
||||||
|
|
||||||
|
invalid = object()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def read_config(tmpdir, monkeypatch):
|
||||||
|
def inner(cfg):
|
||||||
|
errors = []
|
||||||
|
monkeypatch.setattr("vdirsyncer.cli.cli_logger.error", errors.append)
|
||||||
|
f = io.StringIO(dedent(cfg.format(base=str(tmpdir))))
|
||||||
|
rv = Config.from_fileobject(f)
|
||||||
|
monkeypatch.undo()
|
||||||
|
return errors, rv
|
||||||
|
|
||||||
|
return inner
|
||||||
|
|
||||||
|
|
||||||
|
def test_read_config(read_config):
|
||||||
|
_errors, c = read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[pair bob]
|
||||||
|
a = "bob_a"
|
||||||
|
b = "bob_b"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage bob_a]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "/tmp/contacts/"
|
||||||
|
fileext = ".vcf"
|
||||||
|
yesno = false
|
||||||
|
number = 42
|
||||||
|
|
||||||
|
[storage bob_b]
|
||||||
|
type = "carddav"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert c.general == {"status_path": "/tmp/status/"}
|
||||||
|
|
||||||
|
assert set(c.pairs) == {"bob"}
|
||||||
|
bob = c.pairs["bob"]
|
||||||
|
assert bob.collections is None
|
||||||
|
|
||||||
|
assert c.storages == {
|
||||||
|
"bob_a": {
|
||||||
|
"type": "filesystem",
|
||||||
|
"path": "/tmp/contacts/",
|
||||||
|
"fileext": ".vcf",
|
||||||
|
"yesno": False,
|
||||||
|
"number": 42,
|
||||||
|
"instance_name": "bob_a",
|
||||||
|
},
|
||||||
|
"bob_b": {"type": "carddav", "instance_name": "bob_b"},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_missing_collections_param(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[pair bob]
|
||||||
|
a = "bob_a"
|
||||||
|
b = "bob_b"
|
||||||
|
|
||||||
|
[storage bob_a]
|
||||||
|
type = "lmao"
|
||||||
|
|
||||||
|
[storage bob_b]
|
||||||
|
type = "lmao"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "collections parameter missing" in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_section_type(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[bogus]
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "Unknown section" in str(excinfo.value)
|
||||||
|
assert "bogus" in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_missing_general_section(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[pair my_pair]
|
||||||
|
a = "my_a"
|
||||||
|
b = "my_b"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_a/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_b/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "Invalid general section." in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_wrong_general_section(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
wrong = true
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "Invalid general section." in str(excinfo.value)
|
||||||
|
assert excinfo.value.problems == [
|
||||||
|
"general section doesn't take the parameters: wrong",
|
||||||
|
"general section is missing the parameters: status_path",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_storage_name(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "{base}/status/"
|
||||||
|
|
||||||
|
[storage foo.bar]
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "invalid characters" in str(excinfo.value).lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_collections_arg(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = [null]
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "/tmp/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "/tmp/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "Expected string" in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_duplicate_sections(read_config):
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foobar"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage foobar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "/tmp/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "/tmp/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert 'Name "foobar" already used' in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_validate_collections_param():
|
||||||
|
x = cli.config._validate_collections_param
|
||||||
|
x(None)
|
||||||
|
x(["c", "a", "b"])
|
||||||
|
pytest.raises(ValueError, x, [None])
|
||||||
|
pytest.raises(ValueError, x, ["a", "a", "a"])
|
||||||
|
pytest.raises(ValueError, x, [[None, "a", "b"]])
|
||||||
|
x([["c", None, "b"]])
|
||||||
|
x([["c", "a", None]])
|
||||||
|
x([["c", None, None]])
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_implicit_value(read_config):
|
||||||
|
expected_message = "`implicit` parameter must be 'create' or absent"
|
||||||
|
with pytest.raises(exceptions.UserError) as excinfo:
|
||||||
|
read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[pair my_pair]
|
||||||
|
a = "my_a"
|
||||||
|
b = "my_b"
|
||||||
|
collections = null
|
||||||
|
implicit = "invalid"
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_a/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_b/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert expected_message in str(excinfo.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_implicit_create_only(read_config):
|
||||||
|
"""Test that implicit create works."""
|
||||||
|
errors, c = read_config(
|
||||||
|
"""
|
||||||
|
[general]
|
||||||
|
status_path = "/tmp/status/"
|
||||||
|
|
||||||
|
[pair my_pair]
|
||||||
|
a = "my_a"
|
||||||
|
b = "my_b"
|
||||||
|
collections = ["from a", "from b"]
|
||||||
|
implicit = "create"
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_a/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/path_b/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
assert not errors
|
||||||
|
pair = c.pairs["my_pair"]
|
||||||
|
assert pair.implicit == "create"
|
||||||
287
tests/system/cli/test_discover.py
Normal file
287
tests/system/cli/test_discover.py
Normal file
|
|
@ -0,0 +1,287 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.storage.base import Storage
|
||||||
|
|
||||||
|
|
||||||
|
def test_discover_command(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = ["from a"]
|
||||||
|
"""
|
||||||
|
).format(str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
foo = tmpdir.mkdir("foo")
|
||||||
|
bar = tmpdir.mkdir("bar")
|
||||||
|
|
||||||
|
for x in "abc":
|
||||||
|
foo.mkdir(x)
|
||||||
|
bar.mkdir(x)
|
||||||
|
bar.mkdir("d")
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
foo.mkdir("d")
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
lines = result.output.splitlines()
|
||||||
|
assert "Syncing foobar/a" in lines
|
||||||
|
assert "Syncing foobar/b" in lines
|
||||||
|
assert "Syncing foobar/c" in lines
|
||||||
|
assert "Syncing foobar/d" not in result.output
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
assert "Syncing foobar/a" in lines
|
||||||
|
assert "Syncing foobar/b" in lines
|
||||||
|
assert "Syncing foobar/c" in lines
|
||||||
|
assert "Syncing foobar/d" in result.output
|
||||||
|
|
||||||
|
# Check for redundant data that is already in the config. This avoids
|
||||||
|
# copying passwords from the config too.
|
||||||
|
assert "fileext" not in tmpdir.join("status").join("foobar.collections").read()
|
||||||
|
|
||||||
|
|
||||||
|
def test_discover_different_collection_names(tmpdir, runner):
|
||||||
|
foo = tmpdir.mkdir("foo")
|
||||||
|
bar = tmpdir.mkdir("bar")
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".txt"
|
||||||
|
path = "{foo}"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".txt"
|
||||||
|
path = "{bar}"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = [
|
||||||
|
["coll1", "coll_a1", "coll_b1"],
|
||||||
|
"coll2"
|
||||||
|
]
|
||||||
|
"""
|
||||||
|
).format(foo=str(foo), bar=str(bar))
|
||||||
|
)
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"], input="y\n" * 6)
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
coll_a1 = foo.join("coll_a1")
|
||||||
|
coll_b1 = bar.join("coll_b1")
|
||||||
|
|
||||||
|
assert coll_a1.exists()
|
||||||
|
assert coll_b1.exists()
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
foo_txt = coll_a1.join("foo.txt")
|
||||||
|
foo_txt.write("BEGIN:VCALENDAR\nUID:foo\nEND:VCALENDAR")
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
assert foo_txt.exists()
|
||||||
|
assert coll_b1.join("foo.txt").exists()
|
||||||
|
|
||||||
|
|
||||||
|
def test_discover_direct_path(tmpdir, runner):
|
||||||
|
foo = tmpdir.join("foo")
|
||||||
|
bar = tmpdir.join("bar")
|
||||||
|
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".txt"
|
||||||
|
path = "{foo}"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".txt"
|
||||||
|
path = "{bar}"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
"""
|
||||||
|
).format(foo=str(foo), bar=str(bar))
|
||||||
|
)
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"], input="y\n" * 2)
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
assert foo.exists()
|
||||||
|
assert bar.exists()
|
||||||
|
|
||||||
|
|
||||||
|
def test_null_collection_with_named_collection(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
f"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = [["baz", "baz", null]]
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{tmpdir!s}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "singlefile"
|
||||||
|
path = "{tmpdir!s}/bar.txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"], input="y\n" * 2)
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
foo = tmpdir.join("foo")
|
||||||
|
foobaz = foo.join("baz")
|
||||||
|
assert foo.exists()
|
||||||
|
assert foobaz.exists()
|
||||||
|
|
||||||
|
bar = tmpdir.join("bar.txt")
|
||||||
|
assert bar.exists()
|
||||||
|
|
||||||
|
foobaz.join("lol.txt").write("BEGIN:VCARD\nUID:HAHA\nEND:VCARD")
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
assert "HAHA" in bar.read()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
("a_requires", "b_requires"),
|
||||||
|
[
|
||||||
|
(True, True),
|
||||||
|
(True, False),
|
||||||
|
(False, True),
|
||||||
|
(False, False),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch):
|
||||||
|
class TestStorage(Storage):
|
||||||
|
storage_name = "test"
|
||||||
|
|
||||||
|
def __init__(self, require_collection, **kw):
|
||||||
|
if require_collection:
|
||||||
|
assert not kw.get("collection")
|
||||||
|
raise exceptions.CollectionRequired
|
||||||
|
|
||||||
|
async def get(self, href: str):
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def list(self) -> list[tuple]:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
from vdirsyncer.cli.utils import storage_names
|
||||||
|
|
||||||
|
monkeypatch.setitem(storage_names._storages, "test", TestStorage)
|
||||||
|
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
f"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "test"
|
||||||
|
require_collection = {json.dumps(a_requires)}
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "test"
|
||||||
|
require_collection = {json.dumps(b_requires)}
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
if a_requires or b_requires:
|
||||||
|
assert result.exception
|
||||||
|
assert (
|
||||||
|
"One or more storages don't support `collections = null`." in result.output
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_showconfig(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = ["from a"]
|
||||||
|
"""
|
||||||
|
).format(str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
result = runner.invoke(["showconfig"])
|
||||||
|
assert not result.exception
|
||||||
|
assert json.loads(result.output) == {
|
||||||
|
"storages": [
|
||||||
|
{
|
||||||
|
"type": "filesystem",
|
||||||
|
"path": f"{tmpdir}/foo/",
|
||||||
|
"fileext": ".txt",
|
||||||
|
"instance_name": "foo",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "filesystem",
|
||||||
|
"path": f"{tmpdir}/bar/",
|
||||||
|
"fileext": ".txt",
|
||||||
|
"instance_name": "bar",
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
48
tests/system/cli/test_fetchparams.py
Normal file
48
tests/system/cli/test_fetchparams.py
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_password_from_command(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
f"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = ["a", "b", "c"]
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type.fetch = ["shell", "echo filesystem"]
|
||||||
|
path = "{tmpdir!s}/foo/"
|
||||||
|
fileext.fetch = ["command", "echo", ".txt"]
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{tmpdir!s}/bar/"
|
||||||
|
fileext.fetch = ["prompt", "Fileext for bar"]
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
foo = tmpdir.ensure("foo", dir=True)
|
||||||
|
foo.ensure("a", dir=True)
|
||||||
|
foo.ensure("b", dir=True)
|
||||||
|
foo.ensure("c", dir=True)
|
||||||
|
bar = tmpdir.ensure("bar", dir=True)
|
||||||
|
bar.ensure("a", dir=True)
|
||||||
|
bar.ensure("b", dir=True)
|
||||||
|
bar.ensure("c", dir=True)
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"], input=".asdf\n")
|
||||||
|
assert not result.exception
|
||||||
|
status = tmpdir.join("status").join("foobar.collections").read()
|
||||||
|
assert "foo" in status
|
||||||
|
assert "bar" in status
|
||||||
|
assert "asdf" not in status
|
||||||
|
assert "txt" not in status
|
||||||
|
|
||||||
|
foo.join("a").join("foo.txt").write("BEGIN:VCARD\nUID:foo\nEND:VCARD")
|
||||||
|
result = runner.invoke(["sync"], input=".asdf\n")
|
||||||
|
assert not result.exception
|
||||||
|
assert [x.basename for x in bar.join("a").listdir()] == ["foo.asdf"]
|
||||||
78
tests/system/cli/test_repair.py
Normal file
78
tests/system/cli/test_repair.py
Normal file
|
|
@ -0,0 +1,78 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def storage(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{base}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
).format(base=str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
return tmpdir.mkdir("foo")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("collection", [None, "foocoll"])
|
||||||
|
def test_basic(storage, runner, collection):
|
||||||
|
if collection is not None:
|
||||||
|
storage = storage.mkdir(collection)
|
||||||
|
collection_arg = f"foo/{collection}"
|
||||||
|
else:
|
||||||
|
collection_arg = "foo"
|
||||||
|
|
||||||
|
argv = ["repair", collection_arg]
|
||||||
|
|
||||||
|
result = runner.invoke(argv, input="y")
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
storage.join("item.txt").write("BEGIN:VCARD\nEND:VCARD")
|
||||||
|
storage.join("toobroken.txt").write("")
|
||||||
|
|
||||||
|
result = runner.invoke(argv, input="y")
|
||||||
|
assert not result.exception
|
||||||
|
assert "No UID" in result.output
|
||||||
|
assert "'toobroken.txt' is malformed beyond repair" in result.output
|
||||||
|
(new_fname,) = (x for x in storage.listdir() if "toobroken" not in str(x))
|
||||||
|
assert "UID:" in new_fname.read()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("repair_uids", [None, True, False])
|
||||||
|
def test_repair_uids(storage, runner, repair_uids):
|
||||||
|
f = storage.join("baduid.txt")
|
||||||
|
orig_f = "BEGIN:VCARD\nUID:!!!!!\nEND:VCARD"
|
||||||
|
f.write(orig_f)
|
||||||
|
|
||||||
|
if repair_uids is None:
|
||||||
|
opt = []
|
||||||
|
elif repair_uids:
|
||||||
|
opt = ["--repair-unsafe-uid"]
|
||||||
|
else:
|
||||||
|
opt = ["--no-repair-unsafe-uid"]
|
||||||
|
|
||||||
|
result = runner.invoke(["repair", *opt, "foo"], input="y")
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
if repair_uids:
|
||||||
|
assert "UID or href is unsafe, assigning random UID" in result.output
|
||||||
|
assert not f.exists()
|
||||||
|
(new_f,) = storage.listdir()
|
||||||
|
s = new_f.read()
|
||||||
|
|
||||||
|
assert s.startswith("BEGIN:VCARD")
|
||||||
|
assert s.endswith("END:VCARD")
|
||||||
|
assert s != orig_f
|
||||||
|
else:
|
||||||
|
assert (
|
||||||
|
"UID may cause problems, add --repair-unsafe-uid to repair."
|
||||||
|
in result.output
|
||||||
|
)
|
||||||
|
assert f.read() == orig_f
|
||||||
579
tests/system/cli/test_sync.py
Normal file
579
tests/system/cli/test_sync.py
Normal file
|
|
@ -0,0 +1,579 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
|
from textwrap import dedent
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
def test_simple_run(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[pair my_pair]
|
||||||
|
a = "my_a"
|
||||||
|
b = "my_b"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/path_a/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/path_b/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
).format(str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
tmpdir.mkdir("path_a")
|
||||||
|
tmpdir.mkdir("path_b")
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
tmpdir.join("path_a/haha.txt").write("UID:haha")
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert "Copying (uploading) item haha to my_b" in result.output
|
||||||
|
assert tmpdir.join("path_b/haha.txt").read() == "UID:haha"
|
||||||
|
|
||||||
|
|
||||||
|
def test_sync_inexistant_pair(tmpdir, runner):
|
||||||
|
runner.write_with_general("")
|
||||||
|
|
||||||
|
result = runner.invoke(["sync", "foo"])
|
||||||
|
assert result.exception
|
||||||
|
assert "pair foo does not exist." in result.output.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_empty_storage(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[pair my_pair]
|
||||||
|
a = "my_a"
|
||||||
|
b = "my_b"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage my_a]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/path_a/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage my_b]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/path_b/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
).format(str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
tmpdir.mkdir("path_a")
|
||||||
|
tmpdir.mkdir("path_b")
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
tmpdir.join("path_a/haha.txt").write("UID:haha")
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
tmpdir.join("path_b/haha.txt").remove()
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
lines = result.output.splitlines()
|
||||||
|
assert lines[0] == "Syncing my_pair"
|
||||||
|
assert lines[1].startswith('error: my_pair: Storage "my_b" was completely emptied.')
|
||||||
|
assert result.exception
|
||||||
|
|
||||||
|
|
||||||
|
def test_verbosity(tmpdir, runner):
|
||||||
|
runner.write_with_general("")
|
||||||
|
result = runner.invoke(["--verbosity=HAHA", "sync"])
|
||||||
|
assert result.exception
|
||||||
|
assert (
|
||||||
|
'invalid value for "--verbosity"' in result.output.lower()
|
||||||
|
or "invalid value for '--verbosity'" in result.output.lower()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_collections_cache_invalidation(tmpdir, runner):
|
||||||
|
foo = tmpdir.mkdir("foo")
|
||||||
|
bar = tmpdir.mkdir("bar")
|
||||||
|
for x in "abc":
|
||||||
|
foo.mkdir(x)
|
||||||
|
bar.mkdir(x)
|
||||||
|
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = ["a", "b", "c"]
|
||||||
|
"""
|
||||||
|
).format(str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
foo.join("a/itemone.txt").write("UID:itemone")
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
assert "detected change in config file" not in result.output.lower()
|
||||||
|
|
||||||
|
rv = bar.join("a").listdir()
|
||||||
|
assert len(rv) == 1
|
||||||
|
assert rv[0].basename == "itemone.txt"
|
||||||
|
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/bar2/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = ["a", "b", "c"]
|
||||||
|
"""
|
||||||
|
).format(str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
for entry in tmpdir.join("status").listdir():
|
||||||
|
if not str(entry).endswith(".collections"):
|
||||||
|
entry.remove()
|
||||||
|
bar2 = tmpdir.mkdir("bar2")
|
||||||
|
for x in "abc":
|
||||||
|
bar2.mkdir(x)
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert "detected change in config file" in result.output.lower()
|
||||||
|
assert result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
rv = bar.join("a").listdir()
|
||||||
|
rv2 = bar2.join("a").listdir()
|
||||||
|
assert len(rv) == len(rv2) == 1
|
||||||
|
assert rv[0].basename == rv2[0].basename == "itemone.txt"
|
||||||
|
|
||||||
|
|
||||||
|
def test_invalid_pairs_as_cli_arg(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{0}/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = ["a", "b", "c"]
|
||||||
|
"""
|
||||||
|
).format(str(tmpdir))
|
||||||
|
)
|
||||||
|
|
||||||
|
for base in ("foo", "bar"):
|
||||||
|
base = tmpdir.mkdir(base)
|
||||||
|
for c in "abc":
|
||||||
|
base.mkdir(c)
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["sync", "foobar/d"])
|
||||||
|
assert result.exception
|
||||||
|
assert 'pair foobar: collection "d" not found' in result.output.lower()
|
||||||
|
|
||||||
|
|
||||||
|
def test_multiple_pairs(tmpdir, runner):
|
||||||
|
def get_cfg():
|
||||||
|
for name_a, name_b in ("foo", "bar"), ("bam", "baz"):
|
||||||
|
yield dedent(
|
||||||
|
"""
|
||||||
|
[pair {a}{b}]
|
||||||
|
a = "{a}"
|
||||||
|
b = "{b}"
|
||||||
|
collections = null
|
||||||
|
"""
|
||||||
|
).format(a=name_a, b=name_b)
|
||||||
|
|
||||||
|
for name in name_a, name_b:
|
||||||
|
yield dedent(
|
||||||
|
"""
|
||||||
|
[storage {name}]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{path}"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
).format(name=name, path=str(tmpdir.mkdir(name)))
|
||||||
|
|
||||||
|
runner.write_with_general("".join(get_cfg()))
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
assert set(result.output.splitlines()) > {
|
||||||
|
"Discovering collections for pair bambaz",
|
||||||
|
"Discovering collections for pair foobar",
|
||||||
|
}
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert not result.exception
|
||||||
|
assert set(result.output.splitlines()) == {
|
||||||
|
"Syncing bambaz",
|
||||||
|
"Syncing foobar",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# XXX: https://github.com/pimutils/vdirsyncer/issues/617
|
||||||
|
@pytest.mark.skipif(sys.platform == "darwin", reason="This test inexplicably fails")
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"collections",
|
||||||
|
[
|
||||||
|
("a", "A"),
|
||||||
|
("\ufffe",),
|
||||||
|
("Hello there!",),
|
||||||
|
("Österreich",),
|
||||||
|
("中国", "x1"),
|
||||||
|
("한글",),
|
||||||
|
("42a4ec99-b1c2-4859-b142-759112f2ca50",),
|
||||||
|
("فلسطين",),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_create_collections(collections, tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
f"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = {json.dumps(list(collections))}
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{tmpdir!s}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{tmpdir!s}/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"], input="y\n" * 2 * (len(collections) + 1))
|
||||||
|
assert not result.exception, result.output
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"] + ["foobar/" + x for x in collections])
|
||||||
|
assert not result.exception, result.output
|
||||||
|
|
||||||
|
assert {x.basename for x in tmpdir.join("foo").listdir()} == {
|
||||||
|
x.basename for x in tmpdir.join("bar").listdir()
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_ident_conflict(tmpdir, runner):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
f"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{tmpdir!s}/foo/"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{tmpdir!s}/bar/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
foo = tmpdir.mkdir("foo")
|
||||||
|
tmpdir.mkdir("bar")
|
||||||
|
|
||||||
|
foo.join("one.txt").write("UID:1")
|
||||||
|
foo.join("two.txt").write("UID:1")
|
||||||
|
foo.join("three.txt").write("UID:1")
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
result = runner.invoke(["sync"])
|
||||||
|
assert result.exception
|
||||||
|
assert (
|
||||||
|
'error: foobar: Storage "foo" contains multiple items with the '
|
||||||
|
"same UID or even content"
|
||||||
|
) in result.output
|
||||||
|
assert sorted(
|
||||||
|
[
|
||||||
|
"one.txt" in result.output,
|
||||||
|
"two.txt" in result.output,
|
||||||
|
"three.txt" in result.output,
|
||||||
|
]
|
||||||
|
) == [False, True, True]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
("existing", "missing"),
|
||||||
|
[
|
||||||
|
("foo", "bar"),
|
||||||
|
("bar", "foo"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_unknown_storage(tmpdir, runner, existing, missing):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
f"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage {existing}]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{tmpdir!s}/{existing}/"
|
||||||
|
fileext = ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
tmpdir.mkdir(existing)
|
||||||
|
|
||||||
|
result = runner.invoke(["discover"])
|
||||||
|
assert result.exception
|
||||||
|
|
||||||
|
assert (
|
||||||
|
f"Storage '{missing}' not found. "
|
||||||
|
f"These are the configured storages: ['{existing}']"
|
||||||
|
) in result.output
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("cmd", ["sync", "metasync"])
|
||||||
|
def test_no_configured_pairs(tmpdir, runner, cmd):
|
||||||
|
runner.write_with_general("")
|
||||||
|
|
||||||
|
result = runner.invoke([cmd])
|
||||||
|
assert result.output == ""
|
||||||
|
assert not result.exception
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
("resolution", "expect_foo", "expect_bar"),
|
||||||
|
[(["command", "cp"], "UID:lol\nfööcontent", "UID:lol\nfööcontent")],
|
||||||
|
)
|
||||||
|
def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
f"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
conflict_resolution = {json.dumps(resolution)}
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".txt"
|
||||||
|
path = "{tmpdir!s}/foo"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".txt"
|
||||||
|
path = "{tmpdir!s}/bar"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
foo = tmpdir.join("foo")
|
||||||
|
bar = tmpdir.join("bar")
|
||||||
|
fooitem = foo.join("lol.txt").ensure()
|
||||||
|
fooitem.write("UID:lol\nfööcontent")
|
||||||
|
baritem = bar.join("lol.txt").ensure()
|
||||||
|
baritem.write("UID:lol\nbööcontent")
|
||||||
|
|
||||||
|
r = runner.invoke(["discover"])
|
||||||
|
assert not r.exception
|
||||||
|
|
||||||
|
r = runner.invoke(["sync"])
|
||||||
|
assert not r.exception
|
||||||
|
|
||||||
|
assert fooitem.read() == expect_foo
|
||||||
|
assert baritem.read() == expect_bar
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("partial_sync", ["error", "ignore", "revert", None])
|
||||||
|
def test_partial_sync(tmpdir, runner, partial_sync):
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
{partial_sync}
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
fileext = ".txt"
|
||||||
|
path = "{base}/foo"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
read_only = true
|
||||||
|
fileext = ".txt"
|
||||||
|
path = "{base}/bar"
|
||||||
|
""".format(
|
||||||
|
partial_sync=(
|
||||||
|
f'partial_sync = "{partial_sync}"\n' if partial_sync else ""
|
||||||
|
),
|
||||||
|
base=str(tmpdir),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
foo = tmpdir.mkdir("foo")
|
||||||
|
bar = tmpdir.mkdir("bar")
|
||||||
|
|
||||||
|
foo.join("other.txt").write("UID:other")
|
||||||
|
bar.join("other.txt").write("UID:other")
|
||||||
|
|
||||||
|
baritem = bar.join("lol.txt")
|
||||||
|
baritem.write("UID:lol")
|
||||||
|
|
||||||
|
r = runner.invoke(["discover"])
|
||||||
|
assert not r.exception
|
||||||
|
|
||||||
|
r = runner.invoke(["sync"])
|
||||||
|
assert not r.exception
|
||||||
|
|
||||||
|
fooitem = foo.join("lol.txt")
|
||||||
|
fooitem.remove()
|
||||||
|
|
||||||
|
r = runner.invoke(["sync"])
|
||||||
|
|
||||||
|
if partial_sync == "error":
|
||||||
|
assert r.exception
|
||||||
|
assert "Attempted change" in r.output
|
||||||
|
elif partial_sync == "ignore":
|
||||||
|
assert baritem.exists()
|
||||||
|
r = runner.invoke(["sync"])
|
||||||
|
assert not r.exception
|
||||||
|
assert baritem.exists()
|
||||||
|
else:
|
||||||
|
assert baritem.exists()
|
||||||
|
r = runner.invoke(["sync"])
|
||||||
|
assert not r.exception
|
||||||
|
assert baritem.exists()
|
||||||
|
assert fooitem.exists()
|
||||||
|
|
||||||
|
|
||||||
|
def test_fetch_only_necessary_params(tmpdir, runner):
|
||||||
|
fetched_file = tmpdir.join("fetched_flag")
|
||||||
|
fetch_script = tmpdir.join("fetch_script")
|
||||||
|
fetch_script.write(
|
||||||
|
dedent(
|
||||||
|
f"""
|
||||||
|
set -e
|
||||||
|
touch "{fetched_file!s}"
|
||||||
|
echo ".txt"
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
runner.write_with_general(
|
||||||
|
dedent(
|
||||||
|
"""
|
||||||
|
[pair foobar]
|
||||||
|
a = "foo"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[pair bambar]
|
||||||
|
a = "bam"
|
||||||
|
b = "bar"
|
||||||
|
collections = null
|
||||||
|
|
||||||
|
[storage foo]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{path}"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bar]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{path}"
|
||||||
|
fileext = ".txt"
|
||||||
|
|
||||||
|
[storage bam]
|
||||||
|
type = "filesystem"
|
||||||
|
path = "{path}"
|
||||||
|
fileext.fetch = ["command", "sh", "{script}"]
|
||||||
|
""".format(path=str(tmpdir.mkdir("bogus")), script=str(fetch_script))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def fetched():
|
||||||
|
try:
|
||||||
|
fetched_file.remove()
|
||||||
|
return True
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
r = runner.invoke(["discover"])
|
||||||
|
assert not r.exception
|
||||||
|
assert fetched()
|
||||||
|
|
||||||
|
r = runner.invoke(["sync", "foobar"])
|
||||||
|
assert not r.exception
|
||||||
|
assert not fetched()
|
||||||
|
|
||||||
|
r = runner.invoke(["sync"])
|
||||||
|
assert not r.exception
|
||||||
|
assert fetched()
|
||||||
|
|
||||||
|
r = runner.invoke(["sync", "bambar"])
|
||||||
|
assert not r.exception
|
||||||
|
assert fetched()
|
||||||
31
tests/system/cli/test_utils.py
Normal file
31
tests/system/cli/test_utils.py
Normal file
|
|
@ -0,0 +1,31 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vdirsyncer import exceptions
|
||||||
|
from vdirsyncer.cli.utils import handle_cli_error
|
||||||
|
from vdirsyncer.cli.utils import storage_instance_from_config
|
||||||
|
from vdirsyncer.cli.utils import storage_names
|
||||||
|
|
||||||
|
|
||||||
|
def test_handle_cli_error(capsys):
|
||||||
|
try:
|
||||||
|
raise exceptions.InvalidResponse("ayy lmao")
|
||||||
|
except BaseException:
|
||||||
|
handle_cli_error()
|
||||||
|
|
||||||
|
_out, err = capsys.readouterr()
|
||||||
|
assert "returned something vdirsyncer doesn't understand" in err
|
||||||
|
assert "ayy lmao" in err
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_storage_instance_from_config(monkeypatch, aio_connector):
|
||||||
|
class Dummy:
|
||||||
|
def __init__(self, **kw):
|
||||||
|
assert kw == {"foo": "bar", "baz": 1}
|
||||||
|
|
||||||
|
monkeypatch.setitem(storage_names._storages, "lol", Dummy)
|
||||||
|
config = {"type": "lol", "foo": "bar", "baz": 1}
|
||||||
|
storage = await storage_instance_from_config(config, connector=aio_connector)
|
||||||
|
assert isinstance(storage, Dummy)
|
||||||
28
tests/system/conftest.py
Normal file
28
tests/system/conftest.py
Normal file
|
|
@ -0,0 +1,28 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import ssl
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import trustme
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def ca():
|
||||||
|
return trustme.CA()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def localhost_cert(ca):
|
||||||
|
return ca.issue_cert("localhost")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def httpserver_ssl_context(localhost_cert):
|
||||||
|
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||||
|
|
||||||
|
crt = localhost_cert.cert_chain_pems[0]
|
||||||
|
key = localhost_cert.private_key_pem
|
||||||
|
with crt.tempfile() as crt_file, key.tempfile() as key_file:
|
||||||
|
context.load_cert_chain(crt_file, key_file)
|
||||||
|
|
||||||
|
return context
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue