mirror of
https://github.com/samsonjs/vdirsyncer.git
synced 2026-04-04 10:35:51 +00:00
Compare commits
92 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c93cffdf72 | ||
|
|
42564de75c | ||
|
|
6e0e674fe3 | ||
|
|
648cd1ae98 | ||
|
|
aee513a39f | ||
|
|
556ec88578 | ||
|
|
579b2ca5d9 | ||
|
|
511f427a77 | ||
|
|
07cbd58aaf | ||
|
|
8c67763a1b | ||
|
|
c31e27a88a | ||
|
|
9324fa4a74 | ||
|
|
f401078c57 | ||
|
|
12bf226a41 | ||
|
|
a61d51bc8f | ||
|
|
ec79d8b18e | ||
|
|
4f3fd09f87 | ||
|
|
b5eefc9bf5 | ||
|
|
59e822707d | ||
|
|
8cedf13fdf | ||
|
|
d26258807e | ||
|
|
003ee86a2d | ||
|
|
07eff1b418 | ||
|
|
73714afcdb | ||
|
|
69f4e4f3bc | ||
|
|
379086eb04 | ||
|
|
cba48f1d9e | ||
|
|
53d55fced4 | ||
|
|
168d999359 | ||
|
|
50c1151921 | ||
|
|
85bc7ed169 | ||
|
|
06d59f59a5 | ||
|
|
3f41f9cf41 | ||
|
|
cd2fd53e48 | ||
|
|
ba3c27322f | ||
|
|
e35e23238e | ||
|
|
2ceafac27a | ||
|
|
916fc4eb30 | ||
|
|
7e9fa7463e | ||
|
|
535911c9fd | ||
|
|
8f2734c33e | ||
|
|
4d3860d449 | ||
|
|
9c3a2b48e9 | ||
|
|
2a2457e364 | ||
|
|
855f29cc35 | ||
|
|
cc37e6a312 | ||
|
|
01573f0d66 | ||
|
|
c1aec4527c | ||
|
|
b1ec9c26c7 | ||
|
|
82f47737a0 | ||
|
|
45d76c889c | ||
|
|
c92b4f38eb | ||
|
|
47b2a43a0e | ||
|
|
2d0527ecf0 | ||
|
|
991076d12a | ||
|
|
f58f06d2b5 | ||
|
|
b1cddde635 | ||
|
|
41f64e2dca | ||
|
|
401c441acb | ||
|
|
f1310883b9 | ||
|
|
afa8031eec | ||
|
|
50604f24f1 | ||
|
|
cd6cb92b59 | ||
|
|
39c2df99eb | ||
|
|
7fdff404e6 | ||
|
|
1bdde25c0c | ||
|
|
b32932bd13 | ||
|
|
22d009b824 | ||
|
|
792dbc171f | ||
|
|
5700c4688b | ||
|
|
3984f547ce | ||
|
|
9769dab02e | ||
|
|
bd2e09a84b | ||
|
|
f7b6e67095 | ||
|
|
a2c509adf5 | ||
|
|
28fdf42238 | ||
|
|
0d3b028b17 | ||
|
|
f8e65878d8 | ||
|
|
75e83cd0f6 | ||
|
|
96a8ab35c3 | ||
|
|
619373a8e8 | ||
|
|
cbb15e1895 | ||
|
|
325304c50f | ||
|
|
bdbfc360ff | ||
|
|
c17fa308fb | ||
|
|
81f7472e3a | ||
|
|
69543b8615 | ||
|
|
1b7cb4e656 | ||
|
|
7bdb22a207 | ||
|
|
cb41a9df28 | ||
|
|
33f96f5eca | ||
|
|
178ac237ad |
158 changed files with 9911 additions and 7255 deletions
|
|
@ -1,49 +0,0 @@
|
||||||
# Run tests using the packaged dependencies on ArchLinux.
|
|
||||||
|
|
||||||
image: archlinux
|
|
||||||
packages:
|
|
||||||
- docker
|
|
||||||
- docker-compose
|
|
||||||
# Build dependencies:
|
|
||||||
- python-wheel
|
|
||||||
- python-build
|
|
||||||
- python-installer
|
|
||||||
- python-setuptools-scm
|
|
||||||
# Runtime dependencies:
|
|
||||||
- python-click
|
|
||||||
- python-click-log
|
|
||||||
- python-click-threading
|
|
||||||
- python-requests
|
|
||||||
- python-aiohttp-oauthlib
|
|
||||||
- python-tenacity
|
|
||||||
# Test dependencies:
|
|
||||||
- python-hypothesis
|
|
||||||
- python-pytest-cov
|
|
||||||
- python-pytest-httpserver
|
|
||||||
- python-trustme
|
|
||||||
- python-pytest-asyncio
|
|
||||||
- python-aiohttp
|
|
||||||
- python-aiostream
|
|
||||||
- python-aioresponses
|
|
||||||
sources:
|
|
||||||
- https://github.com/pimutils/vdirsyncer
|
|
||||||
environment:
|
|
||||||
BUILD: test
|
|
||||||
CI: true
|
|
||||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
|
||||||
DAV_SERVER: radicale xandikos
|
|
||||||
REQUIREMENTS: release
|
|
||||||
# TODO: ETESYNC_TESTS
|
|
||||||
tasks:
|
|
||||||
- check-python:
|
|
||||||
python --version | grep 'Python 3.13'
|
|
||||||
- docker: |
|
|
||||||
sudo systemctl start docker
|
|
||||||
- setup: |
|
|
||||||
cd vdirsyncer
|
|
||||||
python -m build --wheel --skip-dependency-check --no-isolation
|
|
||||||
sudo python -m installer dist/*.whl
|
|
||||||
- test: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make -e ci-test
|
|
||||||
make -e ci-test-storage
|
|
||||||
|
|
@ -1,36 +0,0 @@
|
||||||
# Run tests using oldest available dependency versions.
|
|
||||||
#
|
|
||||||
# TODO: It might make more sense to test with an older Ubuntu or Fedora version
|
|
||||||
# here, and consider that our "oldest suppported environment".
|
|
||||||
|
|
||||||
image: alpine/3.19 # python 3.11
|
|
||||||
packages:
|
|
||||||
- docker
|
|
||||||
- docker-cli
|
|
||||||
- docker-compose
|
|
||||||
- py3-pip
|
|
||||||
- python3-dev
|
|
||||||
sources:
|
|
||||||
- https://github.com/pimutils/vdirsyncer
|
|
||||||
environment:
|
|
||||||
BUILD: test
|
|
||||||
CI: true
|
|
||||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
|
||||||
DAV_SERVER: radicale xandikos
|
|
||||||
REQUIREMENTS: minimal
|
|
||||||
tasks:
|
|
||||||
- venv: |
|
|
||||||
python3 -m venv $HOME/venv
|
|
||||||
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
|
||||||
- docker: |
|
|
||||||
sudo addgroup $(whoami) docker
|
|
||||||
sudo service docker start
|
|
||||||
- setup: |
|
|
||||||
cd vdirsyncer
|
|
||||||
# Hack, no idea why it's needed
|
|
||||||
sudo ln -s /usr/include/python3.11/cpython/longintrepr.h /usr/include/python3.11/longintrepr.h
|
|
||||||
make -e install-dev
|
|
||||||
- test: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make -e ci-test
|
|
||||||
make -e ci-test-storage
|
|
||||||
|
|
@ -1,45 +0,0 @@
|
||||||
# Run tests using latest dependencies from PyPI
|
|
||||||
|
|
||||||
image: archlinux
|
|
||||||
packages:
|
|
||||||
- docker
|
|
||||||
- docker-compose
|
|
||||||
- python-pip
|
|
||||||
sources:
|
|
||||||
- https://github.com/pimutils/vdirsyncer
|
|
||||||
secrets:
|
|
||||||
- 4d9a6dfe-5c8d-48bd-b864-a2f5d772c536
|
|
||||||
environment:
|
|
||||||
BUILD: test
|
|
||||||
CI: true
|
|
||||||
CODECOV_TOKEN: b834a3c5-28fa-4808-9bdb-182210069c79
|
|
||||||
DAV_SERVER: baikal radicale xandikos
|
|
||||||
REQUIREMENTS: release
|
|
||||||
# TODO: ETESYNC_TESTS
|
|
||||||
tasks:
|
|
||||||
- venv: |
|
|
||||||
python -m venv $HOME/venv
|
|
||||||
echo "export PATH=$HOME/venv/bin:$PATH" >> $HOME/.buildenv
|
|
||||||
- docker: |
|
|
||||||
sudo systemctl start docker
|
|
||||||
- setup: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make -e install-dev
|
|
||||||
- test: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make -e ci-test
|
|
||||||
make -e ci-test-storage
|
|
||||||
- check: |
|
|
||||||
cd vdirsyncer
|
|
||||||
make check
|
|
||||||
- check-secrets: |
|
|
||||||
# Stop here if this is a PR. PRs can't run with the below secrets.
|
|
||||||
[ -f ~/fastmail-secrets ] || complete-build
|
|
||||||
- extra-storages: |
|
|
||||||
set +x
|
|
||||||
source ~/fastmail-secrets
|
|
||||||
set -x
|
|
||||||
|
|
||||||
cd vdirsyncer
|
|
||||||
export PATH=$PATH:~/.local/bin/
|
|
||||||
DAV_SERVER=fastmail pytest tests/storage
|
|
||||||
243
.circleci/config.yml
Normal file
243
.circleci/config.yml
Normal file
|
|
@ -0,0 +1,243 @@
|
||||||
|
version: 2
|
||||||
|
|
||||||
|
references:
|
||||||
|
basic_env: &basic_env
|
||||||
|
CI: true
|
||||||
|
restore_caches: &restore_caches
|
||||||
|
restore_cache:
|
||||||
|
keys:
|
||||||
|
- cache3-{{ arch }}-{{ .Branch }}
|
||||||
|
|
||||||
|
save_caches: &save_caches
|
||||||
|
save_cache:
|
||||||
|
key: cache3-{{ arch }}-{{ .Branch }}
|
||||||
|
paths:
|
||||||
|
- "rust/target/"
|
||||||
|
- "~/.cargo/"
|
||||||
|
- "~/.cache/pip/"
|
||||||
|
- "~/.rustup/"
|
||||||
|
|
||||||
|
basic_setup: &basic_setup
|
||||||
|
run: . scripts/circleci-install.sh
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
nextcloud:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
NEXTCLOUD_HOST: localhost:80
|
||||||
|
DAV_SERVER: nextcloud
|
||||||
|
- image: nextcloud
|
||||||
|
environment:
|
||||||
|
SQLITE_DATABASE: nextcloud
|
||||||
|
NEXTCLOUD_ADMIN_USER: asdf
|
||||||
|
NEXTCLOUD_ADMIN_PASSWORD: asdf
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: wget -O - --retry-connrefused http://localhost:80/
|
||||||
|
- run: make -e storage-test
|
||||||
|
|
||||||
|
fastmail:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
DAV_SERVER: fastmail
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e storage-test
|
||||||
|
|
||||||
|
icloud:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
DAV_SERVER: icloud
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e storage-test
|
||||||
|
|
||||||
|
davical:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
DAV_SERVER: davical
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e storage-test
|
||||||
|
|
||||||
|
xandikos:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
DAV_SERVER: xandikos
|
||||||
|
- image: vdirsyncer/xandikos:0.0.1
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: wget -O - --retry-connrefused http://localhost:5001/
|
||||||
|
- run: make -e storage-test
|
||||||
|
|
||||||
|
style:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-style
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e style
|
||||||
|
|
||||||
|
py34-minimal:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.4
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
REQUIREMENTS: minimal
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e test
|
||||||
|
|
||||||
|
py34-release:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.4
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
REQUIREMENTS: release
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e test
|
||||||
|
|
||||||
|
py34-devel:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.4
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
REQUIREMENTS: devel
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e test
|
||||||
|
|
||||||
|
py36-minimal:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
REQUIREMENTS: minimal
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e test
|
||||||
|
|
||||||
|
py36-release:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
REQUIREMENTS: release
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e test
|
||||||
|
|
||||||
|
py36-devel:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
REQUIREMENTS: devel
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e test
|
||||||
|
|
||||||
|
rust:
|
||||||
|
docker:
|
||||||
|
- image: circleci/python:3.6
|
||||||
|
environment:
|
||||||
|
<<: *basic_env
|
||||||
|
REQUIREMENTS: release
|
||||||
|
steps:
|
||||||
|
- checkout
|
||||||
|
- *restore_caches
|
||||||
|
- *basic_setup
|
||||||
|
- run: make -e install-dev install-test
|
||||||
|
- *save_caches
|
||||||
|
|
||||||
|
- run: make -e rust-test
|
||||||
|
|
||||||
|
workflows:
|
||||||
|
version: 2
|
||||||
|
test_all:
|
||||||
|
jobs:
|
||||||
|
- nextcloud
|
||||||
|
- fastmail
|
||||||
|
- icloud
|
||||||
|
- davical
|
||||||
|
- xandikos
|
||||||
|
- style
|
||||||
|
- py34-minimal
|
||||||
|
- py34-release
|
||||||
|
- py34-devel
|
||||||
|
- py36-minimal
|
||||||
|
- py36-release
|
||||||
|
- py36-devel
|
||||||
|
- rust
|
||||||
|
|
@ -2,3 +2,10 @@ comment: false
|
||||||
coverage:
|
coverage:
|
||||||
status:
|
status:
|
||||||
patch: false
|
patch: false
|
||||||
|
project:
|
||||||
|
unit:
|
||||||
|
flags: unit
|
||||||
|
system:
|
||||||
|
flags: system
|
||||||
|
storage:
|
||||||
|
flags: storage
|
||||||
|
|
|
||||||
1
.envrc
1
.envrc
|
|
@ -1 +0,0 @@
|
||||||
layout python3
|
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
|
|
@ -13,5 +13,6 @@ env
|
||||||
dist
|
dist
|
||||||
docs/_build/
|
docs/_build/
|
||||||
vdirsyncer/version.py
|
vdirsyncer/version.py
|
||||||
|
vdirsyncer/_native*
|
||||||
.hypothesis
|
.hypothesis
|
||||||
coverage.xml
|
codecov.sh
|
||||||
|
|
|
||||||
0
.gitmodules
vendored
Normal file
0
.gitmodules
vendored
Normal file
|
|
@ -1,39 +0,0 @@
|
||||||
repos:
|
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
|
||||||
rev: v5.0.0
|
|
||||||
hooks:
|
|
||||||
- id: trailing-whitespace
|
|
||||||
args: [--markdown-linebreak-ext=md]
|
|
||||||
- id: end-of-file-fixer
|
|
||||||
- id: check-toml
|
|
||||||
- id: check-added-large-files
|
|
||||||
- id: debug-statements
|
|
||||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
|
||||||
rev: "v1.15.0"
|
|
||||||
hooks:
|
|
||||||
- id: mypy
|
|
||||||
files: vdirsyncer/.*
|
|
||||||
additional_dependencies:
|
|
||||||
- types-setuptools
|
|
||||||
- types-docutils
|
|
||||||
- types-requests
|
|
||||||
- repo: https://github.com/charliermarsh/ruff-pre-commit
|
|
||||||
rev: 'v0.11.4'
|
|
||||||
hooks:
|
|
||||||
- id: ruff
|
|
||||||
args: [--fix, --exit-non-zero-on-fix]
|
|
||||||
- id: ruff-format
|
|
||||||
- repo: local
|
|
||||||
hooks:
|
|
||||||
- id: typos-syncroniz
|
|
||||||
name: typos-syncroniz
|
|
||||||
language: system
|
|
||||||
# Not how you spell "synchronise"
|
|
||||||
entry: sh -c "git grep -i syncroniz"
|
|
||||||
files: ".*/.*"
|
|
||||||
- id: typos-text-icalendar
|
|
||||||
name: typos-text-icalendar
|
|
||||||
language: system
|
|
||||||
# It's "text/calendar", no "i".
|
|
||||||
entry: sh -c "git grep -i 'text/icalendar'"
|
|
||||||
files: ".*/.*"
|
|
||||||
|
|
@ -1,16 +0,0 @@
|
||||||
version: 2
|
|
||||||
|
|
||||||
sphinx:
|
|
||||||
configuration: docs/conf.py
|
|
||||||
|
|
||||||
build:
|
|
||||||
os: "ubuntu-22.04"
|
|
||||||
tools:
|
|
||||||
python: "3.9"
|
|
||||||
|
|
||||||
python:
|
|
||||||
install:
|
|
||||||
- method: pip
|
|
||||||
path: .
|
|
||||||
extra_requirements:
|
|
||||||
- docs
|
|
||||||
|
|
@ -4,22 +4,15 @@ Contributors
|
||||||
In alphabetical order:
|
In alphabetical order:
|
||||||
|
|
||||||
- Ben Boeckel
|
- Ben Boeckel
|
||||||
- Bleala
|
|
||||||
- Christian Geier
|
- Christian Geier
|
||||||
- Clément Mondon
|
- Clément Mondon
|
||||||
- Corey Hinshaw
|
|
||||||
- Kai Herlemann
|
|
||||||
- Hugo Osvaldo Barrera
|
- Hugo Osvaldo Barrera
|
||||||
- Jason Cox
|
|
||||||
- Julian Mehne
|
- Julian Mehne
|
||||||
- Malte Kiefer
|
- Malte Kiefer
|
||||||
- Marek Marczykowski-Górecki
|
- Marek Marczykowski-Górecki
|
||||||
- Markus Unterwaditzer
|
- Markus Unterwaditzer
|
||||||
- Michael Adler
|
- Michael Adler
|
||||||
- rEnr3n
|
|
||||||
- Thomas Weißschuh
|
- Thomas Weißschuh
|
||||||
- Witcher01
|
|
||||||
- samm81
|
|
||||||
|
|
||||||
Special thanks goes to:
|
Special thanks goes to:
|
||||||
|
|
||||||
|
|
|
||||||
140
CHANGELOG.rst
140
CHANGELOG.rst
|
|
@ -9,142 +9,12 @@ Package maintainers and users who have to manually update their installation
|
||||||
may want to subscribe to `GitHub's tag feed
|
may want to subscribe to `GitHub's tag feed
|
||||||
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
|
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
|
||||||
|
|
||||||
Version 0.21.0
|
Version 0.17.0
|
||||||
==============
|
==============
|
||||||
|
|
||||||
- Implement retrying for ``google`` storage type when a rate limit is reached.
|
- Fix bug where collection discovery under DAV-storages would produce invalid
|
||||||
- ``tenacity`` is now a required dependency.
|
XML. See :gh:`688`.
|
||||||
- Drop support for Python 3.8.
|
- ownCloud and Baikal are no longer tested.
|
||||||
- Retry transient network errors for nullipotent requests.
|
|
||||||
|
|
||||||
Version 0.20.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Remove dependency on abandoned ``atomicwrites`` library.
|
|
||||||
- Implement ``filter_hook`` for the HTTP storage.
|
|
||||||
- Drop support for Python 3.7.
|
|
||||||
- Add support for Python 3.12 and Python 3.13.
|
|
||||||
- Properly close the status database after using. This especially affects tests,
|
|
||||||
where we were leaking a large amount of file descriptors.
|
|
||||||
- Extend supported versions of ``aiostream`` to include 0.7.x.
|
|
||||||
|
|
||||||
Version 0.19.3
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Added a no_delete option to the storage configuration. :gh:`1090`
|
|
||||||
- Fix crash when running ``vdirsyncer repair`` on a collection. :gh:`1019`
|
|
||||||
- Add an option to request vCard v4.0. :gh:`1066`
|
|
||||||
- Require matching ``BEGIN`` and ``END`` lines in vobjects. :gh:`1103`
|
|
||||||
- A Docker environment for Vdirsyncer has been added `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_.
|
|
||||||
- Implement digest auth. :gh:`1137`
|
|
||||||
- Add ``filter_hook`` parameter to :storage:`http`. :gh:`1136`
|
|
||||||
|
|
||||||
Version 0.19.2
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Improve the performance of ``SingleFileStorage``. :gh:`818`
|
|
||||||
- Properly document some caveats of the Google Contacts storage.
|
|
||||||
- Fix crash when using auth certs. :gh:`1033`
|
|
||||||
- The ``filesystem`` storage can be specified with ``type =
|
|
||||||
"filesystem/icalendar"`` or ``type = "filesystem/vcard"``. This has not
|
|
||||||
functional impact, and is merely for forward compatibility with the Rust
|
|
||||||
implementation of vdirsyncer.
|
|
||||||
- Python 3.10 and 3.11 are officially supported.
|
|
||||||
- Instructions for integrating with Google CalDav/CardDav have changed.
|
|
||||||
Applications now need to be registered as "Desktop applications". Using "Web
|
|
||||||
application" no longer works due to changes on Google's side. :gh:`1078`
|
|
||||||
|
|
||||||
Version 0.19.1
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Fixed crash when operating on Google Contacts. :gh:`994`
|
|
||||||
- The ``HTTP_PROXY`` and ``HTTPS_PROXY`` are now respected. :gh:`1031`
|
|
||||||
- Instructions for integrating with Google CalDav/CardDav have changed.
|
|
||||||
Applications now need to be registered as "Web Application". :gh:`975`
|
|
||||||
- Various documentation updates.
|
|
||||||
|
|
||||||
Version 0.19.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
- Add "shell" password fetch strategy to pass command string to a shell.
|
|
||||||
- Add "description" and "order" as metadata. These fetch the CalDAV:
|
|
||||||
calendar-description, ``CardDAV:addressbook-description`` and
|
|
||||||
``apple-ns:calendar-order`` properties respectively.
|
|
||||||
- Add a new ``showconfig`` status. This prints *some* configuration values as
|
|
||||||
JSON. This is intended to be used by external tools and helpers that interact
|
|
||||||
with ``vdirsyncer``, and considered experimental.
|
|
||||||
- Add ``implicit`` option to the :ref:`pair section <pair_config>`. When set to
|
|
||||||
"create", it implicitly creates missing collections during sync without user
|
|
||||||
prompts. This simplifies workflows where collections should be automatically
|
|
||||||
created on both sides.
|
|
||||||
- Update TLS-related tests that were failing due to weak MDs. :gh:`903`
|
|
||||||
- ``pytest-httpserver`` and ``trustme`` are now required for tests.
|
|
||||||
- ``pytest-localserver`` is no longer required for tests.
|
|
||||||
- Multithreaded support has been dropped. The ``"--max-workers`` has been removed.
|
|
||||||
- A new ``asyncio`` backend is now used. So far, this shows substantial speed
|
|
||||||
improvements in ``discovery`` and ``metasync``, but little change in `sync`.
|
|
||||||
This will likely continue improving over time. :gh:`906`
|
|
||||||
- The ``google`` storage types no longer require ``requests-oauthlib``, but
|
|
||||||
require ``python-aiohttp-oauthlib`` instead.
|
|
||||||
- Vdirsyncer no longer includes experimental support for `EteSync
|
|
||||||
<https://www.etesync.com/>`_. The existing integration had not been supported
|
|
||||||
for a long time and no longer worked. Support for external storages may be
|
|
||||||
added if anyone is interested in maintaining an EteSync plugin. EteSync
|
|
||||||
users should consider using `etesync-dav`_.
|
|
||||||
- The ``plist`` for macOS has been dropped. It was broken and homebrew
|
|
||||||
generates their own based on package metadata. macOS users are encouraged to
|
|
||||||
use that as a reference.
|
|
||||||
|
|
||||||
.. _etesync-dav: https://github.com/etesync/etesync-dav
|
|
||||||
|
|
||||||
Changes to SSL configuration
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
Support for ``md5`` and ``sha1`` certificate fingerprints has been dropped. If
|
|
||||||
you're validating certificate fingerprints, use ``sha256`` instead.
|
|
||||||
|
|
||||||
When using a custom ``verify_fingerprint``, CA validation is always disabled.
|
|
||||||
|
|
||||||
If ``verify_fingerprint`` is unset, CA verification is always active. Disabling
|
|
||||||
both features is insecure and no longer supported.
|
|
||||||
|
|
||||||
The ``verify`` parameter no longer takes boolean values, it is now optional and
|
|
||||||
only takes a string to a custom CA for verification.
|
|
||||||
|
|
||||||
The ``verify`` and ``verify_fingerprint`` will likely be merged into a single
|
|
||||||
parameter in future.
|
|
||||||
|
|
||||||
Version 0.18.0
|
|
||||||
==============
|
|
||||||
|
|
||||||
Note: Version 0.17 has some alpha releases but ultimately was never finalised.
|
|
||||||
0.18 actually continues where 0.16 left off.
|
|
||||||
|
|
||||||
- Support for Python 3.5 and 3.6 has been dropped. This release mostly focuses
|
|
||||||
on keeping vdirsyncer compatible with newer environments.
|
|
||||||
- click 8 and click-threading 0.5.0 are now required.
|
|
||||||
- For those using ``pipsi``, we now recommend using ``pipx``, it's successor.
|
|
||||||
- Python 3.9 is now supported.
|
|
||||||
- Our Debian/Ubuntu build scripts have been updated. New versions should be
|
|
||||||
pushed to those repositories soon.
|
|
||||||
|
|
||||||
Version 0.16.8
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released 09 June 2020*
|
|
||||||
|
|
||||||
- Support Python 3.7 and 3.8.
|
|
||||||
|
|
||||||
This release is functionally identical to 0.16.7.
|
|
||||||
It's been tested with recent Python versions, and has been marked as supporting
|
|
||||||
them. It will also be the final release supporting Python 3.5 and 3.6.
|
|
||||||
|
|
||||||
Version 0.16.7
|
|
||||||
==============
|
|
||||||
|
|
||||||
*released on 19 July 2018*
|
|
||||||
|
|
||||||
- Fixes for Python 3.7
|
|
||||||
|
|
||||||
Version 0.16.6
|
Version 0.16.6
|
||||||
==============
|
==============
|
||||||
|
|
@ -250,7 +120,7 @@ Version 0.14.0
|
||||||
exit code in such situations is still non-zero.
|
exit code in such situations is still non-zero.
|
||||||
- Add ``partial_sync`` option to pair section. See :ref:`the config docs
|
- Add ``partial_sync`` option to pair section. See :ref:`the config docs
|
||||||
<partial_sync_def>`.
|
<partial_sync_def>`.
|
||||||
- Vdirsyncer will now warn if there's a string without quotes in your config.
|
- Vdirsyner will now warn if there's a string without quotes in your config.
|
||||||
Please file issues if you find documentation that uses unquoted strings.
|
Please file issues if you find documentation that uses unquoted strings.
|
||||||
- Fix an issue that would break khal's config setup wizard.
|
- Fix an issue that would break khal's config setup wizard.
|
||||||
|
|
||||||
|
|
|
||||||
9
LICENSE
9
LICENSE
|
|
@ -1,4 +1,4 @@
|
||||||
Copyright (c) 2014-2020 by Markus Unterwaditzer & contributors. See
|
Copyright (c) 2014-2018 by Markus Unterwaditzer & contributors. See
|
||||||
AUTHORS.rst for more details.
|
AUTHORS.rst for more details.
|
||||||
|
|
||||||
Some rights reserved.
|
Some rights reserved.
|
||||||
|
|
@ -31,3 +31,10 @@ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||||
DAMAGE.
|
DAMAGE.
|
||||||
|
|
||||||
|
== etesync ==
|
||||||
|
|
||||||
|
I, Tom Hacohen, hereby grant a license for EteSync's journal-manager
|
||||||
|
(https://github.com/etesync/journal-manager) to be used as a dependency in
|
||||||
|
vdirsyncer's test suite for the purpose of testing vdirsyncer without having
|
||||||
|
the copyleft section of the AGPL apply to it (vdirsyncer).
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,8 @@
|
||||||
# setuptools-scm includes everything tracked by git
|
# setuptools-scm includes everything tracked by git
|
||||||
prune docker
|
prune contrib
|
||||||
prune scripts
|
prune scripts
|
||||||
prune tests/storage/servers
|
prune tests/storage/servers
|
||||||
|
prune tests/storage/etesync
|
||||||
recursive-include tests/storage/servers/radicale *
|
recursive-include tests/storage/servers/radicale *
|
||||||
recursive-include tests/storage/servers/skip *
|
recursive-include tests/storage/servers/skip *
|
||||||
|
|
||||||
|
|
|
||||||
142
Makefile
142
Makefile
|
|
@ -1,7 +1,7 @@
|
||||||
# See the documentation on how to run the tests:
|
# See the documentation on how to run the tests:
|
||||||
# https://vdirsyncer.pimutils.org/en/stable/contributing.html
|
# https://vdirsyncer.pimutils.org/en/stable/contributing.html
|
||||||
|
|
||||||
# Which DAV server to run the tests against (radicale, xandikos, skip, owncloud, nextcloud, ...)
|
# Which DAV server to run the tests against (radicale, xandikos, skip, nextcloud, ...)
|
||||||
export DAV_SERVER := skip
|
export DAV_SERVER := skip
|
||||||
|
|
||||||
# release (install release versions of dependencies)
|
# release (install release versions of dependencies)
|
||||||
|
|
@ -12,52 +12,152 @@ export REQUIREMENTS := release
|
||||||
# Set this to true if you run vdirsyncer's test as part of e.g. packaging.
|
# Set this to true if you run vdirsyncer's test as part of e.g. packaging.
|
||||||
export DETERMINISTIC_TESTS := false
|
export DETERMINISTIC_TESTS := false
|
||||||
|
|
||||||
# Assume to run in CI. Don't use this outside of a virtual machine. It will
|
# Run the etesync testsuite.
|
||||||
|
export ETESYNC_TESTS := false
|
||||||
|
|
||||||
|
# Assume to run in Travis. Don't use this outside of a virtual machine. It will
|
||||||
# heavily "pollute" your system, such as attempting to install a new Python
|
# heavily "pollute" your system, such as attempting to install a new Python
|
||||||
# systemwide.
|
# systemwide.
|
||||||
export CI := false
|
export CI := false
|
||||||
|
|
||||||
|
# Enable debug symbols and backtrace printing for rust lib
|
||||||
|
export RUST_BACKTRACE := $(CI)
|
||||||
|
|
||||||
# Whether to generate coverage data while running tests.
|
# Whether to generate coverage data while running tests.
|
||||||
export COVERAGE := $(CI)
|
export COVERAGE := $(CI)
|
||||||
|
|
||||||
|
# Log everything
|
||||||
|
export RUST_LOG := vdirsyncer_rustext=debug
|
||||||
|
|
||||||
|
# Additional arguments that should be passed to py.test.
|
||||||
|
PYTEST_ARGS =
|
||||||
|
|
||||||
# Variables below this line are not very interesting for getting started.
|
# Variables below this line are not very interesting for getting started.
|
||||||
|
|
||||||
|
TEST_EXTRA_PACKAGES =
|
||||||
|
|
||||||
|
ifeq ($(COVERAGE), true)
|
||||||
|
TEST_EXTRA_PACKAGES += pytest-cov
|
||||||
|
PYTEST_ARGS += --cov-config .coveragerc --cov vdirsyncer
|
||||||
|
endif
|
||||||
|
|
||||||
|
ifeq ($(ETESYNC_TESTS), true)
|
||||||
|
TEST_EXTRA_PACKAGES += django-etesync-journal django djangorestframework wsgi_intercept drf-nested-routers
|
||||||
|
endif
|
||||||
|
|
||||||
|
PYTEST = py.test $(PYTEST_ARGS)
|
||||||
|
|
||||||
|
export TESTSERVER_BASE := ./tests/storage/servers/
|
||||||
CODECOV_PATH = /tmp/codecov.sh
|
CODECOV_PATH = /tmp/codecov.sh
|
||||||
|
|
||||||
all:
|
all:
|
||||||
$(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation)
|
$(error Take a look at https://vdirsyncer.pimutils.org/en/stable/tutorial.html#installation)
|
||||||
|
|
||||||
ci-test:
|
ifeq ($(CI), true)
|
||||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
codecov.sh:
|
||||||
pytest --cov vdirsyncer --cov-append tests/unit/ tests/system/
|
curl -s https://codecov.io/bash > $@
|
||||||
bash $(CODECOV_PATH) -c
|
else
|
||||||
|
codecov.sh:
|
||||||
|
echo > $@
|
||||||
|
endif
|
||||||
|
|
||||||
ci-test-storage:
|
rust-test:
|
||||||
curl -s https://codecov.io/bash > $(CODECOV_PATH)
|
cd rust/ && cargo test --release
|
||||||
|
|
||||||
|
test: unit-test system-test storage-test
|
||||||
|
|
||||||
|
unit-test: codecov.sh
|
||||||
|
$(PYTEST) tests/unit/
|
||||||
|
bash codecov.sh -c -F unit
|
||||||
|
|
||||||
|
system-test: codecov.sh
|
||||||
|
$(PYTEST) tests/system/
|
||||||
|
bash codecov.sh -c -F system
|
||||||
|
|
||||||
|
storage-test: codecov.sh
|
||||||
|
$(PYTEST) tests/storage/
|
||||||
|
bash codecov.sh -c -F storage
|
||||||
|
|
||||||
|
install-servers:
|
||||||
set -ex; \
|
set -ex; \
|
||||||
for server in $(DAV_SERVER); do \
|
for server in $(DAV_SERVER); do \
|
||||||
DAV_SERVER=$$server pytest --cov vdirsyncer --cov-append tests/storage; \
|
if [ ! "$$(ls $(TESTSERVER_BASE)$$server/)" ]; then \
|
||||||
|
git submodule update --init -- "$(TESTSERVER_BASE)$$server"; \
|
||||||
|
fi; \
|
||||||
|
(cd $(TESTSERVER_BASE)$$server && sh install.sh); \
|
||||||
done
|
done
|
||||||
bash $(CODECOV_PATH) -c
|
|
||||||
|
|
||||||
check:
|
install-test: install-servers
|
||||||
ruff check
|
pip install -Ur test-requirements.txt
|
||||||
ruff format --diff
|
set -xe && if [ "$$REQUIREMENTS" = "devel" ]; then \
|
||||||
#mypy vdirsyncer
|
pip install -U --force-reinstall \
|
||||||
|
'git+https://github.com/HypothesisWorks/hypothesis#egg=hypothesis&subdirectory=hypothesis-python' \
|
||||||
|
git+https://github.com/kennethreitz/requests \
|
||||||
|
git+https://github.com/pytest-dev/pytest; \
|
||||||
|
fi
|
||||||
|
[ -z "$(TEST_EXTRA_PACKAGES)" ] || pip install $(TEST_EXTRA_PACKAGES)
|
||||||
|
|
||||||
|
install-style: install-docs
|
||||||
|
pip install -U flake8 flake8-import-order 'flake8-bugbear>=17.3.0'
|
||||||
|
rustup component add rustfmt-preview
|
||||||
|
cargo install --force --git https://github.com/rust-lang-nursery/rust-clippy clippy
|
||||||
|
|
||||||
|
style:
|
||||||
|
flake8
|
||||||
|
! git grep -i syncroniz */*
|
||||||
|
! git grep -i 'text/icalendar' */*
|
||||||
|
sphinx-build -W -b html ./docs/ ./docs/_build/html/
|
||||||
|
cd rust/ && cargo +nightly clippy
|
||||||
|
cd rust/ && cargo +nightly fmt --all -- --check
|
||||||
|
|
||||||
|
install-docs:
|
||||||
|
pip install -Ur docs-requirements.txt
|
||||||
|
|
||||||
|
docs:
|
||||||
|
cd docs && make html
|
||||||
|
|
||||||
|
linkcheck:
|
||||||
|
sphinx-build -W -b linkcheck ./docs/ ./docs/_build/linkcheck/
|
||||||
|
|
||||||
|
release:
|
||||||
|
python setup.py sdist upload
|
||||||
|
|
||||||
release-deb:
|
release-deb:
|
||||||
sh scripts/release-deb.sh debian jessie
|
sh scripts/release-deb.sh debian jessie
|
||||||
sh scripts/release-deb.sh debian stretch
|
sh scripts/release-deb.sh debian stretch
|
||||||
sh scripts/release-deb.sh ubuntu trusty
|
sh scripts/release-deb.sh ubuntu trusty
|
||||||
sh scripts/release-deb.sh ubuntu xenial
|
sh scripts/release-deb.sh ubuntu xenial
|
||||||
sh scripts/release-deb.sh ubuntu zesty
|
|
||||||
|
|
||||||
install-dev:
|
install-dev:
|
||||||
pip install -U pip setuptools wheel
|
pip install -ve .
|
||||||
pip install -e '.[test,check,docs]'
|
[ "$(ETESYNC_TESTS)" = "false" ] || pip install -Ue .[etesync]
|
||||||
set -xe && if [ "$(REQUIREMENTS)" = "minimal" ]; then \
|
set -xe && if [ "$(REQUIREMENTS)" = "devel" ]; then \
|
||||||
pip install pyproject-dependencies && \
|
pip install -U --force-reinstall \
|
||||||
pip install -U --force-reinstall $$(pyproject-dependencies . | sed 's/>/=/'); \
|
git+https://github.com/mitsuhiko/click \
|
||||||
|
git+https://github.com/click-contrib/click-log \
|
||||||
|
git+https://github.com/kennethreitz/requests; \
|
||||||
|
elif [ "$(REQUIREMENTS)" = "minimal" ]; then \
|
||||||
|
pip install -U --force-reinstall $$(python setup.py --quiet minimal_requirements); \
|
||||||
fi
|
fi
|
||||||
|
|
||||||
.PHONY: docs
|
ssh-submodule-urls:
|
||||||
|
git submodule foreach "\
|
||||||
|
echo -n 'Old: '; \
|
||||||
|
git remote get-url origin; \
|
||||||
|
git remote set-url origin \$$(git remote get-url origin | sed -e 's/https:\/\/github\.com\//git@github.com:/g'); \
|
||||||
|
echo -n 'New URL: '; \
|
||||||
|
git remote get-url origin"
|
||||||
|
|
||||||
|
install-rust:
|
||||||
|
curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain nightly
|
||||||
|
rustup update nightly
|
||||||
|
|
||||||
|
rust/vdirsyncer_rustext.h:
|
||||||
|
cd rust/ && cargo build # hack to work around cbindgen bugs
|
||||||
|
CARGO_EXPAND_TARGET_DIR=rust/target/ cbindgen -c rust/cbindgen.toml rust/ > $@
|
||||||
|
|
||||||
|
docker/xandikos:
|
||||||
|
docker build -t vdirsyncer/xandikos:0.0.1 $@
|
||||||
|
docker push vdirsyncer/xandikos:0.0.1
|
||||||
|
|
||||||
|
.PHONY: docs rust/vdirsyncer_rustext.h docker/xandikos
|
||||||
|
|
|
||||||
47
README.rst
47
README.rst
|
|
@ -2,30 +2,6 @@
|
||||||
vdirsyncer
|
vdirsyncer
|
||||||
==========
|
==========
|
||||||
|
|
||||||
.. image:: https://builds.sr.ht/~whynothugo/vdirsyncer.svg
|
|
||||||
:target: https://builds.sr.ht/~whynothugo/vdirsyncer
|
|
||||||
:alt: CI status
|
|
||||||
|
|
||||||
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=main
|
|
||||||
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=main
|
|
||||||
:alt: Codecov coverage report
|
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/vdirsyncer/badge/
|
|
||||||
:target: https://vdirsyncer.rtfd.org/
|
|
||||||
:alt: documentation
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/pypi/v/vdirsyncer.svg
|
|
||||||
:target: https://pypi.python.org/pypi/vdirsyncer
|
|
||||||
:alt: version on pypi
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/badge/deb-packagecloud.io-844fec.svg
|
|
||||||
:target: https://packagecloud.io/pimutils/vdirsyncer
|
|
||||||
:alt: Debian packages
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/pypi/l/vdirsyncer.svg
|
|
||||||
:target: https://github.com/pimutils/vdirsyncer/blob/main/LICENCE
|
|
||||||
:alt: licence: BSD
|
|
||||||
|
|
||||||
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
- `Documentation <https://vdirsyncer.pimutils.org/en/stable/>`_
|
||||||
- `Source code <https://github.com/pimutils/vdirsyncer>`_
|
- `Source code <https://github.com/pimutils/vdirsyncer>`_
|
||||||
|
|
||||||
|
|
@ -40,10 +16,22 @@ servers. It can also be used to synchronize calendars and/or addressbooks
|
||||||
between two servers directly.
|
between two servers directly.
|
||||||
|
|
||||||
It aims to be for calendars and contacts what `OfflineIMAP
|
It aims to be for calendars and contacts what `OfflineIMAP
|
||||||
<https://www.offlineimap.org/>`_ is for emails.
|
<http://offlineimap.org/>`_ is for emails.
|
||||||
|
|
||||||
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
|
.. _programs: https://vdirsyncer.pimutils.org/en/latest/tutorials/
|
||||||
|
|
||||||
|
.. image:: https://circleci.com/gh/pimutils/vdirsyncer.svg?style=shield
|
||||||
|
:target: https://circleci.com/gh/pimutils/vdirsyncer
|
||||||
|
|
||||||
|
.. image:: https://codecov.io/github/pimutils/vdirsyncer/coverage.svg?branch=master
|
||||||
|
:target: https://codecov.io/github/pimutils/vdirsyncer?branch=master
|
||||||
|
|
||||||
|
.. image:: https://badge.waffle.io/pimutils/vdirsyncer.svg?label=ready&title=Ready
|
||||||
|
:target: https://waffle.io/pimutils/vdirsyncer
|
||||||
|
|
||||||
|
.. image:: https://img.shields.io/badge/deb-packagecloud.io-844fec.svg
|
||||||
|
:target: https://packagecloud.io/pimutils/vdirsyncer
|
||||||
|
|
||||||
Links of interest
|
Links of interest
|
||||||
=================
|
=================
|
||||||
|
|
||||||
|
|
@ -59,15 +47,6 @@ Links of interest
|
||||||
|
|
||||||
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
|
* `Donations <https://vdirsyncer.pimutils.org/en/stable/donations.html>`_
|
||||||
|
|
||||||
Dockerized
|
|
||||||
=================
|
|
||||||
If you want to run `Vdirsyncer <https://vdirsyncer.pimutils.org/en/stable/>`_ in a
|
|
||||||
Docker environment, you can check out the following GitHub Repository:
|
|
||||||
|
|
||||||
* `Vdirsyncer DOCKERIZED <https://github.com/Bleala/Vdirsyncer-DOCKERIZED>`_
|
|
||||||
|
|
||||||
Note: This is an unofficial Docker build, it is maintained by `Bleala <https://github.com/Bleala>`_.
|
|
||||||
|
|
||||||
License
|
License
|
||||||
=======
|
=======
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -43,7 +43,7 @@ fileext = ".vcf"
|
||||||
|
|
||||||
[storage bob_contacts_remote]
|
[storage bob_contacts_remote]
|
||||||
type = "carddav"
|
type = "carddav"
|
||||||
url = "https://owncloud.example.com/remote.php/carddav/"
|
url = "https://nextcloud.example.com/"
|
||||||
#username =
|
#username =
|
||||||
# The password can also be fetched from the system password storage, netrc or a
|
# The password can also be fetched from the system password storage, netrc or a
|
||||||
# custom command. See http://vdirsyncer.pimutils.org/en/stable/keyring.html
|
# custom command. See http://vdirsyncer.pimutils.org/en/stable/keyring.html
|
||||||
|
|
@ -65,6 +65,6 @@ fileext = ".ics"
|
||||||
|
|
||||||
[storage bob_calendar_remote]
|
[storage bob_calendar_remote]
|
||||||
type = "caldav"
|
type = "caldav"
|
||||||
url = "https://owncloud.example.com/remote.php/caldav/"
|
url = "https://nextcloud.example.com/"
|
||||||
#username =
|
#username =
|
||||||
#password =
|
#password =
|
||||||
|
|
|
||||||
|
|
@ -1,75 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
"""Ask user to resolve a vdirsyncer sync conflict interactively.
|
|
||||||
|
|
||||||
Needs a way to ask the user.
|
|
||||||
The use of https://apps.kde.org/kdialog/ for GNU/Linix is hardcoded.
|
|
||||||
|
|
||||||
Depends on python>3.5 and KDialog.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
Ensure the file executable and use it in the vdirsyncer.conf file, e.g.
|
|
||||||
|
|
||||||
conflict_resolution = ["command", "/home/bern/vdirsyncer/resolve_interactively.py"]
|
|
||||||
|
|
||||||
This file is Free Software under the following license:
|
|
||||||
SPDX-License-Identifier: BSD-3-Clause
|
|
||||||
SPDX-FileCopyrightText: 2021 Intevation GmbH <https://intevation.de>
|
|
||||||
Author: <bernhard.reiter@intevation.de>
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
KDIALOG = "/usr/bin/kdialog"
|
|
||||||
|
|
||||||
SUMMARY_PATTERN = re.compile("^(SUMMARY:.*)$", re.MULTILINE)
|
|
||||||
|
|
||||||
|
|
||||||
def get_summary(icalendar_text: str):
|
|
||||||
"""Get the first SUMMARY: line from an iCalendar text.
|
|
||||||
|
|
||||||
Do not care about the line being continued.
|
|
||||||
"""
|
|
||||||
match = re.search(SUMMARY_PATTERN, icalendar_text)
|
|
||||||
return match[1]
|
|
||||||
|
|
||||||
|
|
||||||
def main(ical1_filename, ical2_filename):
|
|
||||||
ical1 = ical1_filename.read_text()
|
|
||||||
ical2 = ical2_filename.read_text()
|
|
||||||
|
|
||||||
additional_args = ["--yes-label", "take first"] # return code == 0
|
|
||||||
additional_args += ["--no-label", "take second"] # return code == 1
|
|
||||||
additional_args += ["--cancel-label", "do not resolve"] # return code == 2
|
|
||||||
|
|
||||||
r = subprocess.run(
|
|
||||||
args=[
|
|
||||||
KDIALOG,
|
|
||||||
"--warningyesnocancel",
|
|
||||||
"There was a sync conflict, do you prefer the first entry: \n"
|
|
||||||
f"{get_summary(ical1)}...\n(full contents: {ical1_filename})\n\n"
|
|
||||||
"or the second entry:\n"
|
|
||||||
f"{get_summary(ical2)}...\n(full contents: {ical2_filename})?",
|
|
||||||
*additional_args,
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
if r.returncode == 2:
|
|
||||||
# cancel was pressed
|
|
||||||
return # shall lead to items not changed, because not copied
|
|
||||||
|
|
||||||
if r.returncode == 0:
|
|
||||||
# we want to take the first item, so overwrite the second
|
|
||||||
ical2_filename.write_text(ical1)
|
|
||||||
else: # r.returncode == 1, we want the second item, so overwrite the first
|
|
||||||
ical1_filename.write_text(ical2)
|
|
||||||
|
|
||||||
|
|
||||||
if len(sys.argv) != 3:
|
|
||||||
sys.stdout.write(__doc__)
|
|
||||||
else:
|
|
||||||
main(Path(sys.argv[1]), Path(sys.argv[2]))
|
|
||||||
43
contrib/vdirsyncer.plist
Normal file
43
contrib/vdirsyncer.plist
Normal file
|
|
@ -0,0 +1,43 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||||
|
<!-- Blueprint for cron-like launchd plist -->
|
||||||
|
<!-- Replace @@PLACEHOLDERS@@ with appropriate values for your system/settings! -->
|
||||||
|
<plist version="1.0">
|
||||||
|
<dict>
|
||||||
|
<key>EnvironmentVariables</key>
|
||||||
|
<dict>
|
||||||
|
<!-- Locale to use for vdirsyncer, e.g. en_US.UTF-8 -->
|
||||||
|
<key>LANG</key>
|
||||||
|
<string>@@LOCALE@@</string>
|
||||||
|
<key>LC_ALL</key>
|
||||||
|
<string>@@LOCALE@@</string>
|
||||||
|
</dict>
|
||||||
|
<key>Label</key>
|
||||||
|
<string>vdirsyncer</string>
|
||||||
|
<key>WorkingDirectory</key>
|
||||||
|
<!-- working directory for vdirsyncer, usually the base directory where
|
||||||
|
vdirsyncer is installed, e.g. /usr/local/ -->
|
||||||
|
<string>@@WORKINGDIRECTORY@@</string>
|
||||||
|
<key>ProgramArguments</key>
|
||||||
|
<array>
|
||||||
|
<!-- full path to vdirsyncer binary -->
|
||||||
|
<string>@@VDIRSYNCER@@</string>
|
||||||
|
<!-- only log errors -->
|
||||||
|
<string>-v</string>
|
||||||
|
<string>ERROR</string>
|
||||||
|
<string>sync</string>
|
||||||
|
</array>
|
||||||
|
<key>RunAtLoad</key>
|
||||||
|
<true/>
|
||||||
|
<key>StartInterval</key>
|
||||||
|
<!-- Sync intervall in seconds -->
|
||||||
|
<integer>@@SYNCINTERVALL@@</integer>
|
||||||
|
<!-- For logging, redirect stdout & stderr -->
|
||||||
|
<!-- <key>StandardErrorPath</key> -->
|
||||||
|
<!-- Full path to stderr logfile, e.g. /tmp/vdirsyncer_err.log -->
|
||||||
|
<!-- <string>@@STDERRFILE@@</string> -->
|
||||||
|
<!-- Full path to stdout logfile, e.g. /tmp/vdirsyncer_out.log -->
|
||||||
|
<!-- <key>StandardOutPath</key> -->
|
||||||
|
<!-- <string>@@STDOUTFILE@@</string> -->
|
||||||
|
</dict>
|
||||||
|
</plist>
|
||||||
|
|
@ -1,9 +1,7 @@
|
||||||
[Unit]
|
[Unit]
|
||||||
Description=Synchronize calendars and contacts
|
Description=Synchronize calendars and contacts
|
||||||
Documentation=https://vdirsyncer.readthedocs.org/
|
Documentation=https://vdirsyncer.readthedocs.org/
|
||||||
StartLimitBurst=2
|
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
ExecStart=/usr/bin/vdirsyncer sync
|
ExecStart=/usr/bin/vdirsyncer sync
|
||||||
RuntimeMaxSec=3m
|
Type=oneshot
|
||||||
Restart=on-failure
|
|
||||||
|
|
|
||||||
18
docker-compose.yml
Normal file
18
docker-compose.yml
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
version: '2'
|
||||||
|
|
||||||
|
services:
|
||||||
|
nextcloud:
|
||||||
|
image: nextcloud
|
||||||
|
ports:
|
||||||
|
- '5000:80'
|
||||||
|
environment:
|
||||||
|
- SQLITE_DATABASE=nextcloud
|
||||||
|
- NEXTCLOUD_ADMIN_USER=asdf
|
||||||
|
- NEXTCLOUD_ADMIN_PASSWORD=asdf
|
||||||
|
|
||||||
|
xandikos:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: docker/xandikos/Dockerfile
|
||||||
|
ports:
|
||||||
|
- '5001:5001'
|
||||||
13
docker/xandikos/Dockerfile
Normal file
13
docker/xandikos/Dockerfile
Normal file
|
|
@ -0,0 +1,13 @@
|
||||||
|
# Original file copyright 2017 Jelmer Vernooij
|
||||||
|
|
||||||
|
FROM ubuntu:latest
|
||||||
|
RUN apt-get update && apt-get -y install xandikos locales
|
||||||
|
EXPOSE 8080
|
||||||
|
|
||||||
|
RUN locale-gen en_US.UTF-8
|
||||||
|
ENV PYTHONIOENCODING=utf-8
|
||||||
|
ENV LANG en_US.UTF-8
|
||||||
|
ENV LANGUAGE en_US:en
|
||||||
|
ENV LC_ALL en_US.UTF-8
|
||||||
|
|
||||||
|
CMD xandikos -d /tmp/dav -l 0.0.0.0 -p 5001 --autocreate
|
||||||
3
docs-requirements.txt
Normal file
3
docs-requirements.txt
Normal file
|
|
@ -0,0 +1,3 @@
|
||||||
|
sphinx != 1.4.7
|
||||||
|
sphinx_rtd_theme
|
||||||
|
setuptools_scm
|
||||||
|
|
@ -1 +1 @@
|
||||||
.. include:: ../CHANGELOG.rst
|
.. include:: ../CHANGELOG.rst
|
||||||
|
|
|
||||||
107
docs/conf.py
107
docs/conf.py
|
|
@ -1,106 +1,93 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from pkg_resources import get_distribution
|
import setuptools_scm
|
||||||
|
|
||||||
extensions = ["sphinx.ext.autodoc"]
|
extensions = ['sphinx.ext.autodoc']
|
||||||
|
|
||||||
templates_path = ["_templates"]
|
templates_path = ['_templates']
|
||||||
|
|
||||||
source_suffix = ".rst"
|
source_suffix = '.rst'
|
||||||
master_doc = "index"
|
master_doc = 'index'
|
||||||
|
|
||||||
project = "vdirsyncer"
|
project = u'vdirsyncer'
|
||||||
copyright = "2014-{}, Markus Unterwaditzer & contributors".format(
|
copyright = (u'2014-{}, Markus Unterwaditzer & contributors'
|
||||||
datetime.date.today().strftime("%Y")
|
.format(datetime.date.today().strftime('%Y')))
|
||||||
)
|
|
||||||
|
|
||||||
release = get_distribution("vdirsyncer").version
|
release = setuptools_scm.get_version(root='..', relative_to=__file__)
|
||||||
version = ".".join(release.split(".")[:2]) # The short X.Y version.
|
version = '.'.join(release.split('.')[:2]) # The short X.Y version.
|
||||||
|
|
||||||
rst_epilog = f".. |vdirsyncer_version| replace:: {release}"
|
rst_epilog = '.. |vdirsyncer_version| replace:: %s' % release
|
||||||
|
|
||||||
exclude_patterns = ["_build"]
|
exclude_patterns = ['_build']
|
||||||
|
|
||||||
pygments_style = "sphinx"
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
on_rtd = os.environ.get("READTHEDOCS", None) == "True"
|
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import sphinx_rtd_theme
|
import sphinx_rtd_theme
|
||||||
|
html_theme = 'sphinx_rtd_theme'
|
||||||
html_theme = "sphinx_rtd_theme"
|
|
||||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||||
except ImportError:
|
except ImportError:
|
||||||
html_theme = "default"
|
html_theme = 'default'
|
||||||
if not on_rtd:
|
if not on_rtd:
|
||||||
print("-" * 74)
|
print('-' * 74)
|
||||||
print("Warning: sphinx-rtd-theme not installed, building with default theme.")
|
print('Warning: sphinx-rtd-theme not installed, building with default '
|
||||||
print("-" * 74)
|
'theme.')
|
||||||
|
print('-' * 74)
|
||||||
|
|
||||||
html_static_path = ["_static"]
|
html_static_path = ['_static']
|
||||||
htmlhelp_basename = "vdirsyncerdoc"
|
htmlhelp_basename = 'vdirsyncerdoc'
|
||||||
|
|
||||||
latex_elements = {}
|
latex_elements = {}
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
(
|
('index', 'vdirsyncer.tex', u'vdirsyncer Documentation',
|
||||||
"index",
|
u'Markus Unterwaditzer', 'manual'),
|
||||||
"vdirsyncer.tex",
|
|
||||||
"vdirsyncer Documentation",
|
|
||||||
"Markus Unterwaditzer",
|
|
||||||
"manual",
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
man_pages = [
|
man_pages = [
|
||||||
("index", "vdirsyncer", "vdirsyncer Documentation", ["Markus Unterwaditzer"], 1)
|
('index', 'vdirsyncer', u'vdirsyncer Documentation',
|
||||||
|
[u'Markus Unterwaditzer'], 1)
|
||||||
]
|
]
|
||||||
|
|
||||||
texinfo_documents = [
|
texinfo_documents = [
|
||||||
(
|
('index', 'vdirsyncer', u'vdirsyncer Documentation',
|
||||||
"index",
|
u'Markus Unterwaditzer', 'vdirsyncer',
|
||||||
"vdirsyncer",
|
'Synchronize calendars and contacts.', 'Miscellaneous'),
|
||||||
"vdirsyncer Documentation",
|
|
||||||
"Markus Unterwaditzer",
|
|
||||||
"vdirsyncer",
|
|
||||||
"Synchronize calendars and contacts.",
|
|
||||||
"Miscellaneous",
|
|
||||||
),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def github_issue_role(name, rawtext, text, lineno, inliner, options=None, content=()):
|
def github_issue_role(name, rawtext, text, lineno, inliner,
|
||||||
options = options or {}
|
options={}, content=()): # noqa: B006
|
||||||
try:
|
try:
|
||||||
issue_num = int(text)
|
issue_num = int(text)
|
||||||
if issue_num <= 0:
|
if issue_num <= 0:
|
||||||
raise ValueError
|
raise ValueError()
|
||||||
except ValueError:
|
except ValueError:
|
||||||
msg = inliner.reporter.error(f"Invalid GitHub issue: {text}", line=lineno)
|
msg = inliner.reporter.error('Invalid GitHub issue: {}'.format(text),
|
||||||
|
line=lineno)
|
||||||
prb = inliner.problematic(rawtext, rawtext, msg)
|
prb = inliner.problematic(rawtext, rawtext, msg)
|
||||||
return [prb], [msg]
|
return [prb], [msg]
|
||||||
|
|
||||||
from docutils import nodes
|
from docutils import nodes
|
||||||
|
|
||||||
PROJECT_HOME = "https://github.com/pimutils/vdirsyncer"
|
PROJECT_HOME = 'https://github.com/pimutils/vdirsyncer'
|
||||||
link = "{}/{}/{}".format(
|
link = '{}/{}/{}'.format(PROJECT_HOME,
|
||||||
PROJECT_HOME, "issues" if name == "gh" else "pull", issue_num
|
'issues' if name == 'gh' else 'pull',
|
||||||
)
|
issue_num)
|
||||||
linktext = ("issue #{}" if name == "gh" else "pull request #{}").format(issue_num)
|
linktext = ('issue #{}' if name == 'gh'
|
||||||
node = nodes.reference(rawtext, linktext, refuri=link, **options)
|
else 'pull request #{}').format(issue_num)
|
||||||
|
node = nodes.reference(rawtext, linktext, refuri=link,
|
||||||
|
**options)
|
||||||
return [node], []
|
return [node], []
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
def setup(app):
|
||||||
from sphinx.domains.python import PyObject
|
from sphinx.domains.python import PyObject
|
||||||
|
app.add_object_type('storage', 'storage', 'pair: %s; storage',
|
||||||
app.add_object_type(
|
doc_field_types=PyObject.doc_field_types)
|
||||||
"storage",
|
app.add_role('gh', github_issue_role)
|
||||||
"storage",
|
app.add_role('ghpr', github_issue_role)
|
||||||
"pair: %s; storage",
|
|
||||||
doc_field_types=PyObject.doc_field_types,
|
|
||||||
)
|
|
||||||
app.add_role("gh", github_issue_role)
|
|
||||||
app.add_role("ghpr", github_issue_role)
|
|
||||||
|
|
|
||||||
174
docs/config.rst
174
docs/config.rst
|
|
@ -61,8 +61,7 @@ Pair Section
|
||||||
sync`` is executed. See also :ref:`collections_tutorial`.
|
sync`` is executed. See also :ref:`collections_tutorial`.
|
||||||
|
|
||||||
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
|
The special values ``"from a"`` and ``"from b"``, tell vdirsyncer to try
|
||||||
autodiscovery on a specific storage. It means all the collections on side A /
|
autodiscovery on a specific storage.
|
||||||
side B.
|
|
||||||
|
|
||||||
If the collection you want to sync doesn't have the same name on each side,
|
If the collection you want to sync doesn't have the same name on each side,
|
||||||
you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
|
you may also use a value of the form ``["config_name", "name_a", "name_b"]``.
|
||||||
|
|
@ -72,8 +71,8 @@ Pair Section
|
||||||
|
|
||||||
Examples:
|
Examples:
|
||||||
|
|
||||||
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize all
|
- ``collections = ["from b", "foo", "bar"]`` makes vdirsyncer synchronize the
|
||||||
the collections from side B, and also the collections named "foo" and "bar".
|
collections from side B, and also the collections named "foo" and "bar".
|
||||||
|
|
||||||
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
|
- ``collections = ["from b", "from a"]`` makes vdirsyncer synchronize all
|
||||||
existing collections on either side.
|
existing collections on either side.
|
||||||
|
|
@ -117,26 +116,10 @@ Pair Section
|
||||||
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
|
- ``metadata``: Metadata keys that should be synchronized when ``vdirsyncer
|
||||||
metasync`` is executed. Example::
|
metasync`` is executed. Example::
|
||||||
|
|
||||||
metadata = ["color", "displayname", "description", "order"]
|
metadata = ["color", "displayname"]
|
||||||
|
|
||||||
This synchronizes the following properties:
|
This synchronizes the ``color`` and the ``displayname`` properties. The
|
||||||
|
``conflict_resolution`` parameter applies here as well.
|
||||||
- color: ``http://apple.com/ns/ical/:calendar-color``
|
|
||||||
- displayname: ``DAV:displayname``
|
|
||||||
- description: ``CalDAV:calendar-description`` and ``CardDAV:addressbook-description``
|
|
||||||
- order: ``http://apple.com/ns/ical/:calendar-order``
|
|
||||||
|
|
||||||
The ``conflict_resolution`` parameter applies for these properties too.
|
|
||||||
|
|
||||||
.. _implicit_def:
|
|
||||||
|
|
||||||
- ``implicit``: Opt into implicitly creating collections. Example::
|
|
||||||
|
|
||||||
implicit = "create"
|
|
||||||
|
|
||||||
When set to "create", missing collections are automatically created on both
|
|
||||||
sides during sync without prompting the user. This simplifies workflows where
|
|
||||||
all collections should be synchronized bidirectionally.
|
|
||||||
|
|
||||||
.. _storage_config:
|
.. _storage_config:
|
||||||
|
|
||||||
|
|
@ -186,7 +169,7 @@ CalDAV and CardDAV
|
||||||
url = "..."
|
url = "..."
|
||||||
#username = ""
|
#username = ""
|
||||||
#password = ""
|
#password = ""
|
||||||
#verify = /path/to/custom_ca.pem
|
#verify = true
|
||||||
#auth = null
|
#auth = null
|
||||||
#useragent = "vdirsyncer/0.16.4"
|
#useragent = "vdirsyncer/0.16.4"
|
||||||
#verify_fingerprint = null
|
#verify_fingerprint = null
|
||||||
|
|
@ -219,10 +202,12 @@ CalDAV and CardDAV
|
||||||
:param url: Base URL or an URL to a calendar.
|
:param url: Base URL or an URL to a calendar.
|
||||||
:param username: Username for authentication.
|
:param username: Username for authentication.
|
||||||
:param password: Password for authentication.
|
:param password: Password for authentication.
|
||||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
:param verify: Verify SSL certificate, default True. This can also be a
|
||||||
See :ref:`ssl-tutorial` for more information.
|
local path to a self-signed SSL certificate. See :ref:`ssl-tutorial`
|
||||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
for more information.
|
||||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the
|
||||||
|
expected server certificate. See :ref:`ssl-tutorial` for more
|
||||||
|
information.
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||||
default is preemptive Basic auth, sending credentials even if server
|
default is preemptive Basic auth, sending credentials even if server
|
||||||
didn't request them. This saves from an additional roundtrip per
|
didn't request them. This saves from an additional roundtrip per
|
||||||
|
|
@ -244,20 +229,21 @@ CalDAV and CardDAV
|
||||||
url = "..."
|
url = "..."
|
||||||
#username = ""
|
#username = ""
|
||||||
#password = ""
|
#password = ""
|
||||||
#verify = /path/to/custom_ca.pem
|
#verify = true
|
||||||
#auth = null
|
#auth = null
|
||||||
#useragent = "vdirsyncer/0.16.4"
|
#useragent = "vdirsyncer/0.16.4"
|
||||||
#verify_fingerprint = null
|
#verify_fingerprint = null
|
||||||
#auth_cert = null
|
#auth_cert = null
|
||||||
#use_vcard_4 = false
|
|
||||||
|
|
||||||
:param url: Base URL or an URL to an addressbook.
|
:param url: Base URL or an URL to an addressbook.
|
||||||
:param username: Username for authentication.
|
:param username: Username for authentication.
|
||||||
:param password: Password for authentication.
|
:param password: Password for authentication.
|
||||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
:param verify: Verify SSL certificate, default True. This can also be a
|
||||||
See :ref:`ssl-tutorial` for more information.
|
local path to a self-signed SSL certificate. See
|
||||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
:ref:`ssl-tutorial` for more information.
|
||||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
:param verify_fingerprint: Optional. SHA1 or MD5 fingerprint of the expected
|
||||||
|
server certificate. See :ref:`ssl-tutorial` for
|
||||||
|
more information.
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
||||||
default is preemptive Basic auth, sending credentials even if
|
default is preemptive Basic auth, sending credentials even if
|
||||||
server didn't request them. This saves from an additional
|
server didn't request them. This saves from an additional
|
||||||
|
|
@ -267,7 +253,6 @@ CalDAV and CardDAV
|
||||||
certificate and the key or a list of paths to the files
|
certificate and the key or a list of paths to the files
|
||||||
with them.
|
with them.
|
||||||
:param useragent: Default ``vdirsyncer``.
|
:param useragent: Default ``vdirsyncer``.
|
||||||
:param use_vcard_4: Whether the server use vCard 4.0.
|
|
||||||
|
|
||||||
Google
|
Google
|
||||||
++++++
|
++++++
|
||||||
|
|
@ -281,15 +266,7 @@ in terms of data safety**. See `this blog post
|
||||||
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
|
<https://evertpot.com/google-carddav-issues/>`_ for the details. Always back
|
||||||
up your data.
|
up your data.
|
||||||
|
|
||||||
Another caveat is that Google group labels are not synced with vCard's
|
At first run you will be asked to authorize application for google account
|
||||||
`CATEGORIES <https://www.rfc-editor.org/rfc/rfc6350#section-6.7.1>`_ property
|
|
||||||
(also see :gh:`814` and
|
|
||||||
`upstream issue #36761530 <https://issuetracker.google.com/issues/36761530>`_
|
|
||||||
for reference) and the
|
|
||||||
`BDAY <https://www.rfc-editor.org/rfc/rfc6350#section-6.2.5>`_ property is not
|
|
||||||
synced when only partial date information is present (e.g. the year is missing).
|
|
||||||
|
|
||||||
At first run you will be asked to authorize application for Google account
|
|
||||||
access.
|
access.
|
||||||
|
|
||||||
To use this storage type, you need to install some additional dependencies::
|
To use this storage type, you need to install some additional dependencies::
|
||||||
|
|
@ -300,29 +277,25 @@ Furthermore you need to register vdirsyncer as an application yourself to
|
||||||
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
|
obtain ``client_id`` and ``client_secret``, as it is against Google's Terms of
|
||||||
Service to hardcode those into opensource software [googleterms]_:
|
Service to hardcode those into opensource software [googleterms]_:
|
||||||
|
|
||||||
1. Go to the `Google API Manager <https://console.developers.google.com>`_
|
1. Go to the `Google API Manager <https://console.developers.google.com>`_ and
|
||||||
|
create a new project under any name.
|
||||||
2. Create a new project under any name.
|
|
||||||
|
|
||||||
2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
|
2. Within that project, enable the "CalDAV" and "CardDAV" APIs (**not** the
|
||||||
Calendar and Contacts APIs, those are different and won't work). There should
|
Calendar and Contacts APIs, those are different and won't work). There should
|
||||||
be a search box where you can just enter those terms.
|
be a searchbox where you can just enter those terms.
|
||||||
|
|
||||||
3. In the sidebar, select "Credentials", then "Create Credentials" and create a
|
|
||||||
new "OAuth Client ID".
|
|
||||||
|
|
||||||
|
3. In the sidebar, select "Credentials" and create a new "OAuth Client ID". The
|
||||||
|
application type is "Other".
|
||||||
|
|
||||||
You'll be prompted to create a OAuth consent screen first. Fill out that
|
You'll be prompted to create a OAuth consent screen first. Fill out that
|
||||||
form however you like.
|
form however you like.
|
||||||
|
|
||||||
After setting up the consent screen, finish creating the new "OAuth Client
|
|
||||||
ID'. The correct application type is "Desktop application".
|
|
||||||
|
|
||||||
4. Finally you should have a Client ID and a Client secret. Provide these in
|
4. Finally you should have a Client ID and a Client secret. Provide these in
|
||||||
your storage config.
|
your storage config.
|
||||||
|
|
||||||
The ``token_file`` parameter should be a path to a file where vdirsyncer can
|
The ``token_file`` parameter should be a filepath where vdirsyncer can later
|
||||||
later store authentication-related data. You do not need to create the file
|
store authentication-related data. You do not need to create the file itself
|
||||||
itself or write anything to it.
|
or write anything to it.
|
||||||
|
|
||||||
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
|
.. [googleterms] See `ToS <https://developers.google.com/terms/?hl=th>`_,
|
||||||
section "Confidential Matters".
|
section "Confidential Matters".
|
||||||
|
|
@ -330,7 +303,7 @@ itself or write anything to it.
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
You need to configure which calendars Google should offer vdirsyncer using
|
You need to configure which calendars Google should offer vdirsyncer using
|
||||||
a secret `settings page
|
a rather hidden `settings page
|
||||||
<https://calendar.google.com/calendar/syncselect>`_.
|
<https://calendar.google.com/calendar/syncselect>`_.
|
||||||
|
|
||||||
.. storage:: google_calendar
|
.. storage:: google_calendar
|
||||||
|
|
@ -370,9 +343,55 @@ itself or write anything to it.
|
||||||
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
:param client_id/client_secret: OAuth credentials, obtained from the Google
|
||||||
API Manager.
|
API Manager.
|
||||||
|
|
||||||
The current flow is not ideal, but Google has deprecated the previous APIs used
|
EteSync
|
||||||
for this without providing a suitable replacement. See :gh:`975` for discussion
|
+++++++
|
||||||
on the topic.
|
|
||||||
|
`EteSync <https://www.etesync.com/>`_ is a new cloud provider for end to end
|
||||||
|
encrypted contacts and calendar storage. Vdirsyncer contains **experimental**
|
||||||
|
support for it.
|
||||||
|
|
||||||
|
To use it, you need to install some optional dependencies::
|
||||||
|
|
||||||
|
pip install vdirsyncer[etesync]
|
||||||
|
|
||||||
|
On first usage you will be prompted for the service password and the encryption
|
||||||
|
password. Neither are stored.
|
||||||
|
|
||||||
|
.. storage:: etesync_contacts
|
||||||
|
|
||||||
|
Contacts for etesync.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage example_for_etesync_contacts]
|
||||||
|
email = ...
|
||||||
|
secrets_dir = ...
|
||||||
|
#server_path = ...
|
||||||
|
#db_path = ...
|
||||||
|
|
||||||
|
:param email: The email address of your account.
|
||||||
|
:param secrets_dir: A directory where vdirsyncer can store the encryption
|
||||||
|
key and authentication token.
|
||||||
|
:param server_url: Optional. URL to the root of your custom server.
|
||||||
|
:param db_path: Optional. Use a different path for the database.
|
||||||
|
|
||||||
|
.. storage:: etesync_calendars
|
||||||
|
|
||||||
|
Calendars for etesync.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
[storage example_for_etesync_calendars]
|
||||||
|
email = ...
|
||||||
|
secrets_dir = ...
|
||||||
|
#server_path = ...
|
||||||
|
#db_path = ...
|
||||||
|
|
||||||
|
:param email: The email address of your account.
|
||||||
|
:param secrets_dir: A directory where vdirsyncer can store the encryption
|
||||||
|
key and authentication token.
|
||||||
|
:param server_url: Optional. URL to the root of your custom server.
|
||||||
|
:param db_path: Optional. Use a different path for the database.
|
||||||
|
|
||||||
Local
|
Local
|
||||||
+++++
|
+++++
|
||||||
|
|
@ -389,8 +408,6 @@ Local
|
||||||
fileext = "..."
|
fileext = "..."
|
||||||
#encoding = "utf-8"
|
#encoding = "utf-8"
|
||||||
#post_hook = null
|
#post_hook = null
|
||||||
#pre_deletion_hook = null
|
|
||||||
#fileignoreext = ".tmp"
|
|
||||||
|
|
||||||
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
|
Can be used with `khal <http://lostpackets.de/khal/>`_. See :doc:`vdir` for
|
||||||
a more formal description of the format.
|
a more formal description of the format.
|
||||||
|
|
@ -404,17 +421,11 @@ Local
|
||||||
:param fileext: The file extension to use (e.g. ``.txt``). Contained in the
|
:param fileext: The file extension to use (e.g. ``.txt``). Contained in the
|
||||||
href, so if you change the file extension after a sync, this will
|
href, so if you change the file extension after a sync, this will
|
||||||
trigger a re-download of everything (but *should* not cause data-loss
|
trigger a re-download of everything (but *should* not cause data-loss
|
||||||
of any kind). To be compatible with the ``vset`` format you have
|
of any kind).
|
||||||
to either use ``.vcf`` or ``.ics``. Note that metasync won't work
|
|
||||||
if you use an empty string here.
|
|
||||||
:param encoding: File encoding for items, both content and filename.
|
:param encoding: File encoding for items, both content and filename.
|
||||||
:param post_hook: A command to call for each item creation and
|
:param post_hook: A command to call for each item creation and
|
||||||
modification. The command will be called with the path of the
|
modification. The command will be called with the path of the
|
||||||
new/updated file.
|
new/updated file.
|
||||||
:param pre_deletion_hook: A command to call for each item deletion.
|
|
||||||
The command will be called with the path of the deleted file.
|
|
||||||
:param fileignoreext: The file extension to ignore. It is only useful
|
|
||||||
if fileext is set to the empty string. The default is ``.tmp``.
|
|
||||||
|
|
||||||
.. storage:: singlefile
|
.. storage:: singlefile
|
||||||
|
|
||||||
|
|
@ -494,7 +505,6 @@ leads to an error.
|
||||||
[storage holidays_remote]
|
[storage holidays_remote]
|
||||||
type = "http"
|
type = "http"
|
||||||
url = https://example.com/holidays_from_hicksville.ics
|
url = https://example.com/holidays_from_hicksville.ics
|
||||||
#filter_hook = null
|
|
||||||
|
|
||||||
Too many WebCAL providers generate UIDs of all ``VEVENT``-components
|
Too many WebCAL providers generate UIDs of all ``VEVENT``-components
|
||||||
on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
|
on-the-fly, i.e. all UIDs change every time the calendar is downloaded.
|
||||||
|
|
@ -505,22 +515,10 @@ leads to an error.
|
||||||
of the normalized item content.
|
of the normalized item content.
|
||||||
|
|
||||||
:param url: URL to the ``.ics`` file.
|
:param url: URL to the ``.ics`` file.
|
||||||
:param username: Username for authentication.
|
:param username: Username for HTTP basic authentication.
|
||||||
:param password: Password for authentication.
|
:param password: Password for HTTP basic authentication.
|
||||||
:param verify: Optional. Local path to a self-signed SSL certificate.
|
:param useragent: Default ``vdirsyncer``.
|
||||||
See :ref:`ssl-tutorial` for more information.
|
:param verify_cert: Add one new root certificate file in PEM format. Useful
|
||||||
:param verify_fingerprint: Optional. SHA256 fingerprint of the expected
|
for servers with self-signed certificates.
|
||||||
server certificate. See :ref:`ssl-tutorial` for more information.
|
|
||||||
:param auth: Optional. Either ``basic``, ``digest`` or ``guess``. The
|
|
||||||
default is preemptive Basic auth, sending credentials even if server
|
|
||||||
didn't request them. This saves from an additional roundtrip per
|
|
||||||
request. Consider setting ``guess`` if this causes issues with your
|
|
||||||
server.
|
|
||||||
:param auth_cert: Optional. Either a path to a certificate with a client
|
:param auth_cert: Optional. Either a path to a certificate with a client
|
||||||
certificate and the key or a list of paths to the files with them.
|
certificate and the key or a list of paths to the files with them.
|
||||||
:param useragent: Default ``vdirsyncer``.
|
|
||||||
:param filter_hook: Optional. A filter command to call for each fetched
|
|
||||||
item, passed in raw form to stdin and returned via stdout.
|
|
||||||
If nothing is returned by the filter command, the item is skipped.
|
|
||||||
This can be used to alter fields as needed when dealing with providers
|
|
||||||
generating malformed events.
|
|
||||||
|
|
|
||||||
|
|
@ -2,11 +2,14 @@
|
||||||
Support and Contact
|
Support and Contact
|
||||||
===================
|
===================
|
||||||
|
|
||||||
* The ``#pimutils`` `IRC channel on Libera.Chat <https://pimutils.org/contact>`_
|
* The ``#pimutils`` `IRC channel on Freenode <https://pimutils.org/contact>`_
|
||||||
might be active, depending on your timezone. Use it for support and general
|
might be active, depending on your timezone. Use it for support and general
|
||||||
(including off-topic) discussion.
|
(including off-topic) discussion.
|
||||||
|
|
||||||
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
|
* Open `a GitHub issue <https://github.com/pimutils/vdirsyncer/issues/>`_ for
|
||||||
concrete bug reports and feature requests.
|
concrete bug reports and feature requests.
|
||||||
|
|
||||||
* For security issues, contact ``contact@pimutils.org``.
|
* Lastly, you can also `contact the author directly
|
||||||
|
<https://unterwaditzer.net/contact.html>`_. Do this for security issues. If
|
||||||
|
that doesn't work out (i.e. if I don't respond within one week), use
|
||||||
|
``contact@pimutils.org``.
|
||||||
|
|
|
||||||
|
|
@ -75,36 +75,36 @@ Submitting patches, pull requests
|
||||||
Running tests, how to set up your development environment
|
Running tests, how to set up your development environment
|
||||||
---------------------------------------------------------
|
---------------------------------------------------------
|
||||||
|
|
||||||
For many patches, it might suffice to just let CI run the tests. However,
|
For many patches, it might suffice to just let Travis run the tests. However,
|
||||||
CI is slow, so you might want to run them locally too. For this, set up a
|
Travis is slow, so you might want to run them locally too. For this, set up a
|
||||||
virtualenv_ and run this inside of it::
|
virtualenv_ and run this inside of it::
|
||||||
|
|
||||||
# Install development dependencies, including:
|
# install:
|
||||||
# - vdirsyncer from the repo into the virtualenv
|
# - vdirsyncer from the repo into the virtualenv
|
||||||
# - style checks and formatting (ruff)
|
# - stylecheckers (flake8) and code formatters (autopep8)
|
||||||
make install-dev
|
make install-dev
|
||||||
|
|
||||||
# Install git commit hook for some extra linting and checking
|
# Install git commit hook for the stylechecker
|
||||||
pre-commit install
|
make install-git-hooks
|
||||||
|
|
||||||
|
# install test dependencies
|
||||||
|
make install-test
|
||||||
|
|
||||||
Then you can run::
|
Then you can run::
|
||||||
|
|
||||||
pytest # The normal testsuite
|
make test # The normal testsuite
|
||||||
pre-commit run --all # Run all linters (which also run via pre-commit)
|
make style # Stylechecker
|
||||||
make -C docs html # Build the HTML docs, output is at docs/_build/html/
|
make docs # Build the HTML docs, output is at docs/_build/html/
|
||||||
make -C docs linkcheck # Check docs for any broken links
|
|
||||||
|
|
||||||
The ``Makefile`` has a lot of options that allow you to control which tests are
|
The ``Makefile`` has a lot of options that allow you to control which tests are
|
||||||
run, and which servers are tested. Take a look at its code where they are all
|
run, and which servers are tested. Take a look at its code where they are all
|
||||||
initialized and documented.
|
initialized and documented.
|
||||||
|
|
||||||
To tests against a specific DAV server, use ``DAV_SERVER``::
|
For example, to test xandikos, run::
|
||||||
|
|
||||||
|
make DAV_SERVER=xandikos install-test
|
||||||
make DAV_SERVER=xandikos test
|
make DAV_SERVER=xandikos test
|
||||||
|
|
||||||
The server will be initialised in a docker container and terminated at the end
|
|
||||||
of the test suite.
|
|
||||||
|
|
||||||
If you have any questions, feel free to open issues about it.
|
If you have any questions, feel free to open issues about it.
|
||||||
|
|
||||||
Structure of the testsuite
|
Structure of the testsuite
|
||||||
|
|
|
||||||
|
|
@ -2,14 +2,23 @@
|
||||||
Donations
|
Donations
|
||||||
=========
|
=========
|
||||||
|
|
||||||
vdirsyncer is and will always be free and open source software. We appreciate
|
|
||||||
sponsors willing to fund our continued work on it.
|
|
||||||
|
|
||||||
If you found my work useful, please consider donating. Thank you!
|
If you found my work useful, please consider donating. Thank you!
|
||||||
|
|
||||||
- Bitcoin: ``13p42uWDL62bNRH3KWA6cSpSgvnHy1fs2E``.
|
- Bitcoin: ``16sSHxZm263WHR9P9PJjCxp64jp9ooXKVt``
|
||||||
- Sponsor via one-time tips or recurring donations `via Ko-fi`_.
|
|
||||||
- Sponsor via recurring donations `via liberapay`_.
|
|
||||||
|
|
||||||
.. _via Ko-fi: https://ko-fi.com/whynothugo
|
- `PayPal.me <https://www.paypal.me/untitaker>`_
|
||||||
.. _via liberapay: https://liberapay.com/WhyNotHugo/
|
|
||||||
|
- `Bountysource <https://www.bountysource.com/teams/vdirsyncer>`_ is useful for
|
||||||
|
funding work on a specific GitHub issue.
|
||||||
|
|
||||||
|
- There's also `Bountysource Salt
|
||||||
|
<https://salt.bountysource.com/teams/vdirsyncer>`_, for one-time and
|
||||||
|
recurring donations.
|
||||||
|
|
||||||
|
- Donations via Bountysource are publicly listed. Use PayPal if you dislike
|
||||||
|
that.
|
||||||
|
|
||||||
|
- `Flattr
|
||||||
|
<https://flattr.com/submit/auto?user_id=untitaker&url=https%3A%2F%2Fgithub.com%2Fpimutils%2Fvdirsyncer>`_
|
||||||
|
or `Gratipay <https://gratipay.com/vdirsyncer/>`_ can be used for
|
||||||
|
recurring donations.
|
||||||
|
|
|
||||||
|
|
@ -7,18 +7,17 @@ Installation
|
||||||
OS/distro packages
|
OS/distro packages
|
||||||
------------------
|
------------------
|
||||||
|
|
||||||
The following packages are community-contributed and were up-to-date at the
|
The following packages are user-contributed and were up-to-date at the time of
|
||||||
time of writing:
|
writing:
|
||||||
|
|
||||||
- `Arch Linux <https://archlinux.org/packages/extra/any/vdirsyncer/>`_
|
- `ArchLinux <https://www.archlinux.org/packages/community/any/vdirsyncer/>`_
|
||||||
- `Ubuntu and Debian, x86_64-only
|
- `Ubuntu and Debian, x86_64-only
|
||||||
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
|
<https://packagecloud.io/pimutils/vdirsyncer>`_ (packages also exist
|
||||||
in the official repositories but may be out of date)
|
in the official repositories but may be out of date)
|
||||||
- `GNU Guix <https://packages.guix.gnu.org/packages/vdirsyncer/>`_
|
- `GNU Guix <https://www.gnu.org/software/guix/package-list.html#vdirsyncer>`_
|
||||||
- `macOS (homebrew) <https://formulae.brew.sh/formula/vdirsyncer>`_
|
- `OS X (homebrew) <http://braumeister.org/formula/vdirsyncer>`_
|
||||||
- `NetBSD <https://ftp.netbsd.org/pub/pkgsrc/current/pkgsrc/time/py-vdirsyncer/index.html>`_
|
- `BSD (pkgsrc) <http://pkgsrc.se/time/py-vdirsyncer>`_
|
||||||
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
|
- `OpenBSD <http://ports.su/productivity/vdirsyncer>`_
|
||||||
- `Slackware (SlackBuild at Slackbuilds.org) <https://slackbuilds.org/repository/15.0/network/vdirsyncer/>`_
|
|
||||||
|
|
||||||
We only support the latest version of vdirsyncer, which is at the time of this
|
We only support the latest version of vdirsyncer, which is at the time of this
|
||||||
writing |vdirsyncer_version|. Please **do not file bugs if you use an older
|
writing |vdirsyncer_version|. Please **do not file bugs if you use an older
|
||||||
|
|
@ -42,55 +41,37 @@ If your distribution doesn't provide a package for vdirsyncer, you still can
|
||||||
use Python's package manager "pip". First, you'll have to check that the
|
use Python's package manager "pip". First, you'll have to check that the
|
||||||
following things are installed:
|
following things are installed:
|
||||||
|
|
||||||
- Python 3.9 to 3.13 and pip.
|
- Python 3.4+ and pip.
|
||||||
- ``libxml`` and ``libxslt``
|
- ``libxml`` and ``libxslt``
|
||||||
- ``zlib``
|
- ``zlib``
|
||||||
- Linux or macOS. **Windows is not supported**, see :gh:`535`.
|
- `Rust <https://www.rust-lang.org/>`_, the programming language, together with
|
||||||
|
its package manager ``cargo``.
|
||||||
|
- Linux or OS X. **Windows is not supported**, see :gh:`535`.
|
||||||
|
|
||||||
On Linux systems, using the distro's package manager is the best
|
On Linux systems, using the distro's package manager is the best way to do
|
||||||
way to do this, for example, using Ubuntu::
|
this, for example, using Ubuntu (last tried on Trusty)::
|
||||||
|
|
||||||
sudo apt-get install libxml2 libxslt1.1 zlib1g python3
|
sudo apt-get install python3 python3-pip libffi-dev
|
||||||
|
|
||||||
|
Rust may need to be installed separately, as the packages in Ubuntu are usually
|
||||||
|
out-of-date. I recommend `rustup <https://rustup.rs/>`_ for that.
|
||||||
|
|
||||||
Then you have several options. The following text applies for most Python
|
Then you have several options. The following text applies for most Python
|
||||||
software by the way.
|
software by the way.
|
||||||
|
|
||||||
pipx: The clean, easy way
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
pipx_ is a new package manager for Python-based software that automatically
|
|
||||||
sets up a virtual environment for each program it installs. Please note that
|
|
||||||
installing via pipx will not include manual pages nor systemd services.
|
|
||||||
|
|
||||||
pipx will install vdirsyncer into ``~/.local/pipx/venvs/vdirsyncer``
|
|
||||||
|
|
||||||
Assuming that pipx is installed, vdirsyncer can be installed with::
|
|
||||||
|
|
||||||
pipx install vdirsyncer
|
|
||||||
|
|
||||||
It can later be updated to the latest version with::
|
|
||||||
|
|
||||||
pipx upgrade vdirsyncer
|
|
||||||
|
|
||||||
And can be uninstalled with::
|
|
||||||
|
|
||||||
pipx uninstall vdirsyncer
|
|
||||||
|
|
||||||
This last command will remove vdirsyncer and any dependencies installed into
|
|
||||||
the above location.
|
|
||||||
|
|
||||||
.. _pipx: https://github.com/pipxproject/pipx
|
|
||||||
|
|
||||||
The dirty, easy way
|
The dirty, easy way
|
||||||
~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
If pipx is not available on your distribution, the easiest way to install
|
The easiest way to install vdirsyncer at this point would be to run::
|
||||||
vdirsyncer at this point would be to run::
|
|
||||||
|
|
||||||
pip install --ignore-installed vdirsyncer
|
pip3 install -v --user --ignore-installed vdirsyncer
|
||||||
|
|
||||||
|
- ``--user`` is to install without root rights (into your home directory)
|
||||||
- ``--ignore-installed`` is to work around Debian's potentially broken packages
|
- ``--ignore-installed`` is to work around Debian's potentially broken packages
|
||||||
(see :ref:`debian-urllib3`).
|
(see :ref:`debian-urllib3`). You can try to omit it if you run into other
|
||||||
|
problems related to certificates, for example.
|
||||||
|
|
||||||
|
Your executable is then in ``~/.local/bin/``.
|
||||||
|
|
||||||
This method has a major flaw though: Pip doesn't keep track of the files it
|
This method has a major flaw though: Pip doesn't keep track of the files it
|
||||||
installs. Vdirsyncer's files would be located somewhere in
|
installs. Vdirsyncer's files would be located somewhere in
|
||||||
|
|
@ -106,9 +87,9 @@ There is a way to install Python software without scattering stuff across
|
||||||
your filesystem: virtualenv_. There are a lot of resources on how to use it,
|
your filesystem: virtualenv_. There are a lot of resources on how to use it,
|
||||||
the simplest possible way would look something like::
|
the simplest possible way would look something like::
|
||||||
|
|
||||||
virtualenv ~/vdirsyncer_env
|
virtualenv --python python3 ~/vdirsyncer_env
|
||||||
~/vdirsyncer_env/bin/pip install vdirsyncer
|
~/vdirsyncer_env/bin/pip install -v vdirsyncer
|
||||||
alias vdirsyncer="~/vdirsyncer_env/bin/vdirsyncer"
|
alias vdirsyncer="$HOME/vdirsyncer_env/bin/vdirsyncer"
|
||||||
|
|
||||||
You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``.
|
You'll have to put the last line into your ``.bashrc`` or ``.bash_profile``.
|
||||||
|
|
||||||
|
|
@ -119,4 +100,25 @@ This method has two advantages:
|
||||||
distro-specific issues.
|
distro-specific issues.
|
||||||
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
|
- You can delete ``~/vdirsyncer_env/`` to uninstall vdirsyncer entirely.
|
||||||
|
|
||||||
|
The clean, easy way
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
pipsi_ is a new package manager for Python-based software that automatically
|
||||||
|
sets up a virtualenv for each program you install. Assuming you have it
|
||||||
|
installed on your operating system, you can do::
|
||||||
|
|
||||||
|
pipsi install --python python3 vdirsyncer
|
||||||
|
|
||||||
|
and ``.local/bin/vdirsyncer`` will be your new vdirsyncer installation. To
|
||||||
|
update vdirsyncer to the latest version::
|
||||||
|
|
||||||
|
pipsi upgrade vdirsyncer
|
||||||
|
|
||||||
|
If you're done with vdirsyncer, you can do::
|
||||||
|
|
||||||
|
pipsi uninstall vdirsyncer
|
||||||
|
|
||||||
|
and vdirsyncer will be uninstalled, including its dependencies.
|
||||||
|
|
||||||
.. _virtualenv: https://virtualenv.readthedocs.io/
|
.. _virtualenv: https://virtualenv.readthedocs.io/
|
||||||
|
.. _pipsi: https://github.com/mitsuhiko/pipsi
|
||||||
|
|
|
||||||
|
|
@ -38,12 +38,6 @@ You can fetch the username as well::
|
||||||
|
|
||||||
Or really any kind of parameter in a storage section.
|
Or really any kind of parameter in a storage section.
|
||||||
|
|
||||||
You can also pass the command as a string to be executed in a shell::
|
|
||||||
|
|
||||||
[storage foo]
|
|
||||||
...
|
|
||||||
password.fetch = ["shell", "~/.local/bin/get-my-password | head -n1"]
|
|
||||||
|
|
||||||
With pass_ for example, you might find yourself writing something like this in
|
With pass_ for example, you might find yourself writing something like this in
|
||||||
your configuration file::
|
your configuration file::
|
||||||
|
|
||||||
|
|
@ -66,7 +60,7 @@ passwords from the OS's password store. Installation::
|
||||||
Basic usage::
|
Basic usage::
|
||||||
|
|
||||||
password.fetch = ["command", "keyring", "get", "example.com", "foouser"]
|
password.fetch = ["command", "keyring", "get", "example.com", "foouser"]
|
||||||
|
|
||||||
.. _keyring: https://github.com/jaraco/keyring/
|
.. _keyring: https://github.com/jaraco/keyring/
|
||||||
|
|
||||||
Password Prompt
|
Password Prompt
|
||||||
|
|
@ -78,19 +72,3 @@ You can also simply prompt for the password::
|
||||||
type = "caldav"
|
type = "caldav"
|
||||||
username = "myusername"
|
username = "myusername"
|
||||||
password.fetch = ["prompt", "Password for CalDAV"]
|
password.fetch = ["prompt", "Password for CalDAV"]
|
||||||
|
|
||||||
Environment variable
|
|
||||||
====================
|
|
||||||
|
|
||||||
To read the password from an environment variable::
|
|
||||||
|
|
||||||
[storage foo]
|
|
||||||
type = "caldav"
|
|
||||||
username = "myusername"
|
|
||||||
password.fetch = ["command", "printenv", "DAV_PW"]
|
|
||||||
|
|
||||||
This is especially handy if you use the same password multiple times
|
|
||||||
(say, for a CardDAV and a CalDAV storage).
|
|
||||||
On bash, you can read and export the password without printing::
|
|
||||||
|
|
||||||
read -s -p "DAV Password: " DAV_PW && export DAV_PW
|
|
||||||
|
|
|
||||||
|
|
@ -5,27 +5,23 @@ Packaging guidelines
|
||||||
Thank you very much for packaging vdirsyncer! The following guidelines should
|
Thank you very much for packaging vdirsyncer! The following guidelines should
|
||||||
help you to avoid some common pitfalls.
|
help you to avoid some common pitfalls.
|
||||||
|
|
||||||
If you find yourself needing to patch anything, or going in a different direction,
|
While they are called guidelines and therefore theoretically not mandatory, if
|
||||||
please open an issue so we can also address in a way that works for everyone. Otherwise
|
you consider going a different direction, please first open an issue or contact
|
||||||
we get bug reports for code or scenarios that don't exist in upstream vdirsycner.
|
me otherwise instead of just going ahead. These guidelines exist for my own
|
||||||
|
convenience too.
|
||||||
|
|
||||||
Obtaining the source code
|
Obtaining the source code
|
||||||
=========================
|
=========================
|
||||||
|
|
||||||
The main distribution channel is `PyPI
|
The main distribution channel is `PyPI
|
||||||
<https://pypi.python.org/pypi/vdirsyncer>`_, and source tarballs can be
|
<https://pypi.python.org/pypi/vdirsyncer>`_, and source tarballs can be
|
||||||
obtained there. We mirror the same package tarball and wheel as GitHub
|
obtained there. Do not use the ones from GitHub: Their tarballs contain useless
|
||||||
releases. Please do not confuse these with the auto-generated GitHub "Source
|
junk and are more of a distraction than anything else.
|
||||||
Code" tarball. Those are missing some important metadata and your build will fail.
|
|
||||||
|
|
||||||
We give each release a tag in the git repo. If you want to get notified of new
|
I give each release a tag in the git repo. If you want to get notified of new
|
||||||
releases, `GitHub's feed
|
releases, `GitHub's feed
|
||||||
<https://github.com/pimutils/vdirsyncer/releases.atom>`_ is a good way.
|
<https://github.com/pimutils/vdirsyncer/releases.atom>`_ is a good way.
|
||||||
|
|
||||||
Tags will be signed by the maintainer who is doing the release (starting with
|
|
||||||
0.16.8), and generation of the tarball and wheel is done by CI. Hence, only the
|
|
||||||
tag itself is signed.
|
|
||||||
|
|
||||||
Dependency versions
|
Dependency versions
|
||||||
===================
|
===================
|
||||||
|
|
||||||
|
|
@ -37,25 +33,24 @@ Testing
|
||||||
=======
|
=======
|
||||||
|
|
||||||
Everything testing-related goes through the ``Makefile`` in the root of the
|
Everything testing-related goes through the ``Makefile`` in the root of the
|
||||||
repository or PyPI package. Trying to e.g. run ``pytest`` directly will
|
repository or PyPI package. Trying to e.g. run ``py.test`` directly will
|
||||||
require a lot of environment variables to be set (for configuration) and you
|
require a lot of environment variables to be set (for configuration) and you
|
||||||
probably don't want to deal with that.
|
probably don't want to deal with that.
|
||||||
|
|
||||||
You can install the all development dependencies with::
|
You can install the testing dependencies with::
|
||||||
|
|
||||||
make install-dev
|
make install-test
|
||||||
|
|
||||||
You probably don't want this since it will use pip to download the
|
You probably don't want this since it will use pip to download the
|
||||||
dependencies. Alternatively test dependencies are listed as ``test`` optional
|
dependencies. Alternatively you can find the testing dependencies in
|
||||||
dependencies in ``pyproject.toml``, again with lower-bound version
|
``test-requirements.txt``, again with lower-bound version requirements.
|
||||||
requirements.
|
|
||||||
|
|
||||||
You also have to have vdirsyncer fully installed at this point. Merely
|
You also have to have vdirsyncer fully installed at this point. Merely
|
||||||
``cd``-ing into the tarball will not be sufficient.
|
``cd``-ing into the tarball will not be sufficient.
|
||||||
|
|
||||||
Running the tests happens with::
|
Running the tests happens with::
|
||||||
|
|
||||||
pytest
|
make test
|
||||||
|
|
||||||
Hypothesis will randomly generate test input. If you care about deterministic
|
Hypothesis will randomly generate test input. If you care about deterministic
|
||||||
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
|
tests, set the ``DETERMINISTIC_TESTS`` variable to ``"true"``::
|
||||||
|
|
@ -74,11 +69,10 @@ Using Sphinx_ you can generate the documentation you're reading right now in a
|
||||||
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
|
variety of formats, such as HTML, PDF, or even as a manpage. That said, I only
|
||||||
take care of the HTML docs' formatting.
|
take care of the HTML docs' formatting.
|
||||||
|
|
||||||
You can find a list of dependencies in ``pyproject.toml``, in the
|
You can find a list of dependencies in ``docs-requirements.txt``. Again, you
|
||||||
``project.optional-dependencies`` section as ``docs``. Again, you can install
|
can install those using pip with::
|
||||||
those using pip with::
|
|
||||||
|
|
||||||
pip install '.[docs]'
|
make install-docs
|
||||||
|
|
||||||
Then change into the ``docs/`` directory and build whatever format you want
|
Then change into the ``docs/`` directory and build whatever format you want
|
||||||
using the ``Makefile`` in there (run ``make`` for the formats you can build).
|
using the ``Makefile`` in there (run ``make`` for the formats you can build).
|
||||||
|
|
|
||||||
|
|
@ -66,7 +66,3 @@ For such purposes you can set the ``partial_sync`` parameter to ``ignore``::
|
||||||
partial_sync = ignore
|
partial_sync = ignore
|
||||||
|
|
||||||
See :ref:`the config docs <partial_sync_def>` for more information.
|
See :ref:`the config docs <partial_sync_def>` for more information.
|
||||||
|
|
||||||
.. _nextCloud: https://nextcloud.com/
|
|
||||||
.. _Baikal: http://sabre.io/baikal/
|
|
||||||
.. _DAViCal: http://www.davical.org/
|
|
||||||
|
|
|
||||||
|
|
@ -18,5 +18,5 @@ package that don't play well with packages assuming a normal ``requests``. This
|
||||||
is due to stubbornness on both sides.
|
is due to stubbornness on both sides.
|
||||||
|
|
||||||
See :gh:`82` and :gh:`140` for past discussions. You have one option to work
|
See :gh:`82` and :gh:`140` for past discussions. You have one option to work
|
||||||
around this, that is, to install vdirsyncer in a virtual environment, see
|
around this, that is, to install vdirsyncer in a virtualenv, see
|
||||||
:ref:`manual-installation`.
|
:ref:`manual-installation`.
|
||||||
|
|
|
||||||
|
|
@ -14,14 +14,21 @@ To pin the certificate by fingerprint::
|
||||||
[storage foo]
|
[storage foo]
|
||||||
type = "caldav"
|
type = "caldav"
|
||||||
...
|
...
|
||||||
verify_fingerprint = "6D:83:EA:32:6C:39:BA:08:ED:EB:C9:BC:BE:12:BB:BF:0F:D9:83:00:CC:89:7E:C7:32:05:94:96:CA:C5:59:5E"
|
verify_fingerprint = "94:FD:7A:CB:50:75:A4:69:82:0A:F8:23:DF:07:FC:69:3E:CD:90:CA"
|
||||||
|
#verify = false # Optional: Disable CA validation, useful for self-signed certs
|
||||||
|
|
||||||
SHA256-Fingerprints must be used, MD5 and SHA-1 are insecure and not supported.
|
SHA1-, SHA256- or MD5-Fingerprints can be used. They're detected by their
|
||||||
CA validation is disabled when pinning a fingerprint.
|
length.
|
||||||
|
|
||||||
You can use the following command for obtaining a SHA256 fingerprint::
|
You can use the following command for obtaining a SHA-1 fingerprint::
|
||||||
|
|
||||||
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint -sha256
|
echo -n | openssl s_client -connect unterwaditzer.net:443 | openssl x509 -noout -fingerprint
|
||||||
|
|
||||||
|
Note that ``verify_fingerprint`` doesn't suffice for vdirsyncer to work with
|
||||||
|
self-signed certificates (or certificates that are not in your trust store). You
|
||||||
|
most likely need to set ``verify = false`` as well. This disables verification
|
||||||
|
of the SSL certificate's expiration time and the existence of it in your trust
|
||||||
|
store, all that's verified now is the fingerprint.
|
||||||
|
|
||||||
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
|
However, please consider using `Let's Encrypt <https://letsencrypt.org/>`_ such
|
||||||
that you can forget about all of that. It is easier to deploy a free
|
that you can forget about all of that. It is easier to deploy a free
|
||||||
|
|
@ -40,16 +47,22 @@ To point vdirsyncer to a custom set of root CAs::
|
||||||
...
|
...
|
||||||
verify = "/path/to/cert.pem"
|
verify = "/path/to/cert.pem"
|
||||||
|
|
||||||
Vdirsyncer uses the aiohttp_ library, which uses the default `ssl.SSLContext
|
Vdirsyncer uses the requests_ library, which, by default, `uses its own set of
|
||||||
https://docs.python.org/3/library/ssl.html#ssl.SSLContext`_ by default.
|
trusted CAs
|
||||||
|
<http://www.python-requests.org/en/latest/user/advanced/#ca-certificates>`_.
|
||||||
|
|
||||||
There are cases where certificate validation fails even though you can access
|
However, the actual behavior depends on how you have installed it. Many Linux
|
||||||
the server fine through e.g. your browser. This usually indicates that your
|
distributions patch their ``python-requests`` package to use the system
|
||||||
installation of ``python`` or the ``aiohttp`` or library is somehow broken. In
|
certificate CAs. Normally these two stores are similar enough for you to not
|
||||||
such cases, it makes sense to explicitly set ``verify`` or
|
care.
|
||||||
``verify_fingerprint`` as shown above.
|
|
||||||
|
|
||||||
.. _aiohttp: https://docs.aiohttp.org/en/stable/index.html
|
But there are cases where certificate validation fails even though you can
|
||||||
|
access the server fine through e.g. your browser. This usually indicates that
|
||||||
|
your installation of the ``requests`` library is somehow broken. In such cases,
|
||||||
|
it makes sense to explicitly set ``verify`` or ``verify_fingerprint`` as shown
|
||||||
|
above.
|
||||||
|
|
||||||
|
.. _requests: http://www.python-requests.org/
|
||||||
|
|
||||||
.. _ssl-client-certs:
|
.. _ssl-client-certs:
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -16,7 +16,7 @@ Configuration
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
- The `config.example from the repository
|
- The `config.example from the repository
|
||||||
<https://github.com/pimutils/vdirsyncer/blob/main/config.example>`_
|
<https://github.com/pimutils/vdirsyncer/blob/master/config.example>`_
|
||||||
contains a very terse version of this.
|
contains a very terse version of this.
|
||||||
|
|
||||||
- In this example we set up contacts synchronization, but calendar sync
|
- In this example we set up contacts synchronization, but calendar sync
|
||||||
|
|
@ -53,7 +53,8 @@ pairs of storages should actually be synchronized is defined in :ref:`pair
|
||||||
section <pair_config>`. This format is copied from OfflineIMAP, where storages
|
section <pair_config>`. This format is copied from OfflineIMAP, where storages
|
||||||
are called repositories and pairs are called accounts.
|
are called repositories and pairs are called accounts.
|
||||||
|
|
||||||
The following example synchronizes ownCloud's addressbooks to ``~/.contacts/``::
|
The following example synchronizes addressbooks from a :doc:`NextCloud
|
||||||
|
<tutorials/nextcloud>` to ``~/.contacts/``::
|
||||||
|
|
||||||
|
|
||||||
[pair my_contacts]
|
[pair my_contacts]
|
||||||
|
|
@ -70,7 +71,7 @@ The following example synchronizes ownCloud's addressbooks to ``~/.contacts/``::
|
||||||
type = "carddav"
|
type = "carddav"
|
||||||
|
|
||||||
# We can simplify this URL here as well. In theory it shouldn't matter.
|
# We can simplify this URL here as well. In theory it shouldn't matter.
|
||||||
url = "https://owncloud.example.com/remote.php/carddav/"
|
url = "https://nextcloud.example.com/"
|
||||||
username = "bob"
|
username = "bob"
|
||||||
password = "asdf"
|
password = "asdf"
|
||||||
|
|
||||||
|
|
@ -162,13 +163,13 @@ let's switch to a different base example. This time we'll synchronize calendars:
|
||||||
[storage my_calendars_remote]
|
[storage my_calendars_remote]
|
||||||
type = "caldav"
|
type = "caldav"
|
||||||
|
|
||||||
url = "https://owncloud.example.com/remote.php/caldav/"
|
url = "https://nextcloud.example.com/"
|
||||||
username = "bob"
|
username = "bob"
|
||||||
password = "asdf"
|
password = "asdf"
|
||||||
|
|
||||||
Run ``vdirsyncer discover`` for discovery. Then you can use ``vdirsyncer
|
Run ``vdirsyncer discover`` for discovery. Then you can use ``vdirsyncer
|
||||||
metasync`` to synchronize the ``color`` property between your local calendars
|
metasync`` to synchronize the ``color`` property between your local calendars
|
||||||
in ``~/.calendars/`` and your ownCloud. Locally the color is just represented
|
in ``~/.calendars/`` and your NextCloud. Locally the color is just represented
|
||||||
as a file called ``color`` within the calendar folder.
|
as a file called ``color`` within the calendar folder.
|
||||||
|
|
||||||
.. _collections_tutorial:
|
.. _collections_tutorial:
|
||||||
|
|
@ -176,11 +177,8 @@ as a file called ``color`` within the calendar folder.
|
||||||
More information about collections
|
More information about collections
|
||||||
----------------------------------
|
----------------------------------
|
||||||
|
|
||||||
"Collection" is a collective term for addressbooks and calendars. A Cardav or
|
"Collection" is a collective term for addressbooks and calendars. Each
|
||||||
Caldav server can contains several "collections" which correspond to several
|
collection from a storage has a "collection name", a unique identifier for each
|
||||||
addressbooks or calendar.
|
|
||||||
|
|
||||||
Each collection from a storage has a "collection name", a unique identifier for each
|
|
||||||
collection. In the case of :storage:`filesystem`-storage, this is the name of the
|
collection. In the case of :storage:`filesystem`-storage, this is the name of the
|
||||||
directory that represents the collection, in the case of the DAV-storages this
|
directory that represents the collection, in the case of the DAV-storages this
|
||||||
is the last segment of the URL. We use this identifier in the ``collections``
|
is the last segment of the URL. We use this identifier in the ``collections``
|
||||||
|
|
|
||||||
|
|
@ -1,10 +0,0 @@
|
||||||
======
|
|
||||||
Baikal
|
|
||||||
======
|
|
||||||
|
|
||||||
Vdirsyncer is continuously tested against the latest version of Baikal_.
|
|
||||||
|
|
||||||
- Baikal up to ``0.2.7`` also uses an old version of SabreDAV, with the same
|
|
||||||
issue as ownCloud, see :gh:`160`. This issue is fixed in later versions.
|
|
||||||
|
|
||||||
.. _Baikal: http://sabre.io/baikal/
|
|
||||||
|
|
@ -52,7 +52,7 @@ this:
|
||||||
setup. We also set the storage to read-only such that no changes get
|
setup. We also set the storage to read-only such that no changes get
|
||||||
synchronized back. Claws-Mail should not be able to do any changes anyway,
|
synchronized back. Claws-Mail should not be able to do any changes anyway,
|
||||||
but this is one extra safety step in case files get corrupted or vdirsyncer
|
but this is one extra safety step in case files get corrupted or vdirsyncer
|
||||||
behaves erratically. You can leave that part out if you want to be able to
|
behaves eratically. You can leave that part out if you want to be able to
|
||||||
edit those files locally.
|
edit those files locally.
|
||||||
- In the last section we configure that online contacts win in a conflict
|
- In the last section we configure that online contacts win in a conflict
|
||||||
situation. Configure this part however you like. A correct value depends on
|
situation. Configure this part however you like. A correct value depends on
|
||||||
|
|
@ -69,7 +69,7 @@ Now we discover and sync our contacts::
|
||||||
Claws Mail
|
Claws Mail
|
||||||
----------
|
----------
|
||||||
|
|
||||||
Open Claws-Mail. Go to **Tools** => **Addressbook**.
|
Open Claws-Mail. Got to **Tools** => **Addressbook**.
|
||||||
|
|
||||||
Click on **Addressbook** => **New vCard**. Choose a name for the book.
|
Click on **Addressbook** => **New vCard**. Choose a name for the book.
|
||||||
|
|
||||||
|
|
@ -77,7 +77,7 @@ Then search for the for the vCard in the folder **~/.contacts/**. Click
|
||||||
ok, and you we will see your contacts.
|
ok, and you we will see your contacts.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
Claws-Mail shows only contacts that have a mail address.
|
Claws-Mail shows only contacts that have a mail address.
|
||||||
|
|
||||||
Crontab
|
Crontab
|
||||||
|
|
|
||||||
|
|
@ -10,13 +10,13 @@ the settings to use::
|
||||||
|
|
||||||
[storage cal]
|
[storage cal]
|
||||||
type = "caldav"
|
type = "caldav"
|
||||||
url = "https://caldav.fastmail.com/"
|
url = "https://caldav.messagingengine.com/"
|
||||||
username = "..."
|
username = "..."
|
||||||
password = "..."
|
password = "..."
|
||||||
|
|
||||||
[storage card]
|
[storage card]
|
||||||
type = "carddav"
|
type = "carddav"
|
||||||
url = "https://carddav.fastmail.com/"
|
url = "https://carddav.messagingengine.com/"
|
||||||
username = "..."
|
username = "..."
|
||||||
password = "..."
|
password = "..."
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -37,7 +37,7 @@ Further applications, with missing pages:
|
||||||
|
|
||||||
.. _khal: http://lostpackets.de/khal/
|
.. _khal: http://lostpackets.de/khal/
|
||||||
.. _dayplanner: http://www.day-planner.org/
|
.. _dayplanner: http://www.day-planner.org/
|
||||||
.. _Orage: https://gitlab.xfce.org/apps/orage
|
.. _Orage: http://www.kolumbus.fi/~w408237/orage/
|
||||||
.. _rainlendar: http://www.rainlendar.net/
|
.. _rainlendar: http://www.rainlendar.net/
|
||||||
.. _khard: https://github.com/scheibler/khard/
|
.. _khard: https://github.com/scheibler/khard/
|
||||||
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c
|
.. _contactquery.c: https://github.com/t-8ch/snippets/blob/master/contactquery.c
|
||||||
|
|
@ -52,12 +52,10 @@ Servers
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
baikal
|
|
||||||
davmail
|
davmail
|
||||||
fastmail
|
fastmail
|
||||||
google
|
google
|
||||||
icloud
|
icloud
|
||||||
nextcloud
|
nextcloud
|
||||||
owncloud
|
|
||||||
radicale
|
radicale
|
||||||
xandikos
|
xandikos
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,8 @@
|
||||||
=========
|
=========
|
||||||
nextCloud
|
NextCloud
|
||||||
=========
|
=========
|
||||||
|
|
||||||
Vdirsyncer is continuously tested against the latest version of nextCloud_::
|
Vdirsyncer is continuously tested against the latest version of NextCloud_::
|
||||||
|
|
||||||
[storage cal]
|
[storage cal]
|
||||||
type = "caldav"
|
type = "caldav"
|
||||||
|
|
@ -17,4 +17,4 @@ Vdirsyncer is continuously tested against the latest version of nextCloud_::
|
||||||
- WebCAL-subscriptions can't be discovered by vdirsyncer. See `this relevant
|
- WebCAL-subscriptions can't be discovered by vdirsyncer. See `this relevant
|
||||||
issue <https://github.com/nextcloud/calendar/issues/63>`_.
|
issue <https://github.com/nextcloud/calendar/issues/63>`_.
|
||||||
|
|
||||||
.. _nextCloud: https://nextcloud.com/
|
.. _NextCloud: https://nextcloud.com/
|
||||||
|
|
|
||||||
|
|
@ -1,26 +0,0 @@
|
||||||
.. _owncloud_setup:
|
|
||||||
|
|
||||||
========
|
|
||||||
ownCloud
|
|
||||||
========
|
|
||||||
|
|
||||||
Vdirsyncer is continuously tested against the latest version of ownCloud_::
|
|
||||||
|
|
||||||
[storage cal]
|
|
||||||
type = "caldav"
|
|
||||||
url = "https://example.com/remote.php/dav/"
|
|
||||||
username = ...
|
|
||||||
password = ...
|
|
||||||
|
|
||||||
[storage card]
|
|
||||||
type = "carddav"
|
|
||||||
url = "https://example.com/remote.php/dav/"
|
|
||||||
username = ...
|
|
||||||
password = ...
|
|
||||||
|
|
||||||
- *Versions older than 7.0.0:* ownCloud uses SabreDAV, which had problems
|
|
||||||
detecting collisions and race-conditions. The problems were reported and are
|
|
||||||
fixed in SabreDAV's repo, and the corresponding fix is also in ownCloud since
|
|
||||||
7.0.0. See :gh:`16` for more information.
|
|
||||||
|
|
||||||
.. _ownCloud: https://owncloud.org/
|
|
||||||
|
|
@ -13,8 +13,8 @@ minutes).
|
||||||
unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_
|
unit files, you'll need to download vdirsyncer.service_ and vdirsyncer.timer_
|
||||||
into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``.
|
into either ``/etc/systemd/user/`` or ``~/.local/share/systemd/user``.
|
||||||
|
|
||||||
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.service
|
.. _vdirsyncer.service: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.service
|
||||||
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/main/contrib/vdirsyncer.timer
|
.. _vdirsyncer.timer: https://raw.githubusercontent.com/pimutils/vdirsyncer/master/contrib/vdirsyncer.timer
|
||||||
|
|
||||||
Activation
|
Activation
|
||||||
----------
|
----------
|
||||||
|
|
@ -29,7 +29,7 @@ It's quite possible that the default "every fifteen minutes" interval isn't to
|
||||||
your liking. No default will suit everybody, but this is configurable by simply
|
your liking. No default will suit everybody, but this is configurable by simply
|
||||||
running::
|
running::
|
||||||
|
|
||||||
systemctl --user edit vdirsyncer.timer
|
systemctl --user edit vdirsyncer
|
||||||
|
|
||||||
This will open a blank editor, where you can override the timer by including::
|
This will open a blank editor, where you can override the timer by including::
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -48,9 +48,10 @@ instance to subfolders of ``~/.calendar/``.
|
||||||
Setting up todoman
|
Setting up todoman
|
||||||
==================
|
==================
|
||||||
|
|
||||||
Write this to ``~/.config/todoman/config.py``::
|
Write this to ``~/.config/todoman/todoman.conf``::
|
||||||
|
|
||||||
path = "~/.calendars/*"
|
[main]
|
||||||
|
path = ~/.calendars/*
|
||||||
|
|
||||||
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
|
The glob_ pattern in ``path`` will match all subfolders in ``~/.calendars/``,
|
||||||
which is exactly the tasklists we want. Now you can use ``todoman`` as
|
which is exactly the tasklists we want. Now you can use ``todoman`` as
|
||||||
|
|
|
||||||
|
|
@ -56,11 +56,8 @@ have any file extensions.
|
||||||
known from CSS, for example) are allowed. The prefixing ``#`` must be
|
known from CSS, for example) are allowed. The prefixing ``#`` must be
|
||||||
present.
|
present.
|
||||||
|
|
||||||
- Files called ``displayname`` and ``description`` contain a UTF-8 encoded label/
|
- A file called ``displayname`` contains a UTF-8 encoded label that may be used
|
||||||
description, that may be used to represent the vdir in UIs.
|
to represent the vdir in UIs.
|
||||||
|
|
||||||
- A file called ``order`` inside the vdir includes the relative order
|
|
||||||
of the calendar, a property that is only relevant in UI design.
|
|
||||||
|
|
||||||
Writing to vdirs
|
Writing to vdirs
|
||||||
================
|
================
|
||||||
|
|
@ -99,7 +96,7 @@ collections for faster search and lookup.
|
||||||
|
|
||||||
The reason items' filenames don't contain any extra information is simple: The
|
The reason items' filenames don't contain any extra information is simple: The
|
||||||
solutions presented induced duplication of data, where one duplicate might
|
solutions presented induced duplication of data, where one duplicate might
|
||||||
become out of date because of bad implementations. As it stands right now, an
|
become out of date because of bad implementations. As it stands right now, a
|
||||||
index format could be formalized separately though.
|
index format could be formalized separately though.
|
||||||
|
|
||||||
vdirsyncer doesn't really have to bother about efficient item lookup, because
|
vdirsyncer doesn't really have to bother about efficient item lookup, because
|
||||||
|
|
|
||||||
|
|
@ -39,7 +39,7 @@ program chosen:
|
||||||
* Like with ``todo.txt``, Dropbox and friends are obviously agnostic/unaware of
|
* Like with ``todo.txt``, Dropbox and friends are obviously agnostic/unaware of
|
||||||
the files' contents. If a file has changed on both sides, Dropbox just copies
|
the files' contents. If a file has changed on both sides, Dropbox just copies
|
||||||
both versions to both sides.
|
both versions to both sides.
|
||||||
|
|
||||||
This is a good idea if the user is directly interfacing with the file system
|
This is a good idea if the user is directly interfacing with the file system
|
||||||
and is able to resolve conflicts themselves. Here it might lead to
|
and is able to resolve conflicts themselves. Here it might lead to
|
||||||
erroneous behavior with e.g. ``khal``, since there are now two events with
|
erroneous behavior with e.g. ``khal``, since there are now two events with
|
||||||
|
|
@ -50,6 +50,7 @@ program chosen:
|
||||||
|
|
||||||
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
|
* Such a setup doesn't work at all with smartphones. Vdirsyncer, on the other
|
||||||
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
|
hand, synchronizes with CardDAV/CalDAV servers, which can be accessed with
|
||||||
e.g. DAVx⁵_ or other apps bundled with smartphones.
|
e.g. DAVDroid_ or the apps by dmfs_.
|
||||||
|
|
||||||
.. _DAVx⁵: https://www.davx5.com/
|
.. _DAVDroid: http://davdroid.bitfire.at/
|
||||||
|
.. _dmfs: https://dmfs.org/
|
||||||
|
|
|
||||||
|
|
@ -1,29 +0,0 @@
|
||||||
# Push new version to PyPI.
|
|
||||||
#
|
|
||||||
# Usage: hut builds submit publish-release.yaml --follow
|
|
||||||
|
|
||||||
image: alpine/edge
|
|
||||||
packages:
|
|
||||||
- py3-build
|
|
||||||
- py3-pip
|
|
||||||
- py3-setuptools
|
|
||||||
- py3-setuptools_scm
|
|
||||||
- py3-wheel
|
|
||||||
- twine
|
|
||||||
sources:
|
|
||||||
- https://github.com/pimutils/vdirsyncer
|
|
||||||
secrets:
|
|
||||||
- a36c8ba3-fba0-4338-b402-6aea0fbe771e # PyPI token.
|
|
||||||
environment:
|
|
||||||
CI: true
|
|
||||||
tasks:
|
|
||||||
- check-tag: |
|
|
||||||
cd vdirsyncer
|
|
||||||
git fetch --tags
|
|
||||||
|
|
||||||
# Stop here unless this is a tag.
|
|
||||||
git describe --exact-match --tags || complete-build
|
|
||||||
- publish: |
|
|
||||||
cd vdirsyncer
|
|
||||||
python -m build --no-isolation
|
|
||||||
twine upload --non-interactive dist/*
|
|
||||||
114
pyproject.toml
114
pyproject.toml
|
|
@ -1,114 +0,0 @@
|
||||||
# Vdirsyncer synchronizes calendars and contacts.
|
|
||||||
#
|
|
||||||
# Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
|
|
||||||
# how to package vdirsyncer.
|
|
||||||
|
|
||||||
[build-system]
|
|
||||||
requires = ["setuptools>=64", "setuptools_scm>=8"]
|
|
||||||
build-backend = "setuptools.build_meta"
|
|
||||||
|
|
||||||
[project]
|
|
||||||
name = "vdirsyncer"
|
|
||||||
authors = [
|
|
||||||
{name = "Markus Unterwaditzer", email = "markus@unterwaditzer.net"},
|
|
||||||
]
|
|
||||||
description = "Synchronize calendars and contacts"
|
|
||||||
readme = "README.rst"
|
|
||||||
requires-python = ">=3.9"
|
|
||||||
keywords = ["todo", "task", "icalendar", "cli"]
|
|
||||||
license = "BSD-3-Clause"
|
|
||||||
license-files = ["LICENSE"]
|
|
||||||
classifiers = [
|
|
||||||
"Development Status :: 4 - Beta",
|
|
||||||
"Environment :: Console",
|
|
||||||
"Operating System :: POSIX",
|
|
||||||
"Programming Language :: Python :: 3",
|
|
||||||
"Programming Language :: Python :: 3.10",
|
|
||||||
"Programming Language :: Python :: 3.11",
|
|
||||||
"Programming Language :: Python :: 3.12",
|
|
||||||
"Programming Language :: Python :: 3.13",
|
|
||||||
"Programming Language :: Python :: 3.9",
|
|
||||||
"Topic :: Internet",
|
|
||||||
"Topic :: Office/Business :: Scheduling",
|
|
||||||
"Topic :: Utilities",
|
|
||||||
]
|
|
||||||
dependencies = [
|
|
||||||
"click>=5.0,<9.0",
|
|
||||||
"click-log>=0.3.0,<0.5.0",
|
|
||||||
"requests>=2.20.0",
|
|
||||||
"aiohttp>=3.8.2,<4.0.0",
|
|
||||||
"aiostream>=0.4.3,<0.8.0",
|
|
||||||
"tenacity>=9.0.0",
|
|
||||||
]
|
|
||||||
dynamic = ["version"]
|
|
||||||
|
|
||||||
[project.optional-dependencies]
|
|
||||||
google = ["aiohttp-oauthlib"]
|
|
||||||
test = [
|
|
||||||
"hypothesis>=6.72.0,<7.0.0",
|
|
||||||
"pytest",
|
|
||||||
"pytest-cov",
|
|
||||||
"pytest-httpserver",
|
|
||||||
"trustme",
|
|
||||||
"pytest-asyncio",
|
|
||||||
"aioresponses",
|
|
||||||
]
|
|
||||||
docs = [
|
|
||||||
"sphinx!=1.4.7",
|
|
||||||
"sphinx_rtd_theme",
|
|
||||||
"setuptools_scm",
|
|
||||||
]
|
|
||||||
check = [
|
|
||||||
"mypy",
|
|
||||||
"ruff",
|
|
||||||
"types-docutils",
|
|
||||||
"types-requests",
|
|
||||||
"types-setuptools",
|
|
||||||
]
|
|
||||||
|
|
||||||
[project.scripts]
|
|
||||||
vdirsyncer = "vdirsyncer.cli:app"
|
|
||||||
|
|
||||||
[tool.ruff.lint]
|
|
||||||
extend-select = [
|
|
||||||
"B0",
|
|
||||||
"C4",
|
|
||||||
"E",
|
|
||||||
"I",
|
|
||||||
"RSE",
|
|
||||||
"SIM",
|
|
||||||
"TID",
|
|
||||||
"UP",
|
|
||||||
"W",
|
|
||||||
]
|
|
||||||
|
|
||||||
[tool.ruff.lint.isort]
|
|
||||||
force-single-line = true
|
|
||||||
required-imports = ["from __future__ import annotations"]
|
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
|
||||||
addopts = """
|
|
||||||
--tb=short
|
|
||||||
--cov-config .coveragerc
|
|
||||||
--cov=vdirsyncer
|
|
||||||
--cov-report=term-missing:skip-covered
|
|
||||||
--no-cov-on-fail
|
|
||||||
--color=yes
|
|
||||||
"""
|
|
||||||
# filterwarnings=error
|
|
||||||
asyncio_default_fixture_loop_scope = "function"
|
|
||||||
|
|
||||||
[tool.mypy]
|
|
||||||
ignore_missing_imports = true
|
|
||||||
|
|
||||||
[tool.coverage.report]
|
|
||||||
exclude_lines = [
|
|
||||||
"if TYPE_CHECKING:",
|
|
||||||
]
|
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
|
||||||
include = ["vdirsyncer*"]
|
|
||||||
|
|
||||||
[tool.setuptools_scm]
|
|
||||||
write_to = "vdirsyncer/version.py"
|
|
||||||
version_scheme = "no-guess-dev"
|
|
||||||
1
rust/.gitignore
vendored
Normal file
1
rust/.gitignore
vendored
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
target/
|
||||||
1493
rust/Cargo.lock
generated
Normal file
1493
rust/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
23
rust/Cargo.toml
Normal file
23
rust/Cargo.toml
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
[package]
|
||||||
|
name = "vdirsyncer-rustext"
|
||||||
|
version = "0.1.0"
|
||||||
|
authors = ["Markus Unterwaditzer <markus@unterwaditzer.net>"]
|
||||||
|
|
||||||
|
[lib]
|
||||||
|
name = "vdirsyncer_rustext"
|
||||||
|
crate-type = ["cdylib"]
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
vobject = "0.4.2"
|
||||||
|
sha2 = "0.7.0"
|
||||||
|
failure = "0.1"
|
||||||
|
shippai = "0.2.3"
|
||||||
|
atomicwrites = "0.2.0"
|
||||||
|
uuid = { version = "0.6", features = ["v4"] }
|
||||||
|
libc = "0.2"
|
||||||
|
log = "0.4"
|
||||||
|
reqwest = "0.8"
|
||||||
|
quick-xml = "0.12.0"
|
||||||
|
url = "1.7"
|
||||||
|
chrono = "0.4.0"
|
||||||
|
env_logger = "0.5"
|
||||||
4
rust/cbindgen.toml
Normal file
4
rust/cbindgen.toml
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
language = "C"
|
||||||
|
|
||||||
|
[parse]
|
||||||
|
expand = ["vdirsyncer-rustext"]
|
||||||
59
rust/src/errors.rs
Normal file
59
rust/src/errors.rs
Normal file
|
|
@ -0,0 +1,59 @@
|
||||||
|
use failure;
|
||||||
|
|
||||||
|
pub type Fallible<T> = Result<T, failure::Error>;
|
||||||
|
|
||||||
|
shippai_export!();
|
||||||
|
|
||||||
|
#[derive(Debug, Fail, Shippai)]
|
||||||
|
pub enum Error {
|
||||||
|
#[fail(display = "The item cannot be parsed")]
|
||||||
|
ItemUnparseable,
|
||||||
|
|
||||||
|
#[fail(display = "Unexpected version {}, expected {}", found, expected)]
|
||||||
|
UnexpectedVobjectVersion { found: String, expected: String },
|
||||||
|
|
||||||
|
#[fail(display = "Unexpected component {}, expected {}", found, expected)]
|
||||||
|
UnexpectedVobject { found: String, expected: String },
|
||||||
|
|
||||||
|
#[fail(display = "Item '{}' not found", href)]
|
||||||
|
ItemNotFound { href: String },
|
||||||
|
|
||||||
|
#[fail(display = "The href '{}' is already taken", href)]
|
||||||
|
ItemAlreadyExisting { href: String },
|
||||||
|
|
||||||
|
#[fail(
|
||||||
|
display = "A wrong etag for '{}' was provided. Another client's requests might \
|
||||||
|
conflict with vdirsyncer.",
|
||||||
|
href
|
||||||
|
)]
|
||||||
|
WrongEtag { href: String },
|
||||||
|
|
||||||
|
#[fail(
|
||||||
|
display = "The mtime for '{}' has unexpectedly changed. Please close other programs\
|
||||||
|
accessing this file.",
|
||||||
|
filepath
|
||||||
|
)]
|
||||||
|
MtimeMismatch { filepath: String },
|
||||||
|
|
||||||
|
#[fail(
|
||||||
|
display = "The item '{}' has been rejected by the server because the vobject type was unexpected",
|
||||||
|
href
|
||||||
|
)]
|
||||||
|
UnsupportedVobject { href: String },
|
||||||
|
|
||||||
|
#[fail(display = "This storage is read-only.")]
|
||||||
|
ReadOnly,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub unsafe fn export_result<V>(
|
||||||
|
res: Result<V, failure::Error>,
|
||||||
|
c_err: *mut *mut ShippaiError,
|
||||||
|
) -> Option<V> {
|
||||||
|
match res {
|
||||||
|
Ok(v) => Some(v),
|
||||||
|
Err(e) => {
|
||||||
|
*c_err = Box::into_raw(Box::new(e.into()));
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
256
rust/src/item.rs
Normal file
256
rust/src/item.rs
Normal file
|
|
@ -0,0 +1,256 @@
|
||||||
|
use vobject;
|
||||||
|
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
use std::fmt::Write;
|
||||||
|
|
||||||
|
use errors::*;
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub enum Item {
|
||||||
|
Parsed(vobject::Component),
|
||||||
|
Unparseable(String), // FIXME: maybe use https://crates.io/crates/terminated
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Item {
|
||||||
|
pub fn from_raw(raw: String) -> Self {
|
||||||
|
match vobject::parse_component(&raw) {
|
||||||
|
Ok(x) => Item::Parsed(x),
|
||||||
|
// Don't chain vobject error here because it cannot be stored/cloned FIXME
|
||||||
|
_ => Item::Unparseable(raw),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_component(component: vobject::Component) -> Self {
|
||||||
|
Item::Parsed(component)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Global identifier of the item, across storages, doesn't change after a modification of the
|
||||||
|
/// item.
|
||||||
|
pub fn get_uid(&self) -> Option<String> {
|
||||||
|
// FIXME: Cache
|
||||||
|
if let Item::Parsed(ref c) = *self {
|
||||||
|
let mut stack: Vec<&vobject::Component> = vec![c];
|
||||||
|
|
||||||
|
while let Some(vobj) = stack.pop() {
|
||||||
|
if let Some(prop) = vobj.get_only("UID") {
|
||||||
|
return Some(prop.value_as_string());
|
||||||
|
}
|
||||||
|
stack.extend(vobj.subcomponents.iter());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_uid(&self, uid: &str) -> Fallible<Self> {
|
||||||
|
if let Item::Parsed(ref component) = *self {
|
||||||
|
let mut new_component = component.clone();
|
||||||
|
change_uid(&mut new_component, uid);
|
||||||
|
Ok(Item::from_raw(vobject::write_component(&new_component)))
|
||||||
|
} else {
|
||||||
|
Err(Error::ItemUnparseable.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Raw unvalidated content of the item
|
||||||
|
pub fn get_raw(&self) -> String {
|
||||||
|
match *self {
|
||||||
|
Item::Parsed(ref component) => vobject::write_component(component),
|
||||||
|
Item::Unparseable(ref x) => x.to_owned(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Component of item if parseable
|
||||||
|
pub fn get_component(&self) -> Fallible<&vobject::Component> {
|
||||||
|
match *self {
|
||||||
|
Item::Parsed(ref component) => Ok(component),
|
||||||
|
_ => Err(Error::ItemUnparseable.into()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Component of item if parseable
|
||||||
|
pub fn into_component(self) -> Fallible<vobject::Component> {
|
||||||
|
match self {
|
||||||
|
Item::Parsed(component) => Ok(component),
|
||||||
|
_ => Err(Error::ItemUnparseable.into()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Used for etags
|
||||||
|
pub fn get_hash(&self) -> Fallible<String> {
|
||||||
|
// FIXME: cache
|
||||||
|
if let Item::Parsed(ref component) = *self {
|
||||||
|
Ok(hash_component(component))
|
||||||
|
} else {
|
||||||
|
Err(Error::ItemUnparseable.into())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Used for generating hrefs and matching up items during synchronization. This is either the
|
||||||
|
/// UID or the hash of the item's content.
|
||||||
|
pub fn get_ident(&self) -> Fallible<String> {
|
||||||
|
if let Some(x) = self.get_uid() {
|
||||||
|
return Ok(x);
|
||||||
|
}
|
||||||
|
// We hash the item instead of directly using its raw content, because
|
||||||
|
// 1. The raw content might be really large, e.g. when it's a contact
|
||||||
|
// with a picture, which bloats the status file.
|
||||||
|
//
|
||||||
|
// 2. The status file would contain really sensitive information.
|
||||||
|
self.get_hash()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_parseable(&self) -> bool {
|
||||||
|
if let Item::Parsed(_) = *self {
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn change_uid(c: &mut vobject::Component, uid: &str) {
|
||||||
|
let mut stack = vec![c];
|
||||||
|
while let Some(component) = stack.pop() {
|
||||||
|
match component.name.as_ref() {
|
||||||
|
"VEVENT" | "VTODO" | "VJOURNAL" | "VCARD" => {
|
||||||
|
if !uid.is_empty() {
|
||||||
|
component.set(vobject::Property::new("UID", uid));
|
||||||
|
} else {
|
||||||
|
component.remove("UID");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
stack.extend(component.subcomponents.iter_mut());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn hash_component(c: &vobject::Component) -> String {
|
||||||
|
let mut new_c = c.clone();
|
||||||
|
{
|
||||||
|
let mut stack = vec![&mut new_c];
|
||||||
|
while let Some(component) = stack.pop() {
|
||||||
|
// PRODID is changed by radicale for some reason after upload
|
||||||
|
component.remove("PRODID");
|
||||||
|
// Sometimes METHOD:PUBLISH is added by WebCAL providers, for us it doesn't make a difference
|
||||||
|
component.remove("METHOD");
|
||||||
|
// X-RADICALE-NAME is used by radicale, because hrefs don't really exist in their filesystem backend
|
||||||
|
component.remove("X-RADICALE-NAME");
|
||||||
|
// Those are from the VCARD specification and is supposed to change when the
|
||||||
|
// item does -- however, we can determine that ourselves
|
||||||
|
component.remove("REV");
|
||||||
|
component.remove("LAST-MODIFIED");
|
||||||
|
component.remove("CREATED");
|
||||||
|
// Some iCalendar HTTP calendars generate the DTSTAMP at request time, so
|
||||||
|
// this property always changes when the rest of the item didn't. Some do
|
||||||
|
// the same with the UID.
|
||||||
|
//
|
||||||
|
// - Google's read-only calendar links
|
||||||
|
// - http://www.feiertage-oesterreich.at/
|
||||||
|
component.remove("DTSTAMP");
|
||||||
|
component.remove("UID");
|
||||||
|
|
||||||
|
if component.name == "VCALENDAR" {
|
||||||
|
// CALSCALE's default value is gregorian
|
||||||
|
let calscale = component.get_only("CALSCALE").map(|x| x.value_as_string());
|
||||||
|
|
||||||
|
if let Some(x) = calscale {
|
||||||
|
if x == "GREGORIAN" {
|
||||||
|
component.remove("CALSCALE");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apparently this is set by Horde?
|
||||||
|
// https://github.com/pimutils/vdirsyncer/issues/318
|
||||||
|
// Also Google sets those properties
|
||||||
|
component.remove("X-WR-CALNAME");
|
||||||
|
component.remove("X-WR-TIMEZONE");
|
||||||
|
|
||||||
|
component.subcomponents.retain(|c| c.name != "VTIMEZONE");
|
||||||
|
}
|
||||||
|
|
||||||
|
stack.extend(component.subcomponents.iter_mut());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// FIXME: Possible optimization: Stream component to hasher instead of allocating new string
|
||||||
|
let raw = vobject::write_component(&new_c);
|
||||||
|
let mut lines: Vec<_> = raw.lines().collect();
|
||||||
|
lines.sort();
|
||||||
|
let mut hasher = Sha256::default();
|
||||||
|
hasher.input(lines.join("\r\n").as_bytes());
|
||||||
|
let digest = hasher.result();
|
||||||
|
let mut rv = String::new();
|
||||||
|
for &byte in digest.as_ref() {
|
||||||
|
write!(&mut rv, "{:x}", byte).unwrap();
|
||||||
|
}
|
||||||
|
rv
|
||||||
|
}
|
||||||
|
|
||||||
|
/// C FFI surface for `Item`.
///
/// Ownership conventions: pointers returned by `*_get_*`/`*_from_raw` are
/// heap allocations created via `CString::into_raw` / `Box::into_raw` and are
/// owned by the caller; `vdirsyncer_free_item` reclaims an `Item`.
pub mod exports {
    use super::Item;
    use errors::*;
    use std::ffi::{CStr, CString};
    use std::os::raw::c_char;
    use std::ptr;

    // Sentinel returned when there is no UID. This points at a static byte,
    // NOT a heap allocation.
    // NOTE(review): callers must not pass this pointer to a free function
    // that assumes heap ownership — confirm the Python side distinguishes it.
    const EMPTY_STRING: *const c_char = b"\0" as *const u8 as *const c_char;

    /// Return the item's UID as a C string, or a static empty string if the
    /// item has none.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_get_uid(c: *mut Item) -> *const c_char {
        match (*c).get_uid() {
            Some(x) => CString::new(x).unwrap().into_raw(),
            None => EMPTY_STRING,
        }
    }

    /// Return the item's raw vobject text as a caller-owned C string.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_get_raw(c: *mut Item) -> *const c_char {
        CString::new((*c).get_raw()).unwrap().into_raw()
    }

    /// Parse a NUL-terminated UTF-8 string into a heap-allocated `Item`.
    /// Panics (via `unwrap`) if the input is not valid UTF-8.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_item_from_raw(s: *const c_char) -> *mut Item {
        let cstring = CStr::from_ptr(s);
        Box::into_raw(Box::new(Item::from_raw(
            cstring.to_str().unwrap().to_owned(),
        )))
    }

    /// Reclaim and drop an `Item` previously returned across the FFI boundary.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_free_item(c: *mut Item) {
        let _: Box<Item> = Box::from_raw(c);
    }

    /// Return a copy of the item with its UID replaced. On failure the error
    /// is exported through `err` and NULL is returned.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_with_uid(
        c: *mut Item,
        uid: *const c_char,
        err: *mut *mut ShippaiError,
    ) -> *mut Item {
        let uid_cstring = CStr::from_ptr(uid);
        if let Some(x) = export_result((*c).with_uid(uid_cstring.to_str().unwrap()), err) {
            Box::into_raw(Box::new(x))
        } else {
            ptr::null_mut()
        }
    }

    /// Return the item's content hash as a caller-owned C string, or NULL on
    /// error (error exported through `err`).
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_get_hash(
        c: *mut Item,
        err: *mut *mut ShippaiError,
    ) -> *const c_char {
        if let Some(x) = export_result((*c).get_hash(), err) {
            CString::new(x).unwrap().into_raw()
        } else {
            ptr::null_mut()
        }
    }

    /// True if the item's raw text parsed successfully as a vobject.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_item_is_parseable(c: *mut Item) -> bool {
        (*c).is_parseable()
    }
}
|
||||||
40
rust/src/lib.rs
Normal file
40
rust/src/lib.rs
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
#![cfg_attr(feature = "cargo-clippy", allow(single_match))]
|
||||||
|
|
||||||
|
extern crate atomicwrites;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate failure;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate shippai;
|
||||||
|
extern crate libc;
|
||||||
|
extern crate uuid;
|
||||||
|
extern crate vobject;
|
||||||
|
#[macro_use]
|
||||||
|
extern crate log;
|
||||||
|
extern crate chrono;
|
||||||
|
extern crate env_logger;
|
||||||
|
extern crate quick_xml;
|
||||||
|
extern crate reqwest;
|
||||||
|
extern crate sha2;
|
||||||
|
extern crate url;
|
||||||
|
|
||||||
|
pub mod errors;
|
||||||
|
mod item;
|
||||||
|
mod storage;
|
||||||
|
|
||||||
|
/// Top-level C FFI surface: re-exports all item/storage entry points plus a
/// couple of process-wide helpers.
pub mod exports {
    use std::ffi::CStr;
    use std::os::raw::c_char;

    pub use super::item::exports::*;
    pub use super::storage::exports::*;

    /// Intended to release a string handed out across the FFI boundary.
    /// NOTE(review): `CStr::from_ptr` only *borrows* the pointer — it does
    /// not take ownership, so this frees nothing and strings created via
    /// `CString::into_raw` leak. Reclaiming them would need
    /// `CString::from_raw`, but some returned pointers (the static
    /// `EMPTY_STRING` sentinel) must never be freed — confirm intent before
    /// changing.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_free_str(s: *const c_char) {
        CStr::from_ptr(s);
    }

    /// Initialize env_logger once for the whole process; logging macros are
    /// no-ops until this is called.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_init_logger() {
        ::env_logger::init();
    }
}
|
||||||
465
rust/src/storage/dav/mod.rs
Normal file
465
rust/src/storage/dav/mod.rs
Normal file
|
|
@ -0,0 +1,465 @@
|
||||||
|
mod parser;
|
||||||
|
|
||||||
|
use chrono;
|
||||||
|
|
||||||
|
use std::collections::BTreeSet;
|
||||||
|
use std::io::{BufReader, Read};
|
||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use quick_xml;
|
||||||
|
use reqwest;
|
||||||
|
use reqwest::header::{ContentType, ETag, EntityTag, IfMatch, IfNoneMatch};
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
use super::http::{handle_http_error, send_request, HttpConfig};
|
||||||
|
use super::utils::generate_href;
|
||||||
|
use super::Storage;
|
||||||
|
use errors::*;
|
||||||
|
|
||||||
|
use item::Item;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn propfind() -> reqwest::Method {
|
||||||
|
reqwest::Method::Extension("PROPFIND".to_owned())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn report() -> reqwest::Method {
|
||||||
|
reqwest::Method::Extension("REPORT".to_owned())
|
||||||
|
}
|
||||||
|
|
||||||
|
// strftime pattern for CalDAV time-range filter bounds (UTC, e.g. 20180101T000000Z).
static CALDAV_DT_FORMAT: &'static str = "%Y%m%dT%H%M%SZ";
|
||||||
|
|
||||||
|
/// Shared plumbing for the CalDAV and CardDAV storages: collection base URL,
/// HTTP configuration, and a lazily-built, cached HTTP client.
struct DavStorage {
    // Normalized in `new()` to always end with exactly one trailing slash.
    pub url: String,
    pub http_config: HttpConfig,
    // Built on first use by `get_http()` and reused afterwards.
    pub http: Option<reqwest::Client>,
}
|
||||||
|
|
||||||
|
impl DavStorage {
|
||||||
|
pub fn new(url: &str, http_config: HttpConfig) -> Self {
|
||||||
|
DavStorage {
|
||||||
|
url: format!("{}/", url.trim_right_matches('/')),
|
||||||
|
http_config,
|
||||||
|
http: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl DavStorage {
    /// Return the cached HTTP client, building (and caching) it on first use.
    #[inline]
    pub fn get_http(&mut self) -> Fallible<reqwest::Client> {
        if let Some(ref http) = self.http {
            return Ok(http.clone());
        }
        let client = self.http_config.clone().into_connection()?.build()?;
        self.http = Some(client.clone());
        Ok(client)
    }

    /// Send a request and translate HTTP-level failures into our error type.
    #[inline]
    pub fn send_request(&mut self, request: reqwest::Request) -> Fallible<reqwest::Response> {
        let url = request.url().to_string();
        handle_http_error(&url, send_request(&self.get_http()?, request)?)
    }

    /// Fetch a single item by href; returns the parsed item and its etag.
    ///
    /// Errors with `ItemNotFound` if `href` escapes the collection (joining it
    /// onto the base URL changes the path), and `EtagNotFound` if the server
    /// response lacks an ETag header.
    pub fn get(&mut self, href: &str) -> Fallible<(Item, String)> {
        let base = Url::parse(&self.url)?;
        let url = base.join(href)?;
        // Guard against hrefs that resolve outside the collection path.
        if href != url.path() {
            Err(Error::ItemNotFound {
                href: href.to_owned(),
            })?;
        }

        let request = self.get_http()?.get(url).build()?;
        let mut response = self.send_request(request)?;
        let mut s = String::new();
        response.read_to_string(&mut s)?;
        let etag = match response.headers().get::<ETag>() {
            // Re-wrap in quotes: the header type strips them.
            Some(x) => format!("\"{}\"", x.tag()),
            None => Err(DavError::EtagNotFound)?,
        };
        Ok((Item::from_raw(s), etag))
    }

    /// List (href, etag) pairs of all resources in the collection whose
    /// content type contains `mimetype_contains`, via a Depth:1 PROPFIND.
    pub fn list<'a>(
        &'a mut self,
        mimetype_contains: &'a str,
    ) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>> {
        let mut headers = reqwest::header::Headers::new();
        headers.set(ContentType::xml());
        headers.set_raw("Depth", "1");

        let request = self
            .get_http()?
            .request(propfind(), &self.url)
            .headers(headers)
            .body(
                r#"<?xml version="1.0" encoding="utf-8" ?>
<D:propfind xmlns:D="DAV:">
<D:prop>
<D:resourcetype/>
<D:getcontenttype/>
<D:getetag/>
</D:prop>
</D:propfind>"#,
            )
            .build()?;
        let response = self.send_request(request)?;
        self.parse_prop_response(response, mimetype_contains)
    }

    /// Parse a multistatus PROPFIND/REPORT response body into (href, etag)
    /// pairs, filtering out collections, wrong content types, and duplicates.
    fn parse_prop_response<'a>(
        &'a mut self,
        response: reqwest::Response,
        mimetype_contains: &'a str,
    ) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>> {
        let buf_reader = BufReader::new(response);
        let xml_reader = quick_xml::Reader::from_reader(buf_reader);

        let mut parser = parser::ListingParser::new(xml_reader);
        let base = Url::parse(&self.url)?;
        let mut seen_hrefs = BTreeSet::new();

        Ok(Box::new(
            parser
                .get_all_responses()?
                .into_iter()
                .filter_map(move |response| {
                    // Skip sub-collections; we only list leaf resources.
                    if response.has_collection_tag {
                        return None;
                    }
                    // `?` on Option: entries without a mimetype are dropped.
                    if !response.mimetype?.contains(mimetype_contains) {
                        return None;
                    }

                    // Normalize the href against the base URL.
                    let href = base.join(&response.href?).ok()?.path().to_owned();

                    // Some servers repeat entries; report each href once.
                    if seen_hrefs.contains(&href) {
                        return None;
                    }
                    seen_hrefs.insert(href.clone());
                    Some((href, response.etag?))
                }),
        ))
    }

    /// PUT an item at `href`, guarded by `If-Match` (update, etag given) or
    /// `If-None-Match: *` (create, no etag). Returns the final (href, etag).
    fn put(
        &mut self,
        href: &str,
        item: &Item,
        mimetype: &str,
        etag: Option<&str>,
    ) -> Fallible<(String, String)> {
        let base = Url::parse(&self.url)?;
        let url = base.join(href)?;
        let mut request = self.get_http()?.request(reqwest::Method::Put, url);
        request.header(ContentType(reqwest::mime::Mime::from_str(mimetype)?));
        if let Some(etag) = etag {
            request.header(IfMatch::Items(vec![EntityTag::new(
                false,
                etag.trim_matches('"').to_owned(),
            )]));
        } else {
            // Creation must not overwrite an existing resource.
            request.header(IfNoneMatch::Any);
        }

        let raw = item.get_raw();
        let response = send_request(&self.get_http()?, request.body(raw).build()?)?;

        // 412 means the precondition above failed; map it to the matching
        // domain error depending on whether this was an update or a create.
        match (etag, response.status()) {
            (Some(_), reqwest::StatusCode::PreconditionFailed) => Err(Error::WrongEtag {
                href: href.to_owned(),
            })?,
            (None, reqwest::StatusCode::PreconditionFailed) => Err(Error::ItemAlreadyExisting {
                href: href.to_owned(),
            })?,
            _ => (),
        }

        let response = assert_multistatus_success(handle_http_error(href, response)?)?;

        // The server may not return an etag under certain conditions:
        //
        //   An origin server MUST NOT send a validator header field (Section
        //   7.2), such as an ETag or Last-Modified field, in a successful
        //   response to PUT unless the request's representation data was saved
        //   without any transformation applied to the body (i.e., the
        //   resource's new representation data is identical to the
        //   representation data received in the PUT request) and the validator
        //   field value reflects the new representation.
        //
        // -- https://tools.ietf.org/html/rfc7231#section-4.3.4
        //
        // In such cases we return a constant etag. The next synchronization
        // will then detect an etag change and will download the new item.
        let etag = match response.headers().get::<ETag>() {
            Some(x) => format!("\"{}\"", x.tag()),
            None => "".to_owned(),
        };
        Ok((response.url().path().to_owned(), etag))
    }

    /// DELETE the resource at `href`, guarded by `If-Match: etag`; a 412
    /// response maps to `WrongEtag`.
    fn delete(&mut self, href: &str, etag: &str) -> Fallible<()> {
        let base = Url::parse(&self.url)?;
        let url = base.join(href)?;
        let request = self
            .get_http()?
            .request(reqwest::Method::Delete, url)
            .header(IfMatch::Items(vec![EntityTag::new(
                false,
                etag.trim_matches('"').to_owned(),
            )]))
            .build()?;
        let response = send_request(&self.get_http()?, request)?;

        if response.status() == reqwest::StatusCode::PreconditionFailed {
            Err(Error::WrongEtag {
                href: href.to_owned(),
            })?;
        }

        assert_multistatus_success(handle_http_error(href, response)?)?;
        Ok(())
    }
}
|
||||||
|
|
||||||
|
/// Placeholder: should inspect a 207 Multi-Status response body for
/// per-resource error statuses. Currently passes every response through
/// unchanged (see TODO).
fn assert_multistatus_success(r: reqwest::Response) -> Fallible<reqwest::Response> {
    // TODO
    Ok(r)
}
|
||||||
|
|
||||||
|
/// CardDAV address-book collection; a thin wrapper delegating all operations
/// to the generic `DavStorage`.
struct CarddavStorage {
    inner: DavStorage,
}
|
||||||
|
|
||||||
|
impl CarddavStorage {
|
||||||
|
pub fn new(url: &str, http_config: HttpConfig) -> Self {
|
||||||
|
CarddavStorage {
|
||||||
|
inner: DavStorage::new(url, http_config),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Storage for CarddavStorage {
|
||||||
|
fn list<'a>(&'a mut self) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>> {
|
||||||
|
self.inner.list("vcard")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get(&mut self, href: &str) -> Fallible<(Item, String)> {
|
||||||
|
self.inner.get(href)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn upload(&mut self, item: Item) -> Fallible<(String, String)> {
|
||||||
|
let href = format!("{}.vcf", generate_href(&item.get_ident()?));
|
||||||
|
self.inner.put(&href, &item, "text/vcard", None)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(&mut self, href: &str, item: Item, etag: &str) -> Fallible<String> {
|
||||||
|
self.inner
|
||||||
|
.put(&href, &item, "text/vcard", Some(etag))
|
||||||
|
.map(|x| x.1)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn delete(&mut self, href: &str, etag: &str) -> Fallible<()> {
|
||||||
|
self.inner.delete(href, etag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// CalDAV calendar collection with optional time-range and component-type
/// filtering on listings.
struct CaldavStorage {
    inner: DavStorage,
    start_date: Option<chrono::DateTime<chrono::Utc>>, // FIXME: store as Option<(start, end)>
    end_date: Option<chrono::DateTime<chrono::Utc>>,
    // Component names to list, e.g. "VEVENT"/"VTODO"; empty means no filter.
    item_types: Vec<&'static str>,
}
|
||||||
|
|
||||||
|
impl CaldavStorage {
    /// Build a CalDAV storage; `start_date`/`end_date` bound a time-range
    /// filter and `item_types` restricts which component types are listed.
    pub fn new(
        url: &str,
        http_config: HttpConfig,
        start_date: Option<chrono::DateTime<chrono::Utc>>,
        end_date: Option<chrono::DateTime<chrono::Utc>>,
        item_types: Vec<&'static str>,
    ) -> Self {
        CaldavStorage {
            inner: DavStorage::new(url, http_config),
            start_date,
            end_date,
            item_types,
        }
    }

    /// Build the XML `<C:comp-filter>` fragments for a calendar-query REPORT,
    /// one per component type. Empty result means "no filtering needed".
    #[inline]
    fn get_caldav_filters(&self) -> Vec<String> {
        let mut item_types = self.item_types.clone();
        let mut timefilter = "".to_owned();

        // A time-range filter only takes effect when both bounds are set.
        if let (Some(start), Some(end)) = (self.start_date, self.end_date) {
            timefilter = format!(
                "<C:time-range start=\"{}\" end=\"{}\" />",
                start.format(CALDAV_DT_FORMAT),
                end.format(CALDAV_DT_FORMAT)
            );

            // A time filter needs at least one comp-filter to attach to;
            // default to the two time-bounded component types.
            if item_types.is_empty() {
                item_types.push("VTODO");
                item_types.push("VEVENT");
            }
        }

        item_types
            .into_iter()
            .map(|item_type| {
                format!(
                    "<C:comp-filter name=\"VCALENDAR\">\
                     <C:comp-filter name=\"{}\">{}</C:comp-filter>\
                     </C:comp-filter>",
                    item_type, timefilter
                )
            })
            .collect()
    }
}
|
||||||
|
|
||||||
|
impl Storage for CaldavStorage {
    /// List (href, etag) pairs; uses plain PROPFIND when unfiltered, or one
    /// calendar-query REPORT per filter otherwise.
    fn list<'a>(&'a mut self) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>> {
        let filters = self.get_caldav_filters();
        if filters.is_empty() {
            // If we don't have any filters (which is the default), taking the
            // risk of sending a calendar-query is not necessary. There doesn't
            // seem to be a widely-usable way to send calendar-queries with the
            // same semantics as a PROPFIND request... so why not use PROPFIND
            // instead?
            //
            // See https://github.com/dmfs/tasks/issues/118 for backstory.
            self.inner.list("text/calendar")
        } else {
            let mut rv = vec![];
            let mut headers = reqwest::header::Headers::new();
            headers.set(ContentType::xml());
            headers.set_raw("Depth", "1");

            // One REPORT per component-type filter; results are concatenated.
            for filter in filters {
                let data =
                    format!(
                        "<?xml version=\"1.0\" encoding=\"utf-8\" ?>\
                         <C:calendar-query xmlns:D=\"DAV:\" xmlns:C=\"urn:ietf:params:xml:ns:caldav\">\
                         <D:prop><D:getcontenttype/><D:getetag/></D:prop>\
                         <C:filter>{}</C:filter>\
                         </C:calendar-query>", filter);

                let request = self
                    .inner
                    .get_http()?
                    .request(report(), &self.inner.url)
                    .headers(headers.clone())
                    .body(data)
                    .build()?;
                let response = self.inner.send_request(request)?;
                rv.extend(self.inner.parse_prop_response(response, "text/calendar")?);
            }

            Ok(Box::new(rv.into_iter()))
        }
    }

    /// Fetch a single calendar object by href.
    fn get(&mut self, href: &str) -> Fallible<(Item, String)> {
        self.inner.get(href)
    }

    /// Create a new `.ics` resource named after the item's identifier.
    fn upload(&mut self, item: Item) -> Fallible<(String, String)> {
        let href = format!("{}.ics", generate_href(&item.get_ident()?));
        self.inner.put(&href, &item, "text/calendar", None)
    }

    /// Replace an existing calendar object; returns the new etag.
    fn update(&mut self, href: &str, item: Item, etag: &str) -> Fallible<String> {
        self.inner
            .put(href, &item, "text/calendar", Some(etag))
            .map(|x| x.1)
    }

    /// Delete a calendar object, guarded by its etag.
    fn delete(&mut self, href: &str, etag: &str) -> Fallible<()> {
        self.inner.delete(href, etag)
    }
}
|
||||||
|
|
||||||
|
/// C FFI constructors for the DAV storages, plus the DAV-specific error enum.
pub mod exports {
    use super::super::http::init_http_config;
    use super::*;

    /// Errors specific to the DAV backends, exported through shippai.
    #[derive(Debug, Fail, Shippai)]
    pub enum DavError {
        #[fail(display = "Server did not return etag.")]
        EtagNotFound,
    }

    use std::ffi::CStr;
    use std::os::raw::c_char;

    /// Construct a CardDAV storage from C strings; the returned pointer is
    /// owned by the caller (free via `vdirsyncer_storage_free`).
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_init_carddav(
        url: *const c_char,
        username: *const c_char,
        password: *const c_char,
        useragent: *const c_char,
        verify_cert: *const c_char,
        auth_cert: *const c_char,
    ) -> *mut Box<Storage> {
        let url = CStr::from_ptr(url);

        Box::into_raw(Box::new(Box::new(CarddavStorage::new(
            url.to_str().unwrap(),
            init_http_config(username, password, useragent, verify_cert, auth_cert),
        ))))
    }

    /// Construct a CalDAV storage from C strings and Unix timestamps.
    /// `start_date`/`end_date` <= 0 mean "no bound"; the three `include_*`
    /// flags select which component types are listed.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_init_caldav(
        url: *const c_char,
        username: *const c_char,
        password: *const c_char,
        useragent: *const c_char,
        verify_cert: *const c_char,
        auth_cert: *const c_char,
        start_date: i64,
        end_date: i64,
        include_vevent: bool,
        include_vjournal: bool,
        include_vtodo: bool,
    ) -> *mut Box<Storage> {
        let url = CStr::from_ptr(url);

        // Map a Unix timestamp to a UTC datetime; non-positive means unset.
        let parse_date = |i| {
            if i > 0 {
                Some(chrono::DateTime::from_utc(
                    chrono::NaiveDateTime::from_timestamp(i, 0),
                    chrono::Utc,
                ))
            } else {
                None
            }
        };

        let mut item_types = vec![];
        if include_vevent {
            item_types.push("VEVENT");
        }
        if include_vjournal {
            item_types.push("VJOURNAL");
        }
        if include_vtodo {
            item_types.push("VTODO");
        }

        Box::into_raw(Box::new(Box::new(CaldavStorage::new(
            url.to_str().unwrap(),
            init_http_config(username, password, useragent, verify_cert, auth_cert),
            parse_date(start_date),
            parse_date(end_date),
            item_types,
        ))))
    }
}
|
||||||
|
|
||||||
|
use exports::DavError;
|
||||||
110
rust/src/storage/dav/parser.rs
Normal file
110
rust/src/storage/dav/parser.rs
Normal file
|
|
@ -0,0 +1,110 @@
|
||||||
|
use quick_xml;
|
||||||
|
use quick_xml::events::Event;
|
||||||
|
|
||||||
|
use errors::*;
|
||||||
|
|
||||||
|
use std::io::BufRead;
|
||||||
|
|
||||||
|
/// One `<D:response>` element of a WebDAV multistatus body; fields stay
/// `None` when the corresponding property was absent.
#[derive(Debug)]
pub struct Response {
    pub href: Option<String>,
    pub etag: Option<String>,
    pub mimetype: Option<String>,
    // True if a `<D:collection/>` resourcetype tag was seen.
    pub has_collection_tag: bool,
}
|
||||||
|
|
||||||
|
impl Response {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Response {
|
||||||
|
href: None,
|
||||||
|
etag: None,
|
||||||
|
has_collection_tag: false,
|
||||||
|
mimetype: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Streaming parser for WebDAV multistatus XML, yielding one `Response` per
/// `<D:response>` element.
pub struct ListingParser<T: BufRead> {
    reader: quick_xml::Reader<T>,
    // Scratch buffer reused by quick-xml for namespace resolution.
    ns_buf: Vec<u8>,
}
|
||||||
|
|
||||||
|
impl<T: BufRead> ListingParser<T> {
    /// Wrap a quick-xml reader, configuring it for lenient multistatus
    /// parsing (empty elements expanded so Start events always fire).
    pub fn new(mut reader: quick_xml::Reader<T>) -> Self {
        reader.expand_empty_elements(true);
        reader.trim_text(true);
        reader.check_end_names(true);
        reader.check_comments(false);

        ListingParser {
            reader,
            ns_buf: vec![],
        }
    }

    /// Parse the next `<D:response>` element, or `None` at end of document.
    ///
    /// Implemented as a small state machine: `Outer` until a response opens,
    /// `Response` inside it, and one state per property element whose text
    /// content we capture.
    fn next_response(&mut self) -> Fallible<Option<Response>> {
        let mut buf = vec![];

        #[derive(Debug, Clone, Copy)]
        enum State {
            Outer,
            Response,
            Href,
            ContentType,
            Etag,
        };

        let mut state = State::Outer;
        let mut current_response = Response::new();

        loop {
            match self
                .reader
                .read_namespaced_event(&mut buf, &mut self.ns_buf)?
            {
                (ns, Event::Start(ref e)) => {
                    // Only DAV:-namespaced tags drive state transitions.
                    match (state, ns, e.local_name()) {
                        (State::Outer, Some(b"DAV:"), b"response") => state = State::Response,
                        (State::Response, Some(b"DAV:"), b"href") => state = State::Href,
                        (State::Response, Some(b"DAV:"), b"getetag") => state = State::Etag,
                        (State::Response, Some(b"DAV:"), b"getcontenttype") => {
                            state = State::ContentType
                        }
                        (State::Response, Some(b"DAV:"), b"collection") => {
                            current_response.has_collection_tag = true;
                        }
                        _ => (),
                    }

                    debug!("State: {:?}", state);
                }
                (_, Event::Text(e)) => {
                    let txt = e.unescape_and_decode(&self.reader)?;
                    // Store text into whichever property we are inside of;
                    // text in other states is ignored (continue skips the
                    // state reset below).
                    match state {
                        State::Href => current_response.href = Some(txt),
                        State::ContentType => current_response.mimetype = Some(txt),
                        State::Etag => current_response.etag = Some(txt),
                        _ => continue,
                    }
                    state = State::Response;
                }
                (ns, Event::End(e)) => match (state, ns, e.local_name()) {
                    (State::Response, Some(b"DAV:"), b"response") => {
                        return Ok(Some(current_response))
                    }
                    _ => (),
                },
                (_, Event::Eof) => return Ok(None),
                _ => (),
            }
        }
    }

    /// Drain the document, collecting every response element into a Vec.
    pub fn get_all_responses(&mut self) -> Fallible<Vec<Response>> {
        let mut rv = vec![];
        while let Some(x) = self.next_response()? {
            rv.push(x);
        }
        Ok(rv)
    }
}
|
||||||
196
rust/src/storage/exports.rs
Normal file
196
rust/src/storage/exports.rs
Normal file
|
|
@ -0,0 +1,196 @@
|
||||||
|
pub use super::dav::exports::*;
|
||||||
|
pub use super::filesystem::exports::*;
|
||||||
|
pub use super::http::exports::*;
|
||||||
|
pub use super::singlefile::exports::*;
|
||||||
|
use super::Storage;
|
||||||
|
use errors::*;
|
||||||
|
use item::Item;
|
||||||
|
use std::ffi::{CStr, CString};
|
||||||
|
use std::os::raw::c_char;
|
||||||
|
use std::ptr;
|
||||||
|
|
||||||
|
#[no_mangle]
|
||||||
|
pub unsafe extern "C" fn vdirsyncer_storage_free(storage: *mut Box<Storage>) {
|
||||||
|
let _: Box<Box<Storage>> = Box::from_raw(storage);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Start listing a storage; returns a caller-owned iterator handle (free via
/// `vdirsyncer_free_storage_listing`), or NULL on error (exported via `err`).
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_storage_list(
    storage: *mut Box<Storage>,
    err: *mut *mut ShippaiError,
) -> *mut VdirsyncerStorageListing {
    if let Some(x) = export_result((**storage).list(), err) {
        Box::into_raw(Box::new(VdirsyncerStorageListing {
            iterator: x,
            href: None,
            etag: None,
        }))
    } else {
        ptr::null_mut()
    }
}
|
||||||
|
|
||||||
|
#[no_mangle]
|
||||||
|
pub unsafe extern "C" fn vdirsyncer_storage_get(
|
||||||
|
storage: *mut Box<Storage>,
|
||||||
|
c_href: *const c_char,
|
||||||
|
err: *mut *mut ShippaiError,
|
||||||
|
) -> *mut VdirsyncerStorageGetResult {
|
||||||
|
let href = CStr::from_ptr(c_href);
|
||||||
|
if let Some((item, href)) = export_result((**storage).get(href.to_str().unwrap()), err) {
|
||||||
|
Box::into_raw(Box::new(VdirsyncerStorageGetResult {
|
||||||
|
item: Box::into_raw(Box::new(item)),
|
||||||
|
etag: CString::new(href).unwrap().into_raw(),
|
||||||
|
}))
|
||||||
|
} else {
|
||||||
|
ptr::null_mut()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Upload a new item; returns a caller-owned (href, etag) pair, or NULL on
/// error (exported via `err`). The item is cloned, so the caller keeps
/// ownership of `item`.
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_storage_upload(
    storage: *mut Box<Storage>,
    item: *mut Item,
    err: *mut *mut ShippaiError,
) -> *mut VdirsyncerStorageUploadResult {
    if let Some((href, etag)) = export_result((**storage).upload((*item).clone()), err) {
        Box::into_raw(Box::new(VdirsyncerStorageUploadResult {
            href: CString::new(href).unwrap().into_raw(),
            etag: CString::new(etag).unwrap().into_raw(),
        }))
    } else {
        ptr::null_mut()
    }
}
|
||||||
|
|
||||||
|
/// Replace an existing item, guarded by its etag; returns the new etag as a
/// caller-owned C string, or NULL on error (exported via `err`).
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_storage_update(
    storage: *mut Box<Storage>,
    c_href: *const c_char,
    item: *mut Item,
    c_etag: *const c_char,
    err: *mut *mut ShippaiError,
) -> *const c_char {
    let href = CStr::from_ptr(c_href);
    let etag = CStr::from_ptr(c_etag);
    let res = (**storage).update(
        href.to_str().unwrap(),
        (*item).clone(),
        etag.to_str().unwrap(),
    );
    if let Some(etag) = export_result(res, err) {
        CString::new(etag).unwrap().into_raw()
    } else {
        ptr::null_mut()
    }
}
|
||||||
|
|
||||||
|
/// Delete an item, guarded by its etag; failures are exported via `err`.
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_storage_delete(
    storage: *mut Box<Storage>,
    c_href: *const c_char,
    c_etag: *const c_char,
    err: *mut *mut ShippaiError,
) {
    let href = CStr::from_ptr(c_href);
    let etag = CStr::from_ptr(c_etag);
    let res = (**storage).delete(href.to_str().unwrap(), etag.to_str().unwrap());
    let _ = export_result(res, err);
}
|
||||||
|
|
||||||
|
/// Switch the storage into buffered mode (writes queued until `flush`).
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_storage_buffered(storage: *mut Box<Storage>) {
    (**storage).buffered();
}
|
||||||
|
|
||||||
|
/// Flush any buffered writes; failures are exported via `err`.
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_storage_flush(
    storage: *mut Box<Storage>,
    err: *mut *mut ShippaiError,
) {
    let _ = export_result((**storage).flush(), err);
}
|
||||||
|
|
||||||
|
/// FFI cursor over a storage listing: holds the live iterator plus the
/// current (href, etag) pair, filled in by `advance()`.
pub struct VdirsyncerStorageListing {
    iterator: Box<Iterator<Item = (String, String)>>,
    href: Option<String>,
    etag: Option<String>,
}
|
||||||
|
|
||||||
|
impl VdirsyncerStorageListing {
|
||||||
|
pub fn advance(&mut self) -> bool {
|
||||||
|
match self.iterator.next() {
|
||||||
|
Some((href, etag)) => {
|
||||||
|
self.href = Some(href);
|
||||||
|
self.etag = Some(etag);
|
||||||
|
true
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
self.href = None;
|
||||||
|
self.etag = None;
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_href(&mut self) -> Option<String> {
|
||||||
|
self.href.take()
|
||||||
|
}
|
||||||
|
pub fn get_etag(&mut self) -> Option<String> {
|
||||||
|
self.etag.take()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[no_mangle]
|
||||||
|
pub unsafe extern "C" fn vdirsyncer_free_storage_listing(listing: *mut VdirsyncerStorageListing) {
|
||||||
|
let _: Box<VdirsyncerStorageListing> = Box::from_raw(listing);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Advance the cursor; returns true while another (href, etag) is available.
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_advance_storage_listing(
    listing: *mut VdirsyncerStorageListing,
) -> bool {
    (*listing).advance()
}
|
||||||
|
|
||||||
|
/// Return the current href as a caller-owned C string.
/// NOTE(review): panics if `advance` was not called, returned false, or the
/// href was already taken — the C caller must respect that protocol.
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_storage_listing_get_href(
    listing: *mut VdirsyncerStorageListing,
) -> *const c_char {
    CString::new((*listing).get_href().unwrap())
        .unwrap()
        .into_raw()
}
|
||||||
|
|
||||||
|
/// Return the current etag as a caller-owned C string.
/// NOTE(review): same protocol/panic caveats as `..._get_href`.
#[no_mangle]
pub unsafe extern "C" fn vdirsyncer_storage_listing_get_etag(
    listing: *mut VdirsyncerStorageListing,
) -> *const c_char {
    CString::new((*listing).get_etag().unwrap())
        .unwrap()
        .into_raw()
}
|
||||||
|
|
||||||
|
/// C-compatible result of `vdirsyncer_storage_get`: owned item pointer plus
/// the etag as an owned C string.
#[repr(C)]
pub struct VdirsyncerStorageGetResult {
    pub item: *mut Item,
    pub etag: *const c_char,
}
|
||||||
|
|
||||||
|
#[no_mangle]
|
||||||
|
pub unsafe extern "C" fn vdirsyncer_free_storage_get_result(res: *mut VdirsyncerStorageGetResult) {
|
||||||
|
let _: Box<VdirsyncerStorageGetResult> = Box::from_raw(res);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// C-compatible result of `vdirsyncer_storage_upload`: the new resource's
/// href and etag as owned C strings.
#[repr(C)]
pub struct VdirsyncerStorageUploadResult {
    pub href: *const c_char,
    pub etag: *const c_char,
}
|
||||||
|
|
||||||
|
#[no_mangle]
|
||||||
|
pub unsafe extern "C" fn vdirsyncer_free_storage_upload_result(
|
||||||
|
res: *mut VdirsyncerStorageUploadResult,
|
||||||
|
) {
|
||||||
|
let _: Box<VdirsyncerStorageUploadResult> = Box::from_raw(res);
|
||||||
|
}
|
||||||
220
rust/src/storage/filesystem.rs
Normal file
220
rust/src/storage/filesystem.rs
Normal file
|
|
@ -0,0 +1,220 @@
|
||||||
|
use super::Storage;
|
||||||
|
use errors::*;
|
||||||
|
use failure;
|
||||||
|
use libc;
|
||||||
|
use std::fs;
|
||||||
|
use std::io;
|
||||||
|
use std::io::{Read, Write};
|
||||||
|
use std::os::unix::fs::MetadataExt;
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::process::Command;
|
||||||
|
|
||||||
|
use super::utils;
|
||||||
|
|
||||||
|
use item::Item;
|
||||||
|
|
||||||
|
use atomicwrites::{AllowOverwrite, AtomicFile, DisallowOverwrite};
|
||||||
|
|
||||||
|
/// Vdir-style storage: one file per item inside `path`, named
/// `<href-base><fileext>`; `post_hook` is an optional external command run
/// after modifications.
pub struct FilesystemStorage {
    path: PathBuf,
    fileext: String,
    post_hook: Option<String>,
}
|
||||||
|
|
||||||
|
impl FilesystemStorage {
    /// Create a storage over the directory `path`; `fileext` (including the
    /// dot, e.g. ".ics") is appended to generated hrefs.
    pub fn new<P: AsRef<Path>>(path: P, fileext: &str, post_hook: Option<String>) -> Self {
        FilesystemStorage {
            path: path.as_ref().to_owned(),
            fileext: fileext.into(),
            post_hook,
        }
    }

    /// Build an href for an item: derived from its identifier when present,
    /// random otherwise, always suffixed with the configured extension.
    fn get_href(&self, ident: Option<&str>) -> String {
        let href_base = match ident {
            Some(x) => utils::generate_href(x),
            None => utils::random_href(),
        };
        format!("{}{}", href_base, self.fileext)
    }

    /// Absolute path of the file backing `href`.
    fn get_filepath(&self, href: &str) -> PathBuf {
        self.path.join(href)
    }

    /// Run the configured post-hook command with the modified file's path as
    /// its single argument. Failures are logged, never propagated — the hook
    /// is best-effort by design.
    fn run_post_hook<S: AsRef<::std::ffi::OsStr>>(&self, fpath: S) {
        if let Some(ref cmd) = self.post_hook {
            let status = match Command::new(cmd).arg(fpath).status() {
                Ok(x) => x,
                Err(e) => {
                    warn!("Failed to run external hook: {}", e);
                    return;
                }
            };

            if !status.success() {
                if let Some(code) = status.code() {
                    warn!("External hook exited with error code {}.", code);
                } else {
                    // No exit code: the process was terminated by a signal.
                    warn!("External hook was killed.");
                }
            }
        }
    }
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn handle_io_error(href: &str, e: io::Error) -> failure::Error {
|
||||||
|
match e.kind() {
|
||||||
|
io::ErrorKind::NotFound => Error::ItemNotFound {
|
||||||
|
href: href.to_owned(),
|
||||||
|
}.into(),
|
||||||
|
io::ErrorKind::AlreadyExists => Error::ItemAlreadyExisting {
|
||||||
|
href: href.to_owned(),
|
||||||
|
}.into(),
|
||||||
|
_ => e.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod exports {
|
||||||
|
use super::*;
|
||||||
|
use std::ffi::CStr;
|
||||||
|
use std::os::raw::c_char;
|
||||||
|
|
||||||
|
#[no_mangle]
|
||||||
|
pub unsafe extern "C" fn vdirsyncer_init_filesystem(
|
||||||
|
path: *const c_char,
|
||||||
|
fileext: *const c_char,
|
||||||
|
post_hook: *const c_char,
|
||||||
|
) -> *mut Box<Storage> {
|
||||||
|
let path_c = CStr::from_ptr(path);
|
||||||
|
let fileext_c = CStr::from_ptr(fileext);
|
||||||
|
let post_hook_c = CStr::from_ptr(post_hook);
|
||||||
|
let post_hook_str = post_hook_c.to_str().unwrap();
|
||||||
|
|
||||||
|
Box::into_raw(Box::new(Box::new(FilesystemStorage::new(
|
||||||
|
path_c.to_str().unwrap(),
|
||||||
|
fileext_c.to_str().unwrap(),
|
||||||
|
if post_hook_str.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(post_hook_str.to_owned())
|
||||||
|
},
|
||||||
|
))))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Computes a cheap change-detection etag from file metadata:
/// `"<mtime>.<mtime_nsec>;<inode>"`. A rewrite of the file (new mtime, or a
/// new inode after an atomic rename) therefore changes the etag.
#[inline]
fn etag_from_file(metadata: &fs::Metadata) -> String {
    format!(
        "{}.{};{}",
        metadata.mtime(),
        metadata.mtime_nsec(),
        metadata.ino()
    )
}
|
||||||
|
|
||||||
|
impl Storage for FilesystemStorage {
|
||||||
|
fn list<'a>(&'a mut self) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>> {
|
||||||
|
let mut rv: Vec<(String, String)> = vec![];
|
||||||
|
|
||||||
|
for entry_res in fs::read_dir(&self.path)? {
|
||||||
|
let entry = entry_res?;
|
||||||
|
let metadata = entry.metadata()?;
|
||||||
|
|
||||||
|
if !metadata.is_file() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let fname: String = match entry.file_name().into_string() {
|
||||||
|
Ok(x) => x,
|
||||||
|
Err(_) => continue,
|
||||||
|
};
|
||||||
|
|
||||||
|
if !fname.ends_with(&self.fileext) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
rv.push((fname, etag_from_file(&metadata)));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Box::new(rv.into_iter()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get(&mut self, href: &str) -> Fallible<(Item, String)> {
|
||||||
|
let fpath = self.get_filepath(href);
|
||||||
|
let mut f = match fs::File::open(fpath) {
|
||||||
|
Ok(x) => x,
|
||||||
|
Err(e) => Err(handle_io_error(href, e))?,
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut s = String::new();
|
||||||
|
f.read_to_string(&mut s)?;
|
||||||
|
Ok((Item::from_raw(s), etag_from_file(&f.metadata()?)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn upload(&mut self, item: Item) -> Fallible<(String, String)> {
|
||||||
|
#[inline]
|
||||||
|
fn inner(s: &mut FilesystemStorage, item: &Item, href: &str) -> io::Result<String> {
|
||||||
|
let filepath = s.get_filepath(href);
|
||||||
|
let af = AtomicFile::new(&filepath, DisallowOverwrite);
|
||||||
|
let content = item.get_raw();
|
||||||
|
af.write(|f| f.write_all(content.as_bytes()))?;
|
||||||
|
let new_etag = etag_from_file(&fs::metadata(&filepath)?);
|
||||||
|
s.run_post_hook(filepath);
|
||||||
|
Ok(new_etag)
|
||||||
|
}
|
||||||
|
|
||||||
|
let ident = item.get_ident()?;
|
||||||
|
let mut href = self.get_href(Some(&ident));
|
||||||
|
let etag = match inner(self, &item, &href) {
|
||||||
|
Ok(x) => x,
|
||||||
|
Err(ref e) if e.raw_os_error() == Some(libc::ENAMETOOLONG) => {
|
||||||
|
href = self.get_href(None);
|
||||||
|
match inner(self, &item, &href) {
|
||||||
|
Ok(x) => x,
|
||||||
|
Err(e) => Err(handle_io_error(&href, e))?,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(e) => Err(handle_io_error(&href, e))?,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok((href, etag))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(&mut self, href: &str, item: Item, etag: &str) -> Fallible<String> {
|
||||||
|
let filepath = self.get_filepath(href);
|
||||||
|
let metadata = match fs::metadata(&filepath) {
|
||||||
|
Ok(x) => x,
|
||||||
|
Err(e) => Err(handle_io_error(href, e))?,
|
||||||
|
};
|
||||||
|
let actual_etag = etag_from_file(&metadata);
|
||||||
|
if actual_etag != etag {
|
||||||
|
Err(Error::WrongEtag {
|
||||||
|
href: href.to_owned(),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let af = AtomicFile::new(&filepath, AllowOverwrite);
|
||||||
|
let content = item.get_raw();
|
||||||
|
af.write(|f| f.write_all(content.as_bytes()))?;
|
||||||
|
let new_etag = etag_from_file(&fs::metadata(filepath)?);
|
||||||
|
Ok(new_etag)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn delete(&mut self, href: &str, etag: &str) -> Fallible<()> {
|
||||||
|
let filepath = self.get_filepath(href);
|
||||||
|
let metadata = match fs::metadata(&filepath) {
|
||||||
|
Ok(x) => x,
|
||||||
|
Err(e) => Err(handle_io_error(href, e))?,
|
||||||
|
};
|
||||||
|
let actual_etag = etag_from_file(&metadata);
|
||||||
|
if actual_etag != etag {
|
||||||
|
Err(Error::WrongEtag {
|
||||||
|
href: href.to_owned(),
|
||||||
|
})?;
|
||||||
|
}
|
||||||
|
fs::remove_file(filepath)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
230
rust/src/storage/http.rs
Normal file
230
rust/src/storage/http.rs
Normal file
|
|
@ -0,0 +1,230 @@
|
||||||
|
use std::collections::BTreeMap;
|
||||||
|
use std::fs::File;
|
||||||
|
use std::io::Read;
|
||||||
|
|
||||||
|
use std::ffi::CStr;
|
||||||
|
use std::os::raw::c_char;
|
||||||
|
|
||||||
|
use reqwest;
|
||||||
|
|
||||||
|
use super::singlefile::split_collection;
|
||||||
|
use super::Storage;
|
||||||
|
use errors::*;
|
||||||
|
|
||||||
|
use item::Item;
|
||||||
|
|
||||||
|
// href -> (parsed item, etag) snapshot of the remote resource.
type ItemCache = BTreeMap<String, (Item, String)>;
pub type Username = String;
pub type Password = String;
/// HTTP basic-auth credentials as `(username, password)`.
pub type Auth = (Username, Password);
|
||||||
|
|
||||||
|
/// Wrapper around Client.execute to enable logging
|
||||||
|
#[inline]
|
||||||
|
pub fn send_request(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
request: reqwest::Request,
|
||||||
|
) -> Fallible<reqwest::Response> {
|
||||||
|
debug!("> {} {}", request.method(), request.url());
|
||||||
|
for header in request.headers().iter() {
|
||||||
|
debug!("> {}: {}", header.name(), header.value_string());
|
||||||
|
}
|
||||||
|
debug!("> {:?}", request.body());
|
||||||
|
debug!("> ---");
|
||||||
|
let response = client.execute(request)?;
|
||||||
|
debug!("< {:?}", response.status());
|
||||||
|
for header in response.headers().iter() {
|
||||||
|
debug!("< {}: {}", header.name(), header.value_string());
|
||||||
|
}
|
||||||
|
Ok(response)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Connection options shared by the HTTP-based storages.
#[derive(Clone)]
pub struct HttpConfig {
    /// Optional basic-auth credentials.
    pub auth: Option<Auth>,
    /// Optional `User-Agent` header value.
    pub useragent: Option<String>,
    /// Optional path to a PEM file with an extra root certificate to trust.
    pub verify_cert: Option<String>,
    /// Optional client-certificate path; currently unused pending upstream
    /// TLS support (see TODO in `into_connection`).
    pub auth_cert: Option<String>,
}
|
||||||
|
|
||||||
|
impl HttpConfig {
|
||||||
|
pub fn into_connection(self) -> Fallible<reqwest::ClientBuilder> {
|
||||||
|
let mut headers = reqwest::header::Headers::new();
|
||||||
|
|
||||||
|
if let Some((username, password)) = self.auth {
|
||||||
|
headers.set(reqwest::header::Authorization(reqwest::header::Basic {
|
||||||
|
username,
|
||||||
|
password: Some(password),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(useragent) = self.useragent {
|
||||||
|
headers.set(reqwest::header::UserAgent::new(useragent));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut client = reqwest::Client::builder();
|
||||||
|
client.default_headers(headers);
|
||||||
|
|
||||||
|
if let Some(verify_cert) = self.verify_cert {
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
File::open(verify_cert)?.read_to_end(&mut buf)?;
|
||||||
|
let cert = reqwest::Certificate::from_pem(&buf)?;
|
||||||
|
client.add_root_certificate(cert);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: auth_cert https://github.com/sfackler/rust-native-tls/issues/27
|
||||||
|
Ok(client)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read-only storage backed by a single collection file served over HTTP.
pub struct HttpStorage {
    // URL of the remote resource.
    url: String,
    // href -> (item, etag)
    items_cache: Option<ItemCache>,
    // Connection options used to build the HTTP client on each `list`.
    http_config: HttpConfig,
}
|
||||||
|
|
||||||
|
impl HttpStorage {
|
||||||
|
pub fn new(url: String, http_config: HttpConfig) -> Self {
|
||||||
|
HttpStorage {
|
||||||
|
url,
|
||||||
|
items_cache: None,
|
||||||
|
http_config,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_items(&mut self) -> Fallible<&mut ItemCache> {
|
||||||
|
if self.items_cache.is_none() {
|
||||||
|
self.list()?;
|
||||||
|
}
|
||||||
|
Ok(self.items_cache.as_mut().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Storage for HttpStorage {
|
||||||
|
fn list<'a>(&'a mut self) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>> {
|
||||||
|
let client = self.http_config.clone().into_connection()?.build()?;
|
||||||
|
|
||||||
|
let mut response = handle_http_error(&self.url, client.get(&self.url).send()?)?;
|
||||||
|
let s = response.text()?;
|
||||||
|
|
||||||
|
let mut new_cache = BTreeMap::new();
|
||||||
|
for component in split_collection(&s)? {
|
||||||
|
let mut item = Item::from_component(component);
|
||||||
|
item = item.with_uid(&item.get_hash()?)?;
|
||||||
|
let ident = item.get_ident()?;
|
||||||
|
let hash = item.get_hash()?;
|
||||||
|
new_cache.insert(ident, (item, hash));
|
||||||
|
}
|
||||||
|
|
||||||
|
self.items_cache = Some(new_cache);
|
||||||
|
Ok(Box::new(self.items_cache.as_ref().unwrap().iter().map(
|
||||||
|
|(href, &(_, ref etag))| (href.clone(), etag.clone()),
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get(&mut self, href: &str) -> Fallible<(Item, String)> {
|
||||||
|
match self.get_items()?.get(href) {
|
||||||
|
Some(&(ref href, ref etag)) => Ok((href.clone(), etag.clone())),
|
||||||
|
None => Err(Error::ItemNotFound {
|
||||||
|
href: href.to_owned(),
|
||||||
|
})?,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn upload(&mut self, _item: Item) -> Fallible<(String, String)> {
|
||||||
|
Err(Error::ReadOnly)?
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update(&mut self, _href: &str, _item: Item, _etag: &str) -> Fallible<String> {
|
||||||
|
Err(Error::ReadOnly)?
|
||||||
|
}
|
||||||
|
|
||||||
|
fn delete(&mut self, _href: &str, _etag: &str) -> Fallible<()> {
|
||||||
|
Err(Error::ReadOnly)?
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod exports {
    use super::*;
    use std::ffi::CStr;
    use std::os::raw::c_char;

    /// C constructor for the read-only `HttpStorage`.
    ///
    /// All pointers must be valid NUL-terminated UTF-8; empty strings for the
    /// optional parameters mean "unset" (see `init_http_config`).
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_init_http(
        url: *const c_char,
        username: *const c_char,
        password: *const c_char,
        useragent: *const c_char,
        verify_cert: *const c_char,
        auth_cert: *const c_char,
    ) -> *mut Box<Storage> {
        let url = CStr::from_ptr(url);

        Box::into_raw(Box::new(Box::new(HttpStorage::new(
            url.to_str().unwrap().to_owned(),
            init_http_config(username, password, useragent, verify_cert, auth_cert),
        ))))
    }
}
|
||||||
|
|
||||||
|
pub fn handle_http_error(href: &str, mut r: reqwest::Response) -> Fallible<reqwest::Response> {
|
||||||
|
if !r.status().is_success() {
|
||||||
|
debug!("< Error response, dumping body:");
|
||||||
|
debug!("< {:?}", r.text());
|
||||||
|
}
|
||||||
|
|
||||||
|
match r.status() {
|
||||||
|
reqwest::StatusCode::NotFound => Err(Error::ItemNotFound {
|
||||||
|
href: href.to_owned(),
|
||||||
|
})?,
|
||||||
|
reqwest::StatusCode::UnsupportedMediaType => Err(Error::UnsupportedVobject {
|
||||||
|
href: href.to_owned(),
|
||||||
|
})?,
|
||||||
|
_ => Ok(r.error_for_status()?),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub unsafe fn init_http_config(
|
||||||
|
username: *const c_char,
|
||||||
|
password: *const c_char,
|
||||||
|
useragent: *const c_char,
|
||||||
|
verify_cert: *const c_char,
|
||||||
|
auth_cert: *const c_char,
|
||||||
|
) -> HttpConfig {
|
||||||
|
let username = CStr::from_ptr(username);
|
||||||
|
let password = CStr::from_ptr(password);
|
||||||
|
let username_dec = username.to_str().unwrap();
|
||||||
|
let password_dec = password.to_str().unwrap();
|
||||||
|
|
||||||
|
let useragent = CStr::from_ptr(useragent);
|
||||||
|
let useragent_dec = useragent.to_str().unwrap();
|
||||||
|
let verify_cert = CStr::from_ptr(verify_cert);
|
||||||
|
let verify_cert_dec = verify_cert.to_str().unwrap();
|
||||||
|
let auth_cert = CStr::from_ptr(auth_cert);
|
||||||
|
let auth_cert_dec = auth_cert.to_str().unwrap();
|
||||||
|
|
||||||
|
let auth = if !username_dec.is_empty() && !password_dec.is_empty() {
|
||||||
|
Some((username_dec.to_owned(), password_dec.to_owned()))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
HttpConfig {
|
||||||
|
auth,
|
||||||
|
useragent: if useragent_dec.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(useragent_dec.to_owned())
|
||||||
|
},
|
||||||
|
verify_cert: if verify_cert_dec.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(verify_cert_dec.to_owned())
|
||||||
|
},
|
||||||
|
auth_cert: if auth_cert_dec.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(auth_cert_dec.to_owned())
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
54
rust/src/storage/mod.rs
Normal file
54
rust/src/storage/mod.rs
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
mod dav;
|
||||||
|
pub mod exports;
|
||||||
|
mod filesystem;
|
||||||
|
mod http;
|
||||||
|
mod singlefile;
|
||||||
|
mod utils;
|
||||||
|
use errors::Fallible;
|
||||||
|
use item::Item;
|
||||||
|
|
||||||
|
/// Convenience alias for the `(item, etag)` pair returned by `Storage::get`.
type ItemAndEtag = (Item, String);
|
||||||
|
|
||||||
|
/// Common interface of all storage backends (filesystem, singlefile, HTTP,
/// DAV). All methods operate on `(href, etag)` pairs: the href identifies an
/// item within the storage, the etag detects concurrent modification.
pub trait Storage {
    /// Returns an iterator of `(href, etag)` pairs for every item.
    fn list<'a>(&'a mut self) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>>;

    /// Fetch a single item.
    ///
    /// Returns `(item, etag)` for `href`; fails (e.g. with
    /// `Error::ItemNotFound`) if the item can't be found.
    fn get(&mut self, href: &str) -> Fallible<ItemAndEtag>;

    /// Upload a new item.
    ///
    /// In cases where the new etag cannot be atomically determined (i.e. in the same
    /// "transaction" as the upload itself), this method may return `None` as etag. This
    /// special case only exists because of DAV. Avoid this situation whenever possible.
    ///
    /// Returns `(href, etag)`
    fn upload(&mut self, item: Item) -> Fallible<(String, String)>;

    /// Update an item.
    ///
    /// The etag may be none in some cases, see `upload`.
    ///
    /// Returns `etag`
    fn update(&mut self, href: &str, item: Item, etag: &str) -> Fallible<String>;

    /// Delete an item by href.
    fn delete(&mut self, href: &str, etag: &str) -> Fallible<()>;

    /// Enter buffered mode for storages that support it.
    ///
    /// Uploads, updates and deletions may not be effective until `flush` is explicitly called.
    ///
    /// Use this if you will potentially write a lot of data to the storage, it improves
    /// performance for storages that implement it.
    fn buffered(&mut self) {}

    /// Write back all changes to the collection.
    fn flush(&mut self) -> Fallible<()> {
        Ok(())
    }
}
|
||||||
370
rust/src/storage/singlefile.rs
Normal file
370
rust/src/storage/singlefile.rs
Normal file
|
|
@ -0,0 +1,370 @@
|
||||||
|
use super::Storage;
|
||||||
|
use errors::*;
|
||||||
|
use std::collections::btree_map::Entry::*;
|
||||||
|
use std::collections::{BTreeMap, BTreeSet};
|
||||||
|
use std::fs::{metadata, File};
|
||||||
|
use std::io::{Read, Write};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
use std::time::SystemTime;
|
||||||
|
use vobject;
|
||||||
|
|
||||||
|
use atomicwrites::{AllowOverwrite, AtomicFile};
|
||||||
|
|
||||||
|
use item::Item;
|
||||||
|
|
||||||
|
// href -> (parsed item, content-hash etag) for the in-memory collection.
type ItemCache = BTreeMap<String, (Item, String)>;
|
||||||
|
|
||||||
|
/// Storage keeping all items inside one collection file on disk.
pub struct SinglefileStorage {
    // Path of the collection file.
    path: PathBuf,
    // href -> (item, etag), plus the file mtime observed when it was read;
    // the mtime is used to detect concurrent modification on write-back.
    items_cache: Option<(ItemCache, SystemTime)>,
    // When true, mutations stay in memory until `flush` is called.
    buffered_mode: bool,
    // True when the in-memory cache has changes not yet written to disk.
    dirty_cache: bool,
}
|
||||||
|
|
||||||
|
impl SinglefileStorage {
|
||||||
|
pub fn new<P: AsRef<Path>>(path: P) -> Self {
|
||||||
|
SinglefileStorage {
|
||||||
|
path: path.as_ref().to_owned(),
|
||||||
|
items_cache: None,
|
||||||
|
buffered_mode: false,
|
||||||
|
dirty_cache: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_items(&mut self) -> Fallible<&mut ItemCache> {
|
||||||
|
if self.items_cache.is_none() {
|
||||||
|
self.list()?;
|
||||||
|
}
|
||||||
|
Ok(&mut self.items_cache.as_mut().unwrap().0)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_back(&mut self) -> Fallible<()> {
|
||||||
|
self.dirty_cache = true;
|
||||||
|
if self.buffered_mode {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
self.flush()?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub mod exports {
    use super::*;
    use std::ffi::CStr;
    use std::os::raw::c_char;

    /// C constructor for `SinglefileStorage`.
    ///
    /// `path` must be a valid NUL-terminated UTF-8 file path.
    #[no_mangle]
    pub unsafe extern "C" fn vdirsyncer_init_singlefile(path: *const c_char) -> *mut Box<Storage> {
        let cstring = CStr::from_ptr(path);
        Box::into_raw(Box::new(Box::new(SinglefileStorage::new(
            cstring.to_str().unwrap(),
        ))))
    }
}
|
||||||
|
|
||||||
|
impl Storage for SinglefileStorage {
    /// Reads and parses the whole collection file, rebuilding the item cache
    /// and recording the file's mtime for later conflict detection.
    fn list<'a>(&'a mut self) -> Fallible<Box<Iterator<Item = (String, String)> + 'a>> {
        let mut new_cache = BTreeMap::new();
        let mtime = metadata(&self.path)?.modified()?;
        let mut f = File::open(&self.path)?;
        let mut s = String::new();
        f.read_to_string(&mut s)?;
        for component in split_collection(&s)? {
            let item = Item::from_component(component);
            let hash = item.get_hash()?;
            let ident = item.get_ident()?;
            new_cache.insert(ident, (item, hash));
        }

        self.items_cache = Some((new_cache, mtime));
        self.dirty_cache = false;
        Ok(Box::new(self.items_cache.as_ref().unwrap().0.iter().map(
            |(href, &(_, ref etag))| (href.clone(), etag.clone()),
        )))
    }

    /// Serves one item from the cache; the content hash doubles as etag.
    fn get(&mut self, href: &str) -> Fallible<(Item, String)> {
        match self.get_items()?.get(href) {
            Some(&(ref href, ref etag)) => Ok((href.clone(), etag.clone())),
            None => Err(Error::ItemNotFound {
                href: href.to_owned(),
            })?,
        }
    }

    /// Inserts a new item keyed by its ident and writes the collection back.
    fn upload(&mut self, item: Item) -> Fallible<(String, String)> {
        let hash = item.get_hash()?;
        let href = item.get_ident()?;
        match self.get_items()?.entry(href.clone()) {
            Occupied(_) => Err(Error::ItemAlreadyExisting { href: href.clone() })?,
            Vacant(vc) => vc.insert((item, hash.clone())),
        };
        self.write_back()?;
        Ok((href, hash))
    }

    /// Replaces an existing item if `etag` matches its cached hash; fails
    /// with `WrongEtag` / `ItemNotFound` otherwise. Returns the new hash.
    fn update(&mut self, href: &str, item: Item, etag: &str) -> Fallible<String> {
        let hash = match self.get_items()?.entry(href.to_owned()) {
            Occupied(mut oc) => {
                if oc.get().1 == etag {
                    let hash = item.get_hash()?;
                    oc.insert((item, hash.clone()));
                    hash
                } else {
                    Err(Error::WrongEtag {
                        href: href.to_owned(),
                    })?
                }
            }
            Vacant(_) => Err(Error::ItemNotFound {
                href: href.to_owned(),
            })?,
        };
        self.write_back()?;
        Ok(hash)
    }

    /// Removes an item if `etag` matches, then writes the collection back.
    fn delete(&mut self, href: &str, etag: &str) -> Fallible<()> {
        match self.get_items()?.entry(href.to_owned()) {
            Occupied(oc) => {
                if oc.get().1 == etag {
                    oc.remove();
                } else {
                    Err(Error::WrongEtag {
                        href: href.to_owned(),
                    })?
                }
            }
            Vacant(_) => Err(Error::ItemNotFound {
                href: href.to_owned(),
            })?,
        }
        self.write_back()?;
        Ok(())
    }

    /// Defers disk writes until `flush` is called.
    fn buffered(&mut self) {
        self.buffered_mode = true;
    }

    /// Serializes the cached items and atomically rewrites the collection
    /// file. The file's mtime is re-checked inside the atomic-write callback
    /// and a `MtimeMismatch` is raised when it changed since the cache was
    /// read (best-effort concurrent-modification detection).
    fn flush(&mut self) -> Fallible<()> {
        if !self.dirty_cache {
            return Ok(());
        }
        let (items, mtime) = self.items_cache.take().unwrap();

        let af = AtomicFile::new(&self.path, AllowOverwrite);
        let content = join_collection(items.into_iter().map(|(_, (item, _))| item))?;

        let path = &self.path;
        let write_inner = |f: &mut File| -> Fallible<()> {
            f.write_all(content.as_bytes())?;
            let real_mtime = metadata(path)?.modified()?;
            if mtime != real_mtime {
                Err(Error::MtimeMismatch {
                    filepath: path.to_string_lossy().into_owned(),
                })?;
            }
            Ok(())
        };

        // AtomicFile wants a std Error type; wrap failure::Error via compat().
        af.write::<(), ::failure::Compat<::failure::Error>, _>(|f| {
            write_inner(f).map_err(|e| e.compat())
        })?;

        self.dirty_cache = false;

        Ok(())
    }
}
|
||||||
|
|
||||||
|
/// Parses a raw icalendar/vcard string into a flat list of per-item
/// components: VCALENDAR wrappers are split into one VCALENDAR per entry
/// (see `split_vcalendar`), VADDRESSBOOK wrappers are unwrapped into their
/// VCARDs, bare VCARDs pass through. Anything else is an
/// `UnexpectedVobject` error.
pub fn split_collection(mut input: &str) -> Fallible<Vec<vobject::Component>> {
    let mut rv = vec![];
    // The input may contain several concatenated top-level components;
    // consume them one at a time.
    while !input.is_empty() {
        let (component, remainder) =
            vobject::read_component(input).map_err(::failure::SyncFailure::new)?;
        input = remainder;

        match component.name.as_ref() {
            "VCALENDAR" => rv.extend(split_vcalendar(component)?),
            "VCARD" => rv.push(component),
            "VADDRESSBOOK" => for vcard in component.subcomponents {
                if vcard.name != "VCARD" {
                    Err(Error::UnexpectedVobject {
                        found: vcard.name.clone(),
                        expected: "VCARD".to_owned(),
                    })?;
                }
                rv.push(vcard);
            },
            _ => Err(Error::UnexpectedVobject {
                found: component.name.clone(),
                expected: "VCALENDAR | VCARD | VADDRESSBOOK".to_owned(),
            })?,
        }
    }

    Ok(rv)
}
|
||||||
|
|
||||||
|
/// Split one VCALENDAR component into multiple VCALENDAR components
|
||||||
|
#[inline]
|
||||||
|
fn split_vcalendar(mut vcalendar: vobject::Component) -> Fallible<Vec<vobject::Component>> {
|
||||||
|
vcalendar.props.remove("METHOD");
|
||||||
|
|
||||||
|
let mut timezones = BTreeMap::new(); // tzid => component
|
||||||
|
let mut subcomponents = vec![];
|
||||||
|
|
||||||
|
for component in vcalendar.subcomponents.drain(..) {
|
||||||
|
match component.name.as_ref() {
|
||||||
|
"VTIMEZONE" => {
|
||||||
|
let tzid = match component.get_only("TZID") {
|
||||||
|
Some(x) => x.value_as_string().clone(),
|
||||||
|
None => continue,
|
||||||
|
};
|
||||||
|
timezones.insert(tzid, component);
|
||||||
|
}
|
||||||
|
"VTODO" | "VEVENT" | "VJOURNAL" => subcomponents.push(component),
|
||||||
|
_ => Err(Error::UnexpectedVobject {
|
||||||
|
found: component.name.clone(),
|
||||||
|
expected: "VTIMEZONE | VTODO | VEVENT | VJOURNAL".to_owned(),
|
||||||
|
})?,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut by_uid = BTreeMap::new();
|
||||||
|
let mut no_uid = vec![];
|
||||||
|
|
||||||
|
for component in subcomponents {
|
||||||
|
let uid = component.get_only("UID").cloned();
|
||||||
|
|
||||||
|
let mut wrapper = match uid
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|u| by_uid.remove(&u.value_as_string()))
|
||||||
|
{
|
||||||
|
Some(x) => x,
|
||||||
|
None => vcalendar.clone(),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut required_tzids = BTreeSet::new();
|
||||||
|
for props in component.props.values() {
|
||||||
|
for prop in props {
|
||||||
|
if let Some(x) = prop.params.get("TZID") {
|
||||||
|
required_tzids.insert(x.to_owned());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for tzid in required_tzids {
|
||||||
|
if let Some(tz) = timezones.get(&tzid) {
|
||||||
|
wrapper.subcomponents.push(tz.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
wrapper.subcomponents.push(component);
|
||||||
|
|
||||||
|
match uid {
|
||||||
|
Some(p) => {
|
||||||
|
by_uid.insert(p.value_as_string(), wrapper);
|
||||||
|
}
|
||||||
|
None => no_uid.push(wrapper),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(by_uid
|
||||||
|
.into_iter()
|
||||||
|
.map(|(_, v)| v)
|
||||||
|
.chain(no_uid.into_iter())
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Inverse of `split_collection`: merges items back into one collection.
///
/// VCALENDAR items are unwrapped and their entries collected under a single
/// VCALENDAR (their VERSION properties must agree); VCARD items are nested
/// under one VADDRESSBOOK. An empty iterator yields an empty string.
fn join_collection<I: Iterator<Item = Item>>(item_iter: I) -> Fallible<String> {
    let mut items = item_iter.peekable();

    // The first item decides which kind of collection we are building.
    let item_name = match items.peek() {
        Some(x) => x.get_component()?.name.clone(),
        None => return Ok("".to_owned()),
    };

    let wrapper_name = match item_name.as_ref() {
        "VCARD" => "VADDRESSBOOK",
        "VCALENDAR" => "VCALENDAR",
        _ => Err(Error::UnexpectedVobject {
            found: item_name.clone(),
            expected: "VCARD | VCALENDAR".to_owned(),
        })?,
    };

    let mut wrapper = vobject::Component::new(wrapper_name);
    let mut version: Option<vobject::Property> = None;

    for item in items {
        let mut c = item.into_component()?;
        if c.name != item_name {
            return Err(Error::UnexpectedVobject {
                found: c.name,
                expected: item_name.clone(),
            }.into());
        }

        if item_name == wrapper_name {
            // VCALENDAR case: hoist the entries out of the per-item wrapper
            // and make sure all items agree on the calendar VERSION.
            wrapper.subcomponents.extend(c.subcomponents.drain(..));
            match (version.as_ref(), c.get_only("VERSION")) {
                (Some(x), Some(y)) if x.raw_value != y.raw_value => {
                    return Err(Error::UnexpectedVobjectVersion {
                        expected: x.raw_value.clone(),
                        found: y.raw_value.clone(),
                    }.into());
                }
                (None, Some(_)) => (),
                _ => continue,
            }
            version = c.get_only("VERSION").cloned();
        } else {
            // VCARD case: nest the whole card under the wrapper.
            wrapper.subcomponents.push(c);
        }
    }

    if let Some(v) = version {
        wrapper.set(v);
    }

    Ok(vobject::write_component(&wrapper))
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod tests {
    use super::*;

    // Splitting a collection and joining it again must preserve the item's
    // content hash, i.e. be semantically lossless.
    fn check_roundtrip(raw: &str) {
        let components = split_collection(raw).unwrap();
        let raw2 = join_collection(components.into_iter().map(Item::from_component)).unwrap();
        assert_eq!(
            Item::from_raw(raw.to_owned()).get_hash().unwrap(),
            Item::from_raw(raw2.to_owned()).get_hash().unwrap()
        );
    }

    // Wrapper-level properties (PRODID, X-WR-*, VERSION, CALSCALE) must
    // survive the split/join roundtrip.
    #[test]
    fn test_wrapper_properties_roundtrip() {
        let raw = r#"BEGIN:VCALENDAR
PRODID:-//Google Inc//Google Calendar 70.9054//EN
X-WR-CALNAME:markus.unterwaditzer@runtastic.com
X-WR-TIMEZONE:Europe/Vienna
VERSION:2.0
CALSCALE:GREGORIAN
BEGIN:VEVENT
DTSTART;TZID=Europe/Vienna:20171012T153000
DTEND;TZID=Europe/Vienna:20171012T170000
DTSTAMP:20171009T085029Z
UID:test@test.com
STATUS:CONFIRMED
SUMMARY:Test
TRANSP:OPAQUE
END:VEVENT
END:VCALENDAR"#;
        check_roundtrip(raw);
    }
}
|
||||||
24
rust/src/storage/utils.rs
Normal file
24
rust/src/storage/utils.rs
Normal file
|
|
@ -0,0 +1,24 @@
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
/// True when `ident` can be used verbatim as an href / file name: only
/// alphanumerics and the punctuation `_ . - +` are allowed. The empty
/// string is considered safe.
fn is_href_safe(ident: &str) -> bool {
    ident
        .chars()
        .all(|c| c.is_alphanumeric() || c == '_' || c == '.' || c == '-' || c == '+')
}
|
||||||
|
|
||||||
|
pub fn generate_href(ident: &str) -> String {
|
||||||
|
if is_href_safe(ident) {
|
||||||
|
ident.to_owned()
|
||||||
|
} else {
|
||||||
|
random_href()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn random_href() -> String {
|
||||||
|
format!("{}", Uuid::new_v4())
|
||||||
|
}
|
||||||
146
rust/vdirsyncer_rustext.h
Normal file
146
rust/vdirsyncer_rustext.h
Normal file
|
|
@ -0,0 +1,146 @@
|
||||||
|
#include <stdint.h>
|
||||||
|
#include <stdlib.h>
|
||||||
|
#include <stdbool.h>
|
||||||
|
|
||||||
|
typedef struct Box_Storage Box_Storage;
|
||||||
|
|
||||||
|
typedef struct Item Item;
|
||||||
|
|
||||||
|
typedef struct ShippaiError ShippaiError;
|
||||||
|
|
||||||
|
typedef struct VdirsyncerStorageListing VdirsyncerStorageListing;
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
Item *item;
|
||||||
|
const char *etag;
|
||||||
|
} VdirsyncerStorageGetResult;
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
const char *href;
|
||||||
|
const char *etag;
|
||||||
|
} VdirsyncerStorageUploadResult;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_DavError_EtagNotFound;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_ItemAlreadyExisting;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_ItemNotFound;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_ItemUnparseable;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_MtimeMismatch;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_ReadOnly;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_UnexpectedVobject;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_UnexpectedVobjectVersion;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_UnsupportedVobject;
|
||||||
|
|
||||||
|
extern const uint8_t SHIPPAI_VARIANT_Error_WrongEtag;
|
||||||
|
|
||||||
|
void shippai_free_failure(ShippaiError *t);
|
||||||
|
|
||||||
|
void shippai_free_str(char *t);
|
||||||
|
|
||||||
|
const char *shippai_get_debug(ShippaiError *t);
|
||||||
|
|
||||||
|
const char *shippai_get_display(ShippaiError *t);
|
||||||
|
|
||||||
|
uint8_t shippai_get_variant_DavError(ShippaiError *t);
|
||||||
|
|
||||||
|
uint8_t shippai_get_variant_Error(ShippaiError *t);
|
||||||
|
|
||||||
|
bool shippai_is_error_DavError(ShippaiError *t);
|
||||||
|
|
||||||
|
bool shippai_is_error_Error(ShippaiError *t);
|
||||||
|
|
||||||
|
bool vdirsyncer_advance_storage_listing(VdirsyncerStorageListing *listing);
|
||||||
|
|
||||||
|
void vdirsyncer_free_item(Item *c);
|
||||||
|
|
||||||
|
void vdirsyncer_free_storage_get_result(VdirsyncerStorageGetResult *res);
|
||||||
|
|
||||||
|
void vdirsyncer_free_storage_listing(VdirsyncerStorageListing *listing);
|
||||||
|
|
||||||
|
void vdirsyncer_free_storage_upload_result(VdirsyncerStorageUploadResult *res);
|
||||||
|
|
||||||
|
void vdirsyncer_free_str(const char *s);
|
||||||
|
|
||||||
|
const char *vdirsyncer_get_hash(Item *c, ShippaiError **err);
|
||||||
|
|
||||||
|
const char *vdirsyncer_get_raw(Item *c);
|
||||||
|
|
||||||
|
const char *vdirsyncer_get_uid(Item *c);
|
||||||
|
|
||||||
|
Box_Storage *vdirsyncer_init_caldav(const char *url,
|
||||||
|
const char *username,
|
||||||
|
const char *password,
|
||||||
|
const char *useragent,
|
||||||
|
const char *verify_cert,
|
||||||
|
const char *auth_cert,
|
||||||
|
int64_t start_date,
|
||||||
|
int64_t end_date,
|
||||||
|
bool include_vevent,
|
||||||
|
bool include_vjournal,
|
||||||
|
bool include_vtodo);
|
||||||
|
|
||||||
|
Box_Storage *vdirsyncer_init_carddav(const char *url,
|
||||||
|
const char *username,
|
||||||
|
const char *password,
|
||||||
|
const char *useragent,
|
||||||
|
const char *verify_cert,
|
||||||
|
const char *auth_cert);
|
||||||
|
|
||||||
|
Box_Storage *vdirsyncer_init_filesystem(const char *path,
|
||||||
|
const char *fileext,
|
||||||
|
const char *post_hook);
|
||||||
|
|
||||||
|
Box_Storage *vdirsyncer_init_http(const char *url,
|
||||||
|
const char *username,
|
||||||
|
const char *password,
|
||||||
|
const char *useragent,
|
||||||
|
const char *verify_cert,
|
||||||
|
const char *auth_cert);
|
||||||
|
|
||||||
|
void vdirsyncer_init_logger(void);
|
||||||
|
|
||||||
|
Box_Storage *vdirsyncer_init_singlefile(const char *path);
|
||||||
|
|
||||||
|
Item *vdirsyncer_item_from_raw(const char *s);
|
||||||
|
|
||||||
|
bool vdirsyncer_item_is_parseable(Item *c);
|
||||||
|
|
||||||
|
void vdirsyncer_storage_buffered(Box_Storage *storage);
|
||||||
|
|
||||||
|
void vdirsyncer_storage_delete(Box_Storage *storage,
|
||||||
|
const char *c_href,
|
||||||
|
const char *c_etag,
|
||||||
|
ShippaiError **err);
|
||||||
|
|
||||||
|
void vdirsyncer_storage_flush(Box_Storage *storage, ShippaiError **err);
|
||||||
|
|
||||||
|
void vdirsyncer_storage_free(Box_Storage *storage);
|
||||||
|
|
||||||
|
VdirsyncerStorageGetResult *vdirsyncer_storage_get(Box_Storage *storage,
|
||||||
|
const char *c_href,
|
||||||
|
ShippaiError **err);
|
||||||
|
|
||||||
|
VdirsyncerStorageListing *vdirsyncer_storage_list(Box_Storage *storage, ShippaiError **err);
|
||||||
|
|
||||||
|
const char *vdirsyncer_storage_listing_get_etag(VdirsyncerStorageListing *listing);
|
||||||
|
|
||||||
|
const char *vdirsyncer_storage_listing_get_href(VdirsyncerStorageListing *listing);
|
||||||
|
|
||||||
|
const char *vdirsyncer_storage_update(Box_Storage *storage,
|
||||||
|
const char *c_href,
|
||||||
|
Item *item,
|
||||||
|
const char *c_etag,
|
||||||
|
ShippaiError **err);
|
||||||
|
|
||||||
|
VdirsyncerStorageUploadResult *vdirsyncer_storage_upload(Box_Storage *storage,
|
||||||
|
Item *item,
|
||||||
|
ShippaiError **err);
|
||||||
|
|
||||||
|
Item *vdirsyncer_with_uid(Item *c, const char *uid, ShippaiError **err);
|
||||||
|
|
@ -1,49 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
#
|
|
||||||
# This script is mean to be run inside a dedicated container,
|
|
||||||
# and not interatively.
|
|
||||||
|
|
||||||
set -ex
|
|
||||||
|
|
||||||
export DEBIAN_FRONTEND=noninteractive
|
|
||||||
|
|
||||||
apt-get update
|
|
||||||
apt-get install -y build-essential fakeroot debhelper git
|
|
||||||
apt-get install -y python3-all python3-pip python3-venv
|
|
||||||
apt-get install -y ruby ruby-dev
|
|
||||||
|
|
||||||
pip3 install virtualenv virtualenv-tools3
|
|
||||||
virtualenv -p python3 /vdirsyncer/env/
|
|
||||||
|
|
||||||
gem install fpm
|
|
||||||
|
|
||||||
# See https://github.com/jordansissel/fpm/issues/1106#issuecomment-461678970
|
|
||||||
pip3 uninstall -y virtualenv
|
|
||||||
echo 'python3 -m venv "$@"' > /usr/local/bin/virtualenv
|
|
||||||
chmod +x /usr/local/bin/virtualenv
|
|
||||||
|
|
||||||
cp -r /source/ /vdirsyncer/vdirsyncer/
|
|
||||||
cd /vdirsyncer/vdirsyncer/ || exit 2
|
|
||||||
mkdir /vdirsyncer/pkgs/
|
|
||||||
|
|
||||||
basename -- *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
|
|
||||||
# XXX: Do I really not want google support included?
|
|
||||||
(echo -n *.tar.gz; echo '[google]') | tee requirements.txt
|
|
||||||
fpm --verbose \
|
|
||||||
--input-type virtualenv \
|
|
||||||
--output-type deb \
|
|
||||||
--name "vdirsyncer-latest" \
|
|
||||||
--version "$(cat version)" \
|
|
||||||
--prefix /opt/venvs/vdirsyncer-latest \
|
|
||||||
--depends python3 \
|
|
||||||
requirements.txt
|
|
||||||
|
|
||||||
mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
|
|
||||||
|
|
||||||
cd /vdirsyncer/pkgs/
|
|
||||||
dpkg -i -- *.deb
|
|
||||||
|
|
||||||
# Check that it works:
|
|
||||||
LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
|
|
||||||
|
|
||||||
cp -- *.deb /source/
|
|
||||||
11
scripts/circleci-install.sh
Normal file
11
scripts/circleci-install.sh
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
echo "export PATH=$HOME/.cargo/bin/:$HOME/.local/bin/:$PATH" >> $BASH_ENV
|
||||||
|
. $BASH_ENV
|
||||||
|
|
||||||
|
make install-rust
|
||||||
|
sudo apt-get install -y cmake
|
||||||
|
|
||||||
|
pip install --user virtualenv
|
||||||
|
virtualenv ~/env
|
||||||
|
|
||||||
|
echo ". ~/env/bin/activate" >> $BASH_ENV
|
||||||
|
. $BASH_ENV
|
||||||
40
scripts/dpkg.Dockerfile
Normal file
40
scripts/dpkg.Dockerfile
Normal file
|
|
@ -0,0 +1,40 @@
|
||||||
|
ARG distro
|
||||||
|
ARG distrover
|
||||||
|
|
||||||
|
FROM $distro:$distrover
|
||||||
|
|
||||||
|
ARG distro
|
||||||
|
ARG distrover
|
||||||
|
|
||||||
|
RUN apt-get update
|
||||||
|
RUN apt-get install -y build-essential fakeroot debhelper git
|
||||||
|
RUN apt-get install -y python3-all python3-dev python3-pip
|
||||||
|
RUN apt-get install -y ruby ruby-dev
|
||||||
|
RUN apt-get install -y python-all python-pip
|
||||||
|
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
|
||||||
|
RUN apt-get install -y libssl-dev libffi-dev
|
||||||
|
ENV PATH="/root/.cargo/bin/:${PATH}"
|
||||||
|
|
||||||
|
RUN gem install fpm
|
||||||
|
|
||||||
|
RUN pip2 install virtualenv-tools
|
||||||
|
RUN pip3 install virtualenv
|
||||||
|
RUN virtualenv -p python3 /vdirsyncer/env/
|
||||||
|
|
||||||
|
COPY . /vdirsyncer/vdirsyncer/
|
||||||
|
WORKDIR /vdirsyncer/vdirsyncer/
|
||||||
|
RUN mkdir /vdirsyncer/pkgs/
|
||||||
|
|
||||||
|
RUN basename *.tar.gz .tar.gz | cut -d'-' -f2 | sed -e 's/\.dev/~/g' | tee version
|
||||||
|
RUN (echo -n *.tar.gz; echo '[google]') | tee requirements.txt
|
||||||
|
RUN . /vdirsyncer/env/bin/activate; fpm --verbose -s virtualenv -t deb \
|
||||||
|
-n "vdirsyncer-latest" \
|
||||||
|
-v "$(cat version)" \
|
||||||
|
--prefix /opt/venvs/vdirsyncer-latest \
|
||||||
|
requirements.txt
|
||||||
|
|
||||||
|
RUN mv /vdirsyncer/vdirsyncer/*.deb /vdirsyncer/pkgs/
|
||||||
|
|
||||||
|
WORKDIR /vdirsyncer/pkgs/
|
||||||
|
RUN dpkg -i *.deb
|
||||||
|
RUN LC_ALL=C.UTF-8 LANG=C.UTF-8 /opt/venvs/vdirsyncer-latest/bin/vdirsyncer --version
|
||||||
|
|
@ -1,56 +1,19 @@
|
||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
|
set -xe
|
||||||
|
distro=$1
|
||||||
|
distrover=$2
|
||||||
|
name=vdirsyncer-$distro-$distrover:latest
|
||||||
|
context="$(mktemp -d)"
|
||||||
|
|
||||||
set -xeu
|
python setup.py sdist -d "$context"
|
||||||
|
cp scripts/dpkg.Dockerfile "$context/Dockerfile"
|
||||||
|
|
||||||
SCRIPT_PATH=$(realpath "$0")
|
docker build \
|
||||||
SCRIPT_DIR=$(dirname "$SCRIPT_PATH")
|
--build-arg distro=$distro \
|
||||||
|
--build-arg distrover=$distrover \
|
||||||
|
-t $name \
|
||||||
|
"$context"
|
||||||
|
|
||||||
# E.g.: debian, ubuntu
|
docker run $name tar -c -C /vdirsyncer pkgs | tar x -C "$context"
|
||||||
DISTRO=${DISTRO:1}
|
package_cloud push pimutils/vdirsyncer/$distro/$distrover $context/pkgs/*.deb
|
||||||
# E.g.: bullseye, bookwork
|
rm -rf "$context"
|
||||||
DISTROVER=${DISTROVER:2}
|
|
||||||
CONTAINER_NAME="vdirsyncer-${DISTRO}-${DISTROVER}"
|
|
||||||
CONTEXT="$(mktemp -d)"
|
|
||||||
|
|
||||||
DEST_DIR="$SCRIPT_DIR/../$DISTRO-$DISTROVER"
|
|
||||||
|
|
||||||
cleanup() {
|
|
||||||
rm -rf "$CONTEXT"
|
|
||||||
}
|
|
||||||
trap cleanup EXIT
|
|
||||||
|
|
||||||
# Prepare files.
|
|
||||||
cp scripts/_build_deb_in_container.bash "$CONTEXT"
|
|
||||||
python setup.py sdist -d "$CONTEXT"
|
|
||||||
|
|
||||||
docker run -it \
|
|
||||||
--name "$CONTAINER_NAME" \
|
|
||||||
--volume "$CONTEXT:/source" \
|
|
||||||
"$DISTRO:$DISTROVER" \
|
|
||||||
bash /source/_build_deb_in_container.bash
|
|
||||||
|
|
||||||
# Keep around the package filename.
|
|
||||||
PACKAGE=$(ls "$CONTEXT"/*.deb)
|
|
||||||
PACKAGE=$(basename "$PACKAGE")
|
|
||||||
|
|
||||||
# Save the build deb files.
|
|
||||||
mkdir -p "$DEST_DIR"
|
|
||||||
cp "$CONTEXT"/*.deb "$DEST_DIR"
|
|
||||||
|
|
||||||
echo Build complete! 🤖
|
|
||||||
|
|
||||||
# Packagecloud uses some internal IDs for each distro.
|
|
||||||
# Extract the one for the distro we're publishing.
|
|
||||||
DISTRO_ID=$(
|
|
||||||
curl -s \
|
|
||||||
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/distributions.json | \
|
|
||||||
jq '.deb | .[] | select(.index_name=="'"$DISTRO"'") | .versions | .[] | select(.index_name=="'"$DISTROVER"'") | .id'
|
|
||||||
)
|
|
||||||
|
|
||||||
# Actually push the package.
|
|
||||||
curl \
|
|
||||||
-F "package[distro_version_id]=$DISTRO_ID" \
|
|
||||||
-F "package[package_file]=@$DEST_DIR/$PACKAGE" \
|
|
||||||
https://"$PACKAGECLOUD_TOKEN":@packagecloud.io/api/v1/repos/pimutils/vdirsyncer/packages.json
|
|
||||||
|
|
||||||
echo Done! ✨
|
|
||||||
|
|
|
||||||
11
setup.cfg
Normal file
11
setup.cfg
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
[tool:pytest]
|
||||||
|
norecursedirs = tests/storage/servers/*
|
||||||
|
addopts = --tb=short --duration 3
|
||||||
|
|
||||||
|
[flake8]
|
||||||
|
# E731: Use a def instead of lambda expr
|
||||||
|
# E743: Ambiguous function definition
|
||||||
|
ignore = E731, E743
|
||||||
|
select = C,E,F,W,B,B9
|
||||||
|
exclude = .eggs/, tests/storage/servers/nextcloud/, build/, vdirsyncer/_native*
|
||||||
|
application-package-names = tests,vdirsyncer
|
||||||
137
setup.py
Normal file
137
setup.py
Normal file
|
|
@ -0,0 +1,137 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
'''
|
||||||
|
Vdirsyncer synchronizes calendars and contacts.
|
||||||
|
|
||||||
|
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
|
||||||
|
how to package vdirsyncer.
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
import os
|
||||||
|
from setuptools import Command, find_packages, setup
|
||||||
|
|
||||||
|
milksnake = 'milksnake'
|
||||||
|
|
||||||
|
requirements = [
|
||||||
|
# https://github.com/mitsuhiko/click/issues/200
|
||||||
|
'click>=5.0',
|
||||||
|
'click-log>=0.3.0, <0.4.0',
|
||||||
|
|
||||||
|
# https://github.com/pimutils/vdirsyncer/issues/478
|
||||||
|
'click-threading>=0.2',
|
||||||
|
|
||||||
|
# !=2.9.0: https://github.com/kennethreitz/requests/issues/2930
|
||||||
|
#
|
||||||
|
# >=2.4.1: https://github.com/shazow/urllib3/pull/444
|
||||||
|
# Without the above pull request, `verify=False` also disables fingerprint
|
||||||
|
# validation. This is *not* what we want, and it's not possible to
|
||||||
|
# replicate vdirsyncer's current behavior (verifying fingerprints without
|
||||||
|
# verifying against CAs) with older versions of urllib3.
|
||||||
|
'requests >=2.4.1, !=2.9.0',
|
||||||
|
|
||||||
|
# https://github.com/sigmavirus24/requests-toolbelt/pull/28
|
||||||
|
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54
|
||||||
|
'requests_toolbelt >=0.4.0',
|
||||||
|
|
||||||
|
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
|
||||||
|
'atomicwrites>=0.1.7',
|
||||||
|
|
||||||
|
milksnake,
|
||||||
|
|
||||||
|
'shippai >= 0.2.3',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def build_native(spec):
|
||||||
|
cmd = ['cargo', 'build']
|
||||||
|
if os.environ.get('RUST_BACKTRACE', 'false') in ('true', '1', 'full'):
|
||||||
|
dylib_folder = 'target/debug'
|
||||||
|
else:
|
||||||
|
dylib_folder = 'target/release'
|
||||||
|
cmd.append('--release')
|
||||||
|
|
||||||
|
build = spec.add_external_build(cmd=cmd, path='./rust/')
|
||||||
|
|
||||||
|
spec.add_cffi_module(
|
||||||
|
module_path='vdirsyncer._native',
|
||||||
|
dylib=lambda: build.find_dylib('vdirsyncer_rustext',
|
||||||
|
in_path=dylib_folder),
|
||||||
|
header_filename='rust/vdirsyncer_rustext.h',
|
||||||
|
# Rust bug: If thread-local storage is used, this flag is necessary
|
||||||
|
# (mitsuhiko)
|
||||||
|
rtld_flags=['NOW', 'NODELETE']
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class PrintRequirements(Command):
|
||||||
|
description = 'Prints minimal requirements'
|
||||||
|
user_options = []
|
||||||
|
|
||||||
|
def initialize_options(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def finalize_options(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
for requirement in requirements:
|
||||||
|
print(requirement.replace(">", "=").replace(" ", ""))
|
||||||
|
|
||||||
|
|
||||||
|
with open('README.rst') as f:
|
||||||
|
long_description = f.read()
|
||||||
|
|
||||||
|
|
||||||
|
setup(
|
||||||
|
# General metadata
|
||||||
|
name='vdirsyncer',
|
||||||
|
author='Markus Unterwaditzer',
|
||||||
|
author_email='markus@unterwaditzer.net',
|
||||||
|
url='https://github.com/pimutils/vdirsyncer',
|
||||||
|
description='Synchronize calendars and contacts',
|
||||||
|
license='BSD',
|
||||||
|
long_description=long_description,
|
||||||
|
|
||||||
|
# Runtime dependencies
|
||||||
|
install_requires=requirements,
|
||||||
|
|
||||||
|
# Optional dependencies
|
||||||
|
extras_require={
|
||||||
|
'google': ['requests-oauthlib'],
|
||||||
|
'etesync': ['etesync']
|
||||||
|
},
|
||||||
|
|
||||||
|
# Build dependencies
|
||||||
|
setup_requires=[
|
||||||
|
'setuptools_scm != 1.12.0',
|
||||||
|
milksnake,
|
||||||
|
],
|
||||||
|
|
||||||
|
# Other
|
||||||
|
packages=find_packages(exclude=['tests.*', 'tests']),
|
||||||
|
include_package_data=True,
|
||||||
|
cmdclass={
|
||||||
|
'minimal_requirements': PrintRequirements
|
||||||
|
},
|
||||||
|
use_scm_version={
|
||||||
|
'write_to': 'vdirsyncer/version.py'
|
||||||
|
},
|
||||||
|
entry_points={
|
||||||
|
'console_scripts': ['vdirsyncer = vdirsyncer.cli:main']
|
||||||
|
},
|
||||||
|
classifiers=[
|
||||||
|
'Development Status :: 4 - Beta',
|
||||||
|
'Environment :: Console',
|
||||||
|
'License :: OSI Approved :: BSD License',
|
||||||
|
'Operating System :: POSIX',
|
||||||
|
'Programming Language :: Python :: 3',
|
||||||
|
'Programming Language :: Python :: 3.4',
|
||||||
|
'Programming Language :: Python :: 3.5',
|
||||||
|
'Programming Language :: Python :: 3.6',
|
||||||
|
'Topic :: Internet',
|
||||||
|
'Topic :: Utilities',
|
||||||
|
],
|
||||||
|
milksnake_tasks=[build_native],
|
||||||
|
zip_safe=False,
|
||||||
|
platforms='any'
|
||||||
|
)
|
||||||
4
test-requirements.txt
Normal file
4
test-requirements.txt
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
hypothesis>=3.1
|
||||||
|
pytest
|
||||||
|
pytest-localserver
|
||||||
|
pytest-subtesthack
|
||||||
|
|
@ -1,26 +1,29 @@
|
||||||
"""
|
# -*- coding: utf-8 -*-
|
||||||
|
'''
|
||||||
Test suite for vdirsyncer.
|
Test suite for vdirsyncer.
|
||||||
"""
|
'''
|
||||||
|
|
||||||
from __future__ import annotations
|
import random
|
||||||
|
|
||||||
import hypothesis.strategies as st
|
import hypothesis.strategies as st
|
||||||
import urllib3.exceptions
|
|
||||||
|
|
||||||
from vdirsyncer.vobject import normalize_item
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
|
import urllib3
|
||||||
|
import urllib3.exceptions
|
||||||
|
|
||||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||||
|
|
||||||
|
|
||||||
def blow_up(*a, **kw):
|
def blow_up(*a, **kw):
|
||||||
raise AssertionError("Did not expect to be called.")
|
raise AssertionError('Did not expect to be called.')
|
||||||
|
|
||||||
|
|
||||||
def assert_item_equals(a, b):
|
def assert_item_equals(a, b):
|
||||||
assert normalize_item(a) == normalize_item(b)
|
assert a.hash == b.hash
|
||||||
|
|
||||||
|
|
||||||
VCARD_TEMPLATE = """BEGIN:VCARD
|
VCARD_TEMPLATE = u'''BEGIN:VCARD
|
||||||
VERSION:3.0
|
VERSION:3.0
|
||||||
FN:Cyrus Daboo
|
FN:Cyrus Daboo
|
||||||
N:Daboo;Cyrus;;;
|
N:Daboo;Cyrus;;;
|
||||||
|
|
@ -34,9 +37,9 @@ TEL;TYPE=FAX:412 605 0705
|
||||||
URL;VALUE=URI:http://www.example.com
|
URL;VALUE=URI:http://www.example.com
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
END:VCARD"""
|
END:VCARD'''
|
||||||
|
|
||||||
TASK_TEMPLATE = """BEGIN:VCALENDAR
|
TASK_TEMPLATE = u'''BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//dmfs.org//mimedir.icalendar//EN
|
PRODID:-//dmfs.org//mimedir.icalendar//EN
|
||||||
BEGIN:VTODO
|
BEGIN:VTODO
|
||||||
|
|
@ -48,30 +51,26 @@ SUMMARY:Book: Kowlani - Tödlicher Staub
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
END:VTODO
|
END:VTODO
|
||||||
END:VCALENDAR"""
|
END:VCALENDAR'''
|
||||||
|
|
||||||
|
|
||||||
BARE_EVENT_TEMPLATE = """BEGIN:VEVENT
|
BARE_EVENT_TEMPLATE = u'''BEGIN:VEVENT
|
||||||
DTSTART:19970714T170000Z
|
DTSTART:19970714T170000Z
|
||||||
DTEND:19970715T035959Z
|
DTEND:19970715T035959Z
|
||||||
|
DTSTAMP:19970610T172345Z
|
||||||
SUMMARY:Bastille Day Party
|
SUMMARY:Bastille Day Party
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
END:VEVENT"""
|
END:VEVENT'''
|
||||||
|
|
||||||
|
|
||||||
EVENT_TEMPLATE = (
|
EVENT_TEMPLATE = u'''BEGIN:VCALENDAR
|
||||||
"""BEGIN:VCALENDAR
|
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
"""
|
''' + BARE_EVENT_TEMPLATE + u'''
|
||||||
+ BARE_EVENT_TEMPLATE
|
END:VCALENDAR'''
|
||||||
+ """
|
|
||||||
END:VCALENDAR"""
|
|
||||||
)
|
|
||||||
|
|
||||||
EVENT_WITH_TIMEZONE_TEMPLATE = (
|
EVENT_WITH_TIMEZONE_TEMPLATE = '''BEGIN:VCALENDAR
|
||||||
"""BEGIN:VCALENDAR
|
|
||||||
BEGIN:VTIMEZONE
|
BEGIN:VTIMEZONE
|
||||||
TZID:Europe/Rome
|
TZID:Europe/Rome
|
||||||
X-LIC-LOCATION:Europe/Rome
|
X-LIC-LOCATION:Europe/Rome
|
||||||
|
|
@ -90,21 +89,33 @@ DTSTART:19701025T030000
|
||||||
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
|
||||||
END:STANDARD
|
END:STANDARD
|
||||||
END:VTIMEZONE
|
END:VTIMEZONE
|
||||||
"""
|
''' + BARE_EVENT_TEMPLATE + '''
|
||||||
+ BARE_EVENT_TEMPLATE
|
END:VCALENDAR'''
|
||||||
+ """
|
|
||||||
END:VCALENDAR"""
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
SIMPLE_TEMPLATE = """BEGIN:FOO
|
SIMPLE_TEMPLATE = u'''BEGIN:FOO
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
X-SOMETHING:{r}
|
X-SOMETHING:{r}
|
||||||
HAHA:YES
|
HAHA:YES
|
||||||
END:FOO"""
|
END:FOO'''
|
||||||
|
|
||||||
printable_characters_strategy = st.text(st.characters(exclude_categories=("Cc", "Cs")))
|
printable_characters_strategy = st.text(
|
||||||
|
st.characters(blacklist_categories=(
|
||||||
|
'Cc', 'Cs'
|
||||||
|
))
|
||||||
|
)
|
||||||
|
|
||||||
uid_strategy = st.text(
|
uid_strategy = st.text(
|
||||||
st.characters(exclude_categories=("Zs", "Zl", "Zp", "Cc", "Cs")), min_size=1
|
st.characters(blacklist_categories=(
|
||||||
|
'Zs', 'Zl', 'Zp',
|
||||||
|
'Cc', 'Cs'
|
||||||
|
)),
|
||||||
|
min_size=1
|
||||||
).filter(lambda x: x.strip() == x)
|
).filter(lambda x: x.strip() == x)
|
||||||
|
|
||||||
|
|
||||||
|
def format_item(uid=None, item_template=VCARD_TEMPLATE):
|
||||||
|
# assert that special chars are handled correctly.
|
||||||
|
r = random.random()
|
||||||
|
uid = uid or r
|
||||||
|
return Item(item_template.format(r=r, uid=uid))
|
||||||
|
|
|
||||||
|
|
@ -1,70 +1,44 @@
|
||||||
"""
|
# -*- coding: utf-8 -*-
|
||||||
|
'''
|
||||||
General-purpose fixtures for vdirsyncer's testsuite.
|
General-purpose fixtures for vdirsyncer's testsuite.
|
||||||
"""
|
'''
|
||||||
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import click_log
|
import click_log
|
||||||
|
|
||||||
|
from hypothesis import HealthCheck, Verbosity, settings
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_asyncio
|
|
||||||
from hypothesis import HealthCheck
|
|
||||||
from hypothesis import Verbosity
|
|
||||||
from hypothesis import settings
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(autouse=True)
|
@pytest.fixture(autouse=True)
|
||||||
def setup_logging():
|
def setup_logging():
|
||||||
click_log.basic_config("vdirsyncer").setLevel(logging.DEBUG)
|
click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import pytest_benchmark
|
import pytest_benchmark
|
||||||
except ImportError:
|
except ImportError:
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def benchmark():
|
def benchmark():
|
||||||
return lambda x: x()
|
return lambda x: x()
|
||||||
|
|
||||||
else:
|
else:
|
||||||
del pytest_benchmark
|
del pytest_benchmark
|
||||||
|
|
||||||
|
|
||||||
settings.register_profile(
|
settings.suppress_health_check = [HealthCheck.too_slow]
|
||||||
"ci",
|
|
||||||
settings(
|
|
||||||
max_examples=1000,
|
|
||||||
verbosity=Verbosity.verbose,
|
|
||||||
suppress_health_check=[HealthCheck.too_slow],
|
|
||||||
),
|
|
||||||
)
|
|
||||||
settings.register_profile(
|
|
||||||
"deterministic",
|
|
||||||
settings(
|
|
||||||
derandomize=True,
|
|
||||||
suppress_health_check=list(HealthCheck),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
settings.register_profile("dev", settings(suppress_health_check=[HealthCheck.too_slow]))
|
|
||||||
|
|
||||||
if os.environ.get("DETERMINISTIC_TESTS", "false").lower() == "true":
|
settings.register_profile("ci", settings(
|
||||||
|
max_examples=1000,
|
||||||
|
verbosity=Verbosity.verbose,
|
||||||
|
))
|
||||||
|
settings.register_profile("deterministic", settings(
|
||||||
|
derandomize=True,
|
||||||
|
perform_health_check=False
|
||||||
|
))
|
||||||
|
|
||||||
|
if os.environ.get('DETERMINISTIC_TESTS', 'false').lower() == 'true':
|
||||||
settings.load_profile("deterministic")
|
settings.load_profile("deterministic")
|
||||||
elif os.environ.get("CI", "false").lower() == "true":
|
elif os.environ.get('CI', 'false').lower() == 'true':
|
||||||
settings.load_profile("ci")
|
settings.load_profile("ci")
|
||||||
else:
|
|
||||||
settings.load_profile("dev")
|
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
|
||||||
async def aio_session():
|
|
||||||
async with aiohttp.ClientSession() as session:
|
|
||||||
yield session
|
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
|
||||||
async def aio_connector():
|
|
||||||
async with aiohttp.TCPConnector(limit_per_host=16) as conn:
|
|
||||||
yield conn
|
|
||||||
|
|
|
||||||
|
|
@ -1,390 +1,280 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import random
|
|
||||||
import textwrap
|
|
||||||
import uuid
|
import uuid
|
||||||
from urllib.parse import quote as urlquote
|
|
||||||
from urllib.parse import unquote as urlunquote
|
|
||||||
|
|
||||||
import aiostream
|
import textwrap
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_asyncio
|
|
||||||
|
|
||||||
from tests import EVENT_TEMPLATE
|
|
||||||
from tests import TASK_TEMPLATE
|
|
||||||
from tests import VCARD_TEMPLATE
|
|
||||||
from tests import assert_item_equals
|
|
||||||
from tests import normalize_item
|
|
||||||
from vdirsyncer import exceptions
|
from vdirsyncer import exceptions
|
||||||
from vdirsyncer.storage.base import normalize_meta_value
|
from vdirsyncer.storage.base import normalize_meta_value
|
||||||
from vdirsyncer.vobject import Item
|
from vdirsyncer.vobject import Item
|
||||||
|
|
||||||
|
from .. import EVENT_TEMPLATE, TASK_TEMPLATE, VCARD_TEMPLATE, \
|
||||||
|
assert_item_equals, format_item
|
||||||
|
|
||||||
|
|
||||||
def get_server_mixin(server_name):
|
def get_server_mixin(server_name):
|
||||||
from . import __name__ as base
|
from . import __name__ as base
|
||||||
|
x = __import__('{}.servers.{}'.format(base, server_name), fromlist=[''])
|
||||||
x = __import__(f"{base}.servers.{server_name}", fromlist=[""])
|
|
||||||
return x.ServerMixin
|
return x.ServerMixin
|
||||||
|
|
||||||
|
|
||||||
def format_item(item_template, uid=None):
|
class StorageTests(object):
|
||||||
# assert that special chars are handled correctly.
|
|
||||||
r = random.random()
|
|
||||||
return Item(item_template.format(r=r, uid=uid or r))
|
|
||||||
|
|
||||||
|
|
||||||
class StorageTests:
|
|
||||||
storage_class = None
|
storage_class = None
|
||||||
supports_collections = True
|
supports_collections = True
|
||||||
supports_metadata = True
|
supports_metadata = True
|
||||||
|
|
||||||
@pytest.fixture(params=["VEVENT", "VTODO", "VCARD"])
|
@pytest.fixture(params=['VEVENT', 'VTODO', 'VCARD'])
|
||||||
def item_type(self, request):
|
def item_type(self, request):
|
||||||
"""Parametrize with all supported item types."""
|
'''Parametrize with all supported item types.'''
|
||||||
return request.param
|
return request.param
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self):
|
def get_storage_args(self):
|
||||||
"""
|
'''
|
||||||
Return a function with the following properties:
|
Return a function with the following properties:
|
||||||
|
|
||||||
:param collection: The name of the collection to create and use.
|
:param collection: The name of the collection to create and use.
|
||||||
"""
|
'''
|
||||||
raise NotImplementedError
|
raise NotImplementedError()
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
@pytest.fixture
|
||||||
async def s(self, get_storage_args):
|
def s(self, get_storage_args):
|
||||||
rv = self.storage_class(**await get_storage_args())
|
return self.storage_class(**get_storage_args())
|
||||||
return rv
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_item(self, item_type):
|
def get_item(self, item_type):
|
||||||
template = {
|
template = {
|
||||||
"VEVENT": EVENT_TEMPLATE,
|
'VEVENT': EVENT_TEMPLATE,
|
||||||
"VTODO": TASK_TEMPLATE,
|
'VTODO': TASK_TEMPLATE,
|
||||||
"VCARD": VCARD_TEMPLATE,
|
'VCARD': VCARD_TEMPLATE,
|
||||||
}[item_type]
|
}[item_type]
|
||||||
|
|
||||||
return lambda **kw: format_item(template, **kw)
|
return lambda **kw: format_item(item_template=template, **kw)
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def requires_collections(self):
|
def requires_collections(self):
|
||||||
if not self.supports_collections:
|
if not self.supports_collections:
|
||||||
pytest.skip("This storage does not support collections.")
|
pytest.skip('This storage does not support collections.')
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def requires_metadata(self):
|
def requires_metadata(self):
|
||||||
if not self.supports_metadata:
|
if not self.supports_metadata:
|
||||||
pytest.skip("This storage does not support metadata.")
|
pytest.skip('This storage does not support metadata.')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_generic(self, s, get_item):
|
||||||
async def test_generic(self, s, get_item):
|
|
||||||
items = [get_item() for i in range(1, 10)]
|
items = [get_item() for i in range(1, 10)]
|
||||||
hrefs = []
|
hrefs = []
|
||||||
for item in items:
|
for item in items:
|
||||||
href, etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = await s.get(href)
|
_, etag = s.get(href)
|
||||||
hrefs.append((href, etag))
|
hrefs.append((href, etag))
|
||||||
hrefs.sort()
|
hrefs.sort()
|
||||||
assert hrefs == sorted(await aiostream.stream.list(s.list()))
|
assert hrefs == sorted(s.list())
|
||||||
for href, etag in hrefs:
|
for href, etag in hrefs:
|
||||||
assert isinstance(href, (str, bytes))
|
assert isinstance(href, (str, bytes))
|
||||||
assert isinstance(etag, (str, bytes))
|
assert isinstance(etag, (str, bytes))
|
||||||
assert await s.has(href)
|
assert s.has(href)
|
||||||
item, etag2 = await s.get(href)
|
item, etag2 = s.get(href)
|
||||||
assert etag == etag2
|
assert etag == etag2
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_empty_get_multi(self, s):
|
||||||
async def test_empty_get_multi(self, s):
|
assert list(s.get_multi([])) == []
|
||||||
assert await aiostream.stream.list(s.get_multi([])) == []
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_get_multi_duplicates(self, s, get_item):
|
||||||
async def test_get_multi_duplicates(self, s, get_item):
|
href, etag = s.upload(get_item())
|
||||||
href, etag = await s.upload(get_item())
|
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = await s.get(href)
|
_, etag = s.get(href)
|
||||||
((href2, _item, etag2),) = await aiostream.stream.list(s.get_multi([href] * 2))
|
(href2, item, etag2), = s.get_multi([href] * 2)
|
||||||
assert href2 == href
|
assert href2 == href
|
||||||
assert etag2 == etag
|
assert etag2 == etag
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_upload_already_existing(self, s, get_item):
|
||||||
async def test_upload_already_existing(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
await s.upload(item)
|
s.upload(item)
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.upload(item)
|
s.upload(item)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_upload(self, s, get_item):
|
||||||
async def test_upload(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, _etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
assert_item_equals((await s.get(href))[0], item)
|
assert_item_equals(s.get(href)[0], item)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_update(self, s, get_item):
|
||||||
async def test_update(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = await s.get(href)
|
_, etag = s.get(href)
|
||||||
assert_item_equals((await s.get(href))[0], item)
|
assert_item_equals(s.get(href)[0], item)
|
||||||
|
|
||||||
new_item = get_item(uid=item.uid)
|
new_item = get_item(uid=item.uid)
|
||||||
new_etag = await s.update(href, new_item, etag)
|
new_etag = s.update(href, new_item, etag)
|
||||||
if new_etag is None:
|
if new_etag is None:
|
||||||
_, new_etag = await s.get(href)
|
_, new_etag = s.get(href)
|
||||||
# See https://github.com/pimutils/vdirsyncer/issues/48
|
# See https://github.com/pimutils/vdirsyncer/issues/48
|
||||||
assert isinstance(new_etag, (bytes, str))
|
assert isinstance(new_etag, (bytes, str))
|
||||||
assert_item_equals((await s.get(href))[0], new_item)
|
assert_item_equals(s.get(href)[0], new_item)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_update_nonexisting(self, s, get_item):
|
||||||
async def test_update_nonexisting(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.update("huehue", item, '"123"')
|
s.update('huehue', item, '"123"')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_wrong_etag(self, s, get_item):
|
||||||
async def test_wrong_etag(self, s, get_item):
|
|
||||||
item = get_item()
|
item = get_item()
|
||||||
href, _etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.update(href, item, '"lolnope"')
|
s.update(href, item, '"lolnope"')
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
await s.delete(href, '"lolnope"')
|
s.delete(href, '"lolnope"')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_delete(self, s, get_item):
|
||||||
async def test_delete(self, s, get_item):
|
href, etag = s.upload(get_item())
|
||||||
href, etag = await s.upload(get_item())
|
|
||||||
await s.delete(href, etag)
|
|
||||||
assert not await aiostream.stream.list(s.list())
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_delete_nonexisting(self, s, get_item):
|
|
||||||
with pytest.raises(exceptions.PreconditionFailed):
|
|
||||||
await s.delete("1", '"123"')
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_list(self, s, get_item):
|
|
||||||
assert not await aiostream.stream.list(s.list())
|
|
||||||
href, etag = await s.upload(get_item())
|
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = await s.get(href)
|
_, etag = s.get(href)
|
||||||
assert await aiostream.stream.list(s.list()) == [(href, etag)]
|
s.delete(href, etag)
|
||||||
|
assert not list(s.list())
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_delete_nonexisting(self, s, get_item):
|
||||||
async def test_has(self, s, get_item):
|
with pytest.raises(exceptions.PreconditionFailed):
|
||||||
assert not await s.has("asd")
|
s.delete('1', '"123"')
|
||||||
href, etag = await s.upload(get_item())
|
|
||||||
assert await s.has(href)
|
|
||||||
assert not await s.has("asd")
|
|
||||||
await s.delete(href, etag)
|
|
||||||
assert not await s.has(href)
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_list(self, s, get_item):
|
||||||
async def test_update_others_stay_the_same(self, s, get_item):
|
assert not list(s.list())
|
||||||
|
href, etag = s.upload(get_item())
|
||||||
|
if etag is None:
|
||||||
|
_, etag = s.get(href)
|
||||||
|
assert list(s.list()) == [(href, etag)]
|
||||||
|
|
||||||
|
def test_has(self, s, get_item):
|
||||||
|
assert not s.has('asd')
|
||||||
|
href, etag = s.upload(get_item())
|
||||||
|
if etag is None:
|
||||||
|
_, etag = s.get(href)
|
||||||
|
assert s.has(href)
|
||||||
|
assert not s.has('asd')
|
||||||
|
s.delete(href, etag)
|
||||||
|
assert not s.has(href)
|
||||||
|
|
||||||
|
def test_update_others_stay_the_same(self, s, get_item):
|
||||||
info = {}
|
info = {}
|
||||||
for _ in range(4):
|
for _ in range(4):
|
||||||
href, etag = await s.upload(get_item())
|
href, etag = s.upload(get_item())
|
||||||
if etag is None:
|
if etag is None:
|
||||||
_, etag = await s.get(href)
|
_, etag = s.get(href)
|
||||||
info[href] = etag
|
info[href] = etag
|
||||||
|
|
||||||
items = await aiostream.stream.list(
|
assert dict(
|
||||||
s.get_multi(href for href, etag in info.items())
|
(href, etag) for href, item, etag
|
||||||
)
|
in s.get_multi(href for href, etag in info.items())
|
||||||
assert {href: etag for href, item, etag in items} == info
|
) == info
|
||||||
|
|
||||||
def test_repr(self, s):
|
def test_repr(self, s, get_storage_args):
|
||||||
assert self.storage_class.__name__ in repr(s)
|
assert self.storage_class.__name__ in repr(s)
|
||||||
assert s.instance_name is None
|
assert s.instance_name is None
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_discover(self, requires_collections, get_storage_args, get_item):
|
||||||
async def test_discover(
|
|
||||||
self,
|
|
||||||
requires_collections,
|
|
||||||
get_storage_args,
|
|
||||||
get_item,
|
|
||||||
aio_connector,
|
|
||||||
):
|
|
||||||
collections = set()
|
collections = set()
|
||||||
for i in range(1, 5):
|
for i in range(1, 5):
|
||||||
collection = f"test{i}"
|
collection = 'test{}'.format(i)
|
||||||
s = self.storage_class(**await get_storage_args(collection=collection))
|
s = self.storage_class(**get_storage_args(collection=collection))
|
||||||
assert not await aiostream.stream.list(s.list())
|
assert not list(s.list())
|
||||||
await s.upload(get_item())
|
s.upload(get_item())
|
||||||
collections.add(s.collection)
|
collections.add(s.collection)
|
||||||
|
|
||||||
discovered = await aiostream.stream.list(
|
actual = set(
|
||||||
self.storage_class.discover(**await get_storage_args(collection=None))
|
c['collection'] for c in
|
||||||
|
self.storage_class.discover(**get_storage_args(collection=None))
|
||||||
)
|
)
|
||||||
actual = {c["collection"] for c in discovered}
|
|
||||||
|
|
||||||
assert actual >= collections
|
assert actual >= collections
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_create_collection(self, requires_collections, get_storage_args,
|
||||||
async def test_create_collection(
|
get_item):
|
||||||
self,
|
if getattr(self, 'dav_server', '') in \
|
||||||
requires_collections,
|
('icloud', 'fastmail', 'davical'):
|
||||||
get_storage_args,
|
pytest.skip('Manual cleanup would be necessary.')
|
||||||
get_item,
|
|
||||||
):
|
|
||||||
if getattr(self, "dav_server", "") in ("icloud", "fastmail", "davical"):
|
|
||||||
pytest.skip("Manual cleanup would be necessary.")
|
|
||||||
if getattr(self, "dav_server", "") == "radicale":
|
|
||||||
pytest.skip("Radicale does not support collection creation")
|
|
||||||
|
|
||||||
args = await get_storage_args(collection=None)
|
args = get_storage_args(collection=None)
|
||||||
args["collection"] = "test"
|
args['collection'] = 'test'
|
||||||
|
|
||||||
s = self.storage_class(**await self.storage_class.create_collection(**args))
|
s = self.storage_class(
|
||||||
|
**self.storage_class.create_collection(**args)
|
||||||
href = (await s.upload(get_item()))[0]
|
|
||||||
assert href in await aiostream.stream.list(
|
|
||||||
(href async for href, etag in s.list())
|
|
||||||
)
|
)
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
href = s.upload(get_item())[0]
|
||||||
async def test_discover_collection_arg(
|
assert href in set(href for href, etag in s.list())
|
||||||
self, requires_collections, get_storage_args
|
|
||||||
):
|
def test_discover_collection_arg(self, requires_collections,
|
||||||
args = await get_storage_args(collection="test2")
|
get_storage_args):
|
||||||
|
args = get_storage_args(collection='test2')
|
||||||
with pytest.raises(TypeError) as excinfo:
|
with pytest.raises(TypeError) as excinfo:
|
||||||
await aiostream.stream.list(self.storage_class.discover(**args))
|
list(self.storage_class.discover(**args))
|
||||||
|
|
||||||
assert "collection argument must not be given" in str(excinfo.value)
|
assert 'collection argument must not be given' in str(excinfo.value)
|
||||||
|
|
||||||
|
def test_collection_arg(self, get_storage_args):
|
||||||
|
if self.storage_class.storage_name.startswith('etesync'):
|
||||||
|
pytest.skip('etesync uses UUIDs.')
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_collection_arg(self, get_storage_args):
|
|
||||||
if self.supports_collections:
|
if self.supports_collections:
|
||||||
s = self.storage_class(**await get_storage_args(collection="test2"))
|
s = self.storage_class(**get_storage_args(collection='test2'))
|
||||||
# Can't do stronger assertion because of radicale, which needs a
|
# Can't do stronger assertion because of radicale, which needs a
|
||||||
# fileextension to guess the collection type.
|
# fileextension to guess the collection type.
|
||||||
assert "test2" in s.collection
|
assert 'test2' in s.collection
|
||||||
else:
|
else:
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError):
|
||||||
self.storage_class(collection="ayy", **await get_storage_args())
|
self.storage_class(collection='ayy', **get_storage_args())
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_case_sensitive_uids(self, s, get_item):
|
||||||
async def test_case_sensitive_uids(self, s, get_item):
|
if s.storage_name == 'filesystem':
|
||||||
if s.storage_name == "filesystem":
|
pytest.skip('Behavior depends on the filesystem.')
|
||||||
pytest.skip("Behavior depends on the filesystem.")
|
|
||||||
|
|
||||||
uid = str(uuid.uuid4())
|
uid = str(uuid.uuid4())
|
||||||
await s.upload(get_item(uid=uid.upper()))
|
s.upload(get_item(uid=uid.upper()))
|
||||||
await s.upload(get_item(uid=uid.lower()))
|
s.upload(get_item(uid=uid.lower()))
|
||||||
items = [href async for href, etag in s.list()]
|
items = list(href for href, etag in s.list())
|
||||||
assert len(items) == 2
|
assert len(items) == 2
|
||||||
assert len(set(items)) == 2
|
assert len(set(items)) == 2
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_metadata(self, requires_metadata, s):
|
||||||
async def test_specialchars(
|
if not getattr(self, 'dav_server', ''):
|
||||||
self, monkeypatch, requires_collections, get_storage_args, get_item
|
assert not s.get_meta('color')
|
||||||
):
|
assert not s.get_meta('displayname')
|
||||||
if getattr(self, "dav_server", "") in ("icloud", "fastmail"):
|
|
||||||
pytest.skip("iCloud and FastMail reject this name.")
|
|
||||||
|
|
||||||
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
|
||||||
|
|
||||||
uid = "test @ foo ät bar град сатану"
|
|
||||||
collection = "test @ foo ät bar"
|
|
||||||
|
|
||||||
s = self.storage_class(**await get_storage_args(collection=collection))
|
|
||||||
item = get_item(uid=uid)
|
|
||||||
|
|
||||||
href, etag = await s.upload(item)
|
|
||||||
item2, etag2 = await s.get(href)
|
|
||||||
if etag is not None:
|
|
||||||
assert etag2 == etag
|
|
||||||
assert_item_equals(item2, item)
|
|
||||||
|
|
||||||
((_, etag3),) = await aiostream.stream.list(s.list())
|
|
||||||
assert etag2 == etag3
|
|
||||||
|
|
||||||
assert collection in urlunquote(s.collection)
|
|
||||||
if self.storage_class.storage_name.endswith("dav"):
|
|
||||||
assert urlquote(uid, "/@:") in href
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_newline_in_uid(
|
|
||||||
self, monkeypatch, requires_collections, get_storage_args, get_item
|
|
||||||
):
|
|
||||||
monkeypatch.setattr("vdirsyncer.utils.generate_href", lambda x: x)
|
|
||||||
|
|
||||||
uid = "UID:20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%0A.ics"
|
|
||||||
|
|
||||||
s = self.storage_class(**await get_storage_args())
|
|
||||||
item = get_item(uid=uid)
|
|
||||||
|
|
||||||
href, etag = await s.upload(item)
|
|
||||||
item2, etag2 = await s.get(href)
|
|
||||||
if etag is not None:
|
|
||||||
assert etag2 == etag
|
|
||||||
assert_item_equals(item2, item)
|
|
||||||
|
|
||||||
((_, etag3),) = await aiostream.stream.list(s.list())
|
|
||||||
assert etag2 == etag3
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_empty_metadata(self, requires_metadata, s):
|
|
||||||
if getattr(self, "dav_server", ""):
|
|
||||||
pytest.skip()
|
|
||||||
|
|
||||||
assert await s.get_meta("color") is None
|
|
||||||
assert await s.get_meta("displayname") is None
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_metadata(self, requires_metadata, s):
|
|
||||||
if getattr(self, "dav_server", "") == "xandikos":
|
|
||||||
pytest.skip("xandikos does not support removing metadata.")
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await s.set_meta("color", None)
|
s.set_meta('color', None)
|
||||||
assert await s.get_meta("color") is None
|
assert not s.get_meta('color')
|
||||||
await s.set_meta("color", "#ff0000")
|
s.set_meta('color', u'#ff0000')
|
||||||
assert await s.get_meta("color") == "#ff0000"
|
assert s.get_meta('color') == u'#ff0000'
|
||||||
except exceptions.UnsupportedMetadataError:
|
except exceptions.UnsupportedMetadataError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
for x in (u'hello world', u'hello wörld'):
|
||||||
async def test_encoding_metadata(self, requires_metadata, s):
|
s.set_meta('displayname', x)
|
||||||
for x in ("hello world", "hello wörld"):
|
rv = s.get_meta('displayname')
|
||||||
await s.set_meta("displayname", x)
|
|
||||||
rv = await s.get_meta("displayname")
|
|
||||||
assert rv == x
|
assert rv == x
|
||||||
assert isinstance(rv, str)
|
assert isinstance(rv, str)
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize('value', [
|
||||||
"value",
|
'fööbör',
|
||||||
[
|
'ананасовое перо'
|
||||||
None,
|
])
|
||||||
"",
|
def test_metadata_normalization(self, requires_metadata, s, value):
|
||||||
"Hello there!",
|
x = s.get_meta('displayname')
|
||||||
"Österreich",
|
|
||||||
"中国",
|
|
||||||
"한글",
|
|
||||||
"42a4ec99-b1c2-4859-b142-759112f2ca50",
|
|
||||||
"فلسطين",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_metadata_normalization(self, requires_metadata, s, value):
|
|
||||||
x = await s.get_meta("displayname")
|
|
||||||
assert x == normalize_meta_value(x)
|
assert x == normalize_meta_value(x)
|
||||||
|
|
||||||
if not getattr(self, "dav_server", None):
|
s.set_meta('displayname', value)
|
||||||
# ownCloud replaces "" with "unnamed"
|
assert s.get_meta('displayname') == normalize_meta_value(value)
|
||||||
await s.set_meta("displayname", value)
|
|
||||||
assert await s.get_meta("displayname") == normalize_meta_value(value)
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_recurring_events(self, s, item_type):
|
||||||
async def test_recurring_events(self, s, item_type):
|
if item_type != 'VEVENT':
|
||||||
if item_type != "VEVENT":
|
pytest.skip('This storage instance doesn\'t support iCalendar.')
|
||||||
pytest.skip("This storage instance doesn't support iCalendar.")
|
|
||||||
|
|
||||||
uid = str(uuid.uuid4())
|
uid = str(uuid.uuid4())
|
||||||
item = Item(
|
item = Item(textwrap.dedent(u'''
|
||||||
textwrap.dedent(
|
|
||||||
f"""
|
|
||||||
BEGIN:VCALENDAR
|
BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
BEGIN:VEVENT
|
BEGIN:VEVENT
|
||||||
|
|
@ -405,7 +295,7 @@ class StorageTests:
|
||||||
BEGIN:VEVENT
|
BEGIN:VEVENT
|
||||||
DTSTART;TZID=UTC:20140128T083000Z
|
DTSTART;TZID=UTC:20140128T083000Z
|
||||||
DTEND;TZID=UTC:20140128T100000Z
|
DTEND;TZID=UTC:20140128T100000Z
|
||||||
RRULE:FREQ=WEEKLY;BYDAY=TU;UNTIL=20141208T213000Z
|
RRULE:FREQ=WEEKLY;UNTIL=20141208T213000Z;BYDAY=TU
|
||||||
DTSTAMP:20140327T060506Z
|
DTSTAMP:20140327T060506Z
|
||||||
UID:{uid}
|
UID:{uid}
|
||||||
CREATED:20131216T033331Z
|
CREATED:20131216T033331Z
|
||||||
|
|
@ -418,11 +308,65 @@ class StorageTests:
|
||||||
TRANSP:OPAQUE
|
TRANSP:OPAQUE
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
END:VCALENDAR
|
END:VCALENDAR
|
||||||
"""
|
'''.format(uid=uid)).strip())
|
||||||
).strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
href, _etag = await s.upload(item)
|
href, etag = s.upload(item)
|
||||||
|
|
||||||
item2, _etag2 = await s.get(href)
|
item2, etag2 = s.get(href)
|
||||||
assert normalize_item(item) == normalize_item(item2)
|
assert item2.raw.count('BEGIN:VEVENT') == 2
|
||||||
|
assert 'RRULE' in item2.raw
|
||||||
|
|
||||||
|
def test_buffered(self, get_storage_args, get_item, requires_collections):
|
||||||
|
args = get_storage_args()
|
||||||
|
s1 = self.storage_class(**args)
|
||||||
|
s2 = self.storage_class(**args)
|
||||||
|
s1.upload(get_item())
|
||||||
|
assert sorted(list(s1.list())) == sorted(list(s2.list()))
|
||||||
|
|
||||||
|
s1.buffered()
|
||||||
|
s1.upload(get_item())
|
||||||
|
s1.flush()
|
||||||
|
assert sorted(list(s1.list())) == sorted(list(s2.list()))
|
||||||
|
|
||||||
|
def test_retain_timezones(self, item_type, s):
|
||||||
|
if item_type != 'VEVENT':
|
||||||
|
pytest.skip('This storage instance doesn\'t support iCalendar.')
|
||||||
|
|
||||||
|
item = Item(textwrap.dedent('''
|
||||||
|
BEGIN:VCALENDAR
|
||||||
|
PRODID:-//ownCloud calendar v1.4.0
|
||||||
|
VERSION:2.0
|
||||||
|
CALSCALE:GREGORIAN
|
||||||
|
BEGIN:VEVENT
|
||||||
|
CREATED:20161004T110533
|
||||||
|
DTSTAMP:20161004T110533
|
||||||
|
LAST-MODIFIED:20161004T110533
|
||||||
|
UID:y2lmgz48mg
|
||||||
|
SUMMARY:Test
|
||||||
|
CLASS:PUBLIC
|
||||||
|
STATUS:CONFIRMED
|
||||||
|
DTSTART;TZID=Europe/Berlin:20161014T101500
|
||||||
|
DTEND;TZID=Europe/Berlin:20161014T114500
|
||||||
|
END:VEVENT
|
||||||
|
BEGIN:VTIMEZONE
|
||||||
|
TZID:Europe/Berlin
|
||||||
|
BEGIN:DAYLIGHT
|
||||||
|
DTSTART:20160327T030000
|
||||||
|
TZNAME:CEST
|
||||||
|
TZOFFSETFROM:+0100
|
||||||
|
TZOFFSETTO:+0200
|
||||||
|
END:DAYLIGHT
|
||||||
|
BEGIN:STANDARD
|
||||||
|
DTSTART:20161030T020000
|
||||||
|
TZNAME:CET
|
||||||
|
TZOFFSETFROM:+0200
|
||||||
|
TZOFFSETTO:+0100
|
||||||
|
END:STANDARD
|
||||||
|
END:VTIMEZONE
|
||||||
|
END:VCALENDAR
|
||||||
|
''').strip())
|
||||||
|
|
||||||
|
href, etag = s.upload(item)
|
||||||
|
item2, _ = s.get(href)
|
||||||
|
assert 'VTIMEZONE' in item2.raw
|
||||||
|
assert item2.hash == item.hash
|
||||||
|
|
|
||||||
|
|
@ -1,116 +1,36 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
import asyncio
|
|
||||||
import contextlib
|
|
||||||
import subprocess
|
|
||||||
import time
|
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
import aiostream
|
|
||||||
import pytest
|
|
||||||
import pytest_asyncio
|
|
||||||
import requests
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
def wait_for_container(url):
|
def slow_create_collection(request):
|
||||||
"""Wait for a container to initialise.
|
|
||||||
|
|
||||||
Polls a URL every 100ms until the server responds.
|
|
||||||
"""
|
|
||||||
# give the server 5 seconds to settle
|
|
||||||
for _ in range(50):
|
|
||||||
print(_)
|
|
||||||
|
|
||||||
try:
|
|
||||||
response = requests.get(url)
|
|
||||||
response.raise_for_status()
|
|
||||||
except requests.ConnectionError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
return
|
|
||||||
|
|
||||||
time.sleep(0.1)
|
|
||||||
|
|
||||||
pytest.exit(
|
|
||||||
"Server did not initialise in 5 seconds.\n"
|
|
||||||
"WARNING: There may be a stale docker container still running."
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def dockerised_server(name, container_port, exposed_port):
|
|
||||||
"""Run a dockerised DAV server as a contenxt manager."""
|
|
||||||
container_id = None
|
|
||||||
url = f"http://127.0.0.1:{exposed_port}/"
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Hint: This will block while the pull happends, and only return once
|
|
||||||
# the container has actually started.
|
|
||||||
output = subprocess.check_output(
|
|
||||||
[
|
|
||||||
"docker",
|
|
||||||
"run",
|
|
||||||
"--rm",
|
|
||||||
"--detach",
|
|
||||||
"--publish",
|
|
||||||
f"{exposed_port}:{container_port}",
|
|
||||||
f"whynothugo/vdirsyncer-devkit-{name}",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
container_id = output.decode().strip()
|
|
||||||
wait_for_container(url)
|
|
||||||
|
|
||||||
yield url
|
|
||||||
finally:
|
|
||||||
if container_id:
|
|
||||||
subprocess.check_output(["docker", "kill", container_id])
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
|
||||||
def baikal_server():
|
|
||||||
with dockerised_server("baikal", "80", "8002"):
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
|
||||||
def radicale_server():
|
|
||||||
with dockerised_server("radicale", "8001", "8001"):
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
|
||||||
def xandikos_server():
|
|
||||||
with dockerised_server("xandikos", "8000", "8000"):
|
|
||||||
yield
|
|
||||||
|
|
||||||
|
|
||||||
@pytest_asyncio.fixture
|
|
||||||
async def slow_create_collection(request, aio_connector):
|
|
||||||
# We need to properly clean up because otherwise we might run into
|
# We need to properly clean up because otherwise we might run into
|
||||||
# storage limits.
|
# storage limits.
|
||||||
to_delete = []
|
to_delete = []
|
||||||
|
|
||||||
async def inner(cls: type, args: dict, collection_name: str) -> dict:
|
def delete_collections():
|
||||||
"""Create a collection
|
for s in to_delete:
|
||||||
|
s.session.request('DELETE', '')
|
||||||
|
|
||||||
Returns args necessary to create a Storage instance pointing to it.
|
request.addfinalizer(delete_collections)
|
||||||
"""
|
|
||||||
assert collection_name.startswith("test")
|
|
||||||
|
|
||||||
# Make each name unique
|
def inner(cls, args, collection):
|
||||||
collection_name = f"{collection_name}-vdirsyncer-ci-{uuid.uuid4()}"
|
assert collection.startswith('test')
|
||||||
|
collection += '-vdirsyncer-ci-' + str(uuid.uuid4())
|
||||||
|
|
||||||
# Create the collection:
|
args = cls.create_collection(collection, **args)
|
||||||
args = await cls.create_collection(collection_name, **args)
|
s = cls(**args)
|
||||||
collection = cls(**args)
|
_clear_collection(s)
|
||||||
|
assert not list(s.list())
|
||||||
# Keep collection in a list to be deleted once tests end:
|
to_delete.append(s)
|
||||||
to_delete.append(collection)
|
|
||||||
|
|
||||||
assert not await aiostream.stream.list(collection.list())
|
|
||||||
return args
|
return args
|
||||||
|
|
||||||
yield inner
|
return inner
|
||||||
|
|
||||||
await asyncio.gather(*(c.session.request("DELETE", "") for c in to_delete))
|
|
||||||
|
def _clear_collection(s):
|
||||||
|
for href, etag in s.list():
|
||||||
|
s.delete(href, etag)
|
||||||
|
|
|
||||||
|
|
@ -1,53 +1,25 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import uuid
|
|
||||||
|
|
||||||
import aiohttp
|
from .. import StorageTests, get_server_mixin
|
||||||
import aiostream
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from tests import assert_item_equals
|
|
||||||
from tests.storage import StorageTests
|
|
||||||
from tests.storage import get_server_mixin
|
|
||||||
from vdirsyncer import exceptions
|
|
||||||
from vdirsyncer.vobject import Item
|
|
||||||
|
|
||||||
dav_server = os.environ.get("DAV_SERVER", "skip")
|
dav_server = os.environ.get('DAV_SERVER', 'skip')
|
||||||
ServerMixin = get_server_mixin(dav_server)
|
ServerMixin = get_server_mixin(dav_server)
|
||||||
|
|
||||||
|
|
||||||
class DAVStorageTests(ServerMixin, StorageTests):
|
class DAVStorageTests(ServerMixin, StorageTests):
|
||||||
dav_server = dav_server
|
dav_server = dav_server
|
||||||
|
|
||||||
@pytest.mark.skipif(dav_server == "radicale", reason="Radicale is very tolerant.")
|
def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_dav_broken_item(self, s):
|
|
||||||
item = Item("HAHA:YES")
|
|
||||||
with pytest.raises((exceptions.Error, aiohttp.ClientResponseError)):
|
|
||||||
await s.upload(item)
|
|
||||||
assert not await aiostream.stream.list(s.list())
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_dav_empty_get_multi_performance(self, s, monkeypatch):
|
|
||||||
def breakdown(*a, **kw):
|
def breakdown(*a, **kw):
|
||||||
raise AssertionError("Expected not to be called.")
|
raise AssertionError('Expected not to be called.')
|
||||||
|
|
||||||
monkeypatch.setattr("requests.sessions.Session.request", breakdown)
|
monkeypatch.setattr('requests.sessions.Session.request', breakdown)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
assert list(await aiostream.stream.list(s.get_multi([]))) == []
|
assert list(s.get_multi([])) == []
|
||||||
finally:
|
finally:
|
||||||
# Make sure monkeypatch doesn't interfere with DAV server teardown
|
# Make sure monkeypatch doesn't interfere with DAV server teardown
|
||||||
monkeypatch.undo()
|
monkeypatch.undo()
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_dav_unicode_href(self, s, get_item, monkeypatch):
|
|
||||||
if self.dav_server == "radicale":
|
|
||||||
pytest.skip("Radicale is unable to deal with unicode hrefs")
|
|
||||||
|
|
||||||
monkeypatch.setattr(s, "_get_href", lambda item: item.ident + s.fileext)
|
|
||||||
item = get_item(uid="град сатану" + str(uuid.uuid4()))
|
|
||||||
href, _etag = await s.upload(item)
|
|
||||||
item2, _etag2 = await s.get(href)
|
|
||||||
assert_item_equals(item, item2)
|
|
||||||
|
|
|
||||||
|
|
@ -1,85 +1,43 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import contextlib
|
|
||||||
import datetime
|
import datetime
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
import aiostream
|
|
||||||
import pytest
|
import pytest
|
||||||
from aioresponses import aioresponses
|
|
||||||
|
|
||||||
from tests import EVENT_TEMPLATE
|
from tests import EVENT_TEMPLATE, TASK_TEMPLATE, VCARD_TEMPLATE
|
||||||
from tests import TASK_TEMPLATE
|
|
||||||
from tests import VCARD_TEMPLATE
|
|
||||||
from tests.storage import format_item
|
|
||||||
from vdirsyncer import exceptions
|
|
||||||
from vdirsyncer.storage.dav import CalDAVStorage
|
from vdirsyncer.storage.dav import CalDAVStorage
|
||||||
|
|
||||||
from . import DAVStorageTests
|
from . import DAVStorageTests, dav_server
|
||||||
from . import dav_server
|
from .. import format_item
|
||||||
|
|
||||||
|
|
||||||
class TestCalDAVStorage(DAVStorageTests):
|
class TestCalDAVStorage(DAVStorageTests):
|
||||||
storage_class = CalDAVStorage
|
storage_class = CalDAVStorage
|
||||||
|
|
||||||
@pytest.fixture(params=["VTODO", "VEVENT"])
|
@pytest.fixture(params=['VTODO', 'VEVENT'])
|
||||||
def item_type(self, request):
|
def item_type(self, request):
|
||||||
return request.param
|
return request.param
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
||||||
async def test_doesnt_accept_vcard(self, item_type, get_storage_args):
|
s = self.storage_class(item_types=(item_type,), **get_storage_args())
|
||||||
s = self.storage_class(item_types=(item_type,), **await get_storage_args())
|
|
||||||
|
|
||||||
# Most storages hard-fail, but xandikos doesn't.
|
try:
|
||||||
with contextlib.suppress(exceptions.Error, aiohttp.ClientResponseError):
|
s.upload(format_item(item_template=VCARD_TEMPLATE))
|
||||||
await s.upload(format_item(VCARD_TEMPLATE))
|
except Exception:
|
||||||
|
pass
|
||||||
|
assert not list(s.list())
|
||||||
|
|
||||||
assert not await aiostream.stream.list(s.list())
|
@pytest.mark.xfail(dav_server == 'radicale',
|
||||||
|
reason='Radicale doesn\'t support timeranges.')
|
||||||
# The `arg` param is not named `item_types` because that would hit
|
def test_timerange_correctness(self, get_storage_args):
|
||||||
# https://bitbucket.org/pytest-dev/pytest/issue/745/
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
("arg", "calls_num"),
|
|
||||||
[
|
|
||||||
(("VTODO",), 1),
|
|
||||||
(("VEVENT",), 1),
|
|
||||||
(("VTODO", "VEVENT"), 2),
|
|
||||||
(("VTODO", "VEVENT", "VJOURNAL"), 3),
|
|
||||||
((), 1),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_item_types_performance(
|
|
||||||
self, get_storage_args, arg, calls_num, monkeypatch
|
|
||||||
):
|
|
||||||
s = self.storage_class(item_types=arg, **await get_storage_args())
|
|
||||||
old_parse = s._parse_prop_responses
|
|
||||||
calls = []
|
|
||||||
|
|
||||||
def new_parse(*a, **kw):
|
|
||||||
calls.append(None)
|
|
||||||
return old_parse(*a, **kw)
|
|
||||||
|
|
||||||
monkeypatch.setattr(s, "_parse_prop_responses", new_parse)
|
|
||||||
await aiostream.stream.list(s.list())
|
|
||||||
assert len(calls) == calls_num
|
|
||||||
|
|
||||||
@pytest.mark.xfail(
|
|
||||||
dav_server == "radicale", reason="Radicale doesn't support timeranges."
|
|
||||||
)
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_timerange_correctness(self, get_storage_args):
|
|
||||||
start_date = datetime.datetime(2013, 9, 10)
|
start_date = datetime.datetime(2013, 9, 10)
|
||||||
end_date = datetime.datetime(2013, 9, 13)
|
end_date = datetime.datetime(2013, 9, 13)
|
||||||
s = self.storage_class(
|
s = self.storage_class(start_date=start_date, end_date=end_date,
|
||||||
start_date=start_date, end_date=end_date, **await get_storage_args()
|
**get_storage_args())
|
||||||
)
|
|
||||||
|
|
||||||
too_old_item = format_item(
|
too_old_item = format_item(item_template=dedent(u'''
|
||||||
dedent(
|
|
||||||
"""
|
|
||||||
BEGIN:VCALENDAR
|
BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
|
@ -91,13 +49,9 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
UID:{r}
|
UID:{r}
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
END:VCALENDAR
|
END:VCALENDAR
|
||||||
"""
|
''').strip())
|
||||||
).strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
too_new_item = format_item(
|
too_new_item = format_item(item_template=dedent(u'''
|
||||||
dedent(
|
|
||||||
"""
|
|
||||||
BEGIN:VCALENDAR
|
BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
|
@ -109,13 +63,9 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
UID:{r}
|
UID:{r}
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
END:VCALENDAR
|
END:VCALENDAR
|
||||||
"""
|
''').strip())
|
||||||
).strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
good_item = format_item(
|
good_item = format_item(item_template=dedent(u'''
|
||||||
dedent(
|
|
||||||
"""
|
|
||||||
BEGIN:VCALENDAR
|
BEGIN:VCALENDAR
|
||||||
VERSION:2.0
|
VERSION:2.0
|
||||||
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
PRODID:-//hacksw/handcal//NONSGML v1.0//EN
|
||||||
|
|
@ -127,48 +77,28 @@ class TestCalDAVStorage(DAVStorageTests):
|
||||||
UID:{r}
|
UID:{r}
|
||||||
END:VEVENT
|
END:VEVENT
|
||||||
END:VCALENDAR
|
END:VCALENDAR
|
||||||
"""
|
''').strip())
|
||||||
).strip()
|
|
||||||
)
|
|
||||||
|
|
||||||
await s.upload(too_old_item)
|
s.upload(too_old_item)
|
||||||
await s.upload(too_new_item)
|
s.upload(too_new_item)
|
||||||
expected_href, _ = await s.upload(good_item)
|
expected_href, _ = s.upload(good_item)
|
||||||
|
|
||||||
((actual_href, _),) = await aiostream.stream.list(s.list())
|
(actual_href, _), = s.list()
|
||||||
assert actual_href == expected_href
|
assert actual_href == expected_href
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.skipif(dav_server == 'icloud',
|
||||||
async def test_invalid_resource(self, monkeypatch, get_storage_args):
|
reason='iCloud only accepts VEVENT')
|
||||||
args = await get_storage_args(collection=None)
|
def test_item_types_general(self, get_storage_args):
|
||||||
|
args = get_storage_args()
|
||||||
|
s = self.storage_class(**args)
|
||||||
|
event = s.upload(format_item(item_template=EVENT_TEMPLATE))[0]
|
||||||
|
task = s.upload(format_item(item_template=TASK_TEMPLATE))[0]
|
||||||
|
|
||||||
with aioresponses() as m:
|
for item_types, expected_items in [
|
||||||
m.add(args["url"], method="PROPFIND", status=200, body="Hello world")
|
(('VTODO', 'VEVENT'), {event, task}),
|
||||||
|
(('VTODO',), {task}),
|
||||||
with pytest.raises(ValueError):
|
(('VEVENT',), {event}),
|
||||||
s = self.storage_class(**args)
|
]:
|
||||||
await aiostream.stream.list(s.list())
|
args['item_types'] = item_types
|
||||||
|
s = self.storage_class(**args)
|
||||||
assert len(m.requests) == 1
|
assert set(href for href, etag in s.list()) == expected_items
|
||||||
|
|
||||||
@pytest.mark.skipif(dav_server == "icloud", reason="iCloud only accepts VEVENT")
|
|
||||||
@pytest.mark.skipif(
|
|
||||||
dav_server == "fastmail", reason="Fastmail has non-standard hadling of VTODOs."
|
|
||||||
)
|
|
||||||
@pytest.mark.xfail(dav_server == "baikal", reason="Baikal returns 500.")
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_item_types_general(self, s):
|
|
||||||
event = (await s.upload(format_item(EVENT_TEMPLATE)))[0]
|
|
||||||
task = (await s.upload(format_item(TASK_TEMPLATE)))[0]
|
|
||||||
s.item_types = ("VTODO", "VEVENT")
|
|
||||||
|
|
||||||
async def hrefs():
|
|
||||||
return {href async for href, etag in s.list()}
|
|
||||||
|
|
||||||
assert await hrefs() == {event, task}
|
|
||||||
s.item_types = ("VTODO",)
|
|
||||||
assert await hrefs() == {task}
|
|
||||||
s.item_types = ("VEVENT",)
|
|
||||||
assert await hrefs() == {event}
|
|
||||||
s.item_types = ()
|
|
||||||
assert await hrefs() == {event, task}
|
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
@ -10,6 +10,6 @@ from . import DAVStorageTests
|
||||||
class TestCardDAVStorage(DAVStorageTests):
|
class TestCardDAVStorage(DAVStorageTests):
|
||||||
storage_class = CardDAVStorage
|
storage_class = CardDAVStorage
|
||||||
|
|
||||||
@pytest.fixture(params=["VCARD"])
|
@pytest.fixture(params=['VCARD'])
|
||||||
def item_type(self, request):
|
def item_type(self, request):
|
||||||
return request.param
|
return request.param
|
||||||
|
|
|
||||||
|
|
@ -1,59 +1,40 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vdirsyncer.storage.dav import _BAD_XML_CHARS
|
from vdirsyncer.storage.dav import _BAD_XML_CHARS, _merge_xml, _parse_xml
|
||||||
from vdirsyncer.storage.dav import _merge_xml
|
|
||||||
from vdirsyncer.storage.dav import _normalize_href
|
|
||||||
from vdirsyncer.storage.dav import _parse_xml
|
|
||||||
|
|
||||||
|
|
||||||
def test_xml_utilities():
|
def test_xml_utilities():
|
||||||
x = _parse_xml(
|
x = _parse_xml(b'''<?xml version="1.0" encoding="UTF-8" ?>
|
||||||
b"""<?xml version="1.0" encoding="UTF-8" ?>
|
<D:multistatus xmlns:D="DAV:">
|
||||||
<multistatus xmlns="DAV:">
|
<D:response>
|
||||||
<response>
|
<D:propstat>
|
||||||
<propstat>
|
<D:status>HTTP/1.1 404 Not Found</D:status>
|
||||||
<status>HTTP/1.1 404 Not Found</status>
|
<D:prop>
|
||||||
<prop>
|
<D:getcontenttype/>
|
||||||
<getcontenttype/>
|
</D:prop>
|
||||||
</prop>
|
</D:propstat>
|
||||||
</propstat>
|
<D:propstat>
|
||||||
<propstat>
|
<D:prop>
|
||||||
<prop>
|
<D:resourcetype>
|
||||||
<resourcetype>
|
<D:collection/>
|
||||||
<collection/>
|
</D:resourcetype>
|
||||||
</resourcetype>
|
</D:prop>
|
||||||
</prop>
|
</D:propstat>
|
||||||
</propstat>
|
</D:response>
|
||||||
</response>
|
</D:multistatus>
|
||||||
</multistatus>
|
''')
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
response = x.find("{DAV:}response")
|
response = x.find('{DAV:}response')
|
||||||
props = _merge_xml(response.findall("{DAV:}propstat/{DAV:}prop"))
|
props = _merge_xml(response.findall('{DAV:}propstat/{DAV:}prop'))
|
||||||
assert props.find("{DAV:}resourcetype/{DAV:}collection") is not None
|
assert props.find('{DAV:}resourcetype/{DAV:}collection') is not None
|
||||||
assert props.find("{DAV:}getcontenttype") is not None
|
assert props.find('{DAV:}getcontenttype') is not None
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("char", range(32))
|
@pytest.mark.parametrize('char', range(32))
|
||||||
def test_xml_specialchars(char):
|
def test_xml_specialchars(char):
|
||||||
x = _parse_xml(
|
x = _parse_xml('<?xml version="1.0" encoding="UTF-8" ?>'
|
||||||
'<?xml version="1.0" encoding="UTF-8" ?>'
|
'<foo>ye{}s\r\n'
|
||||||
f"<foo>ye{chr(char)}s\r\n"
|
'hello</foo>'.format(chr(char)).encode('ascii'))
|
||||||
"hello</foo>".encode("ascii")
|
|
||||||
)
|
|
||||||
|
|
||||||
if char in _BAD_XML_CHARS:
|
if char in _BAD_XML_CHARS:
|
||||||
assert x.text == "yes\nhello"
|
assert x.text == 'yes\nhello'
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
|
||||||
"href",
|
|
||||||
[
|
|
||||||
"/dav/calendars/user/testuser/123/UID%253A20210609T084907Z-@synaps-web-54fddfdf7-7kcfm%250A.ics",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
def test_normalize_href(href):
|
|
||||||
assert href == _normalize_href("https://example.com", href)
|
|
||||||
|
|
|
||||||
0
tests/storage/etesync/__init__.py
Normal file
0
tests/storage/etesync/__init__.py
Normal file
BIN
tests/storage/etesync/etesync_server/db.sqlite3
Normal file
BIN
tests/storage/etesync/etesync_server/db.sqlite3
Normal file
Binary file not shown.
124
tests/storage/etesync/etesync_server/etesync_server/settings.py
Normal file
124
tests/storage/etesync/etesync_server/etesync_server/settings.py
Normal file
|
|
@ -0,0 +1,124 @@
|
||||||
|
"""
|
||||||
|
Django settings for etesync_server project.
|
||||||
|
|
||||||
|
Generated by 'django-admin startproject' using Django 1.10.6.
|
||||||
|
|
||||||
|
For more information on this file, see
|
||||||
|
https://docs.djangoproject.com/en/1.10/topics/settings/
|
||||||
|
|
||||||
|
For the full list of settings and their values, see
|
||||||
|
https://docs.djangoproject.com/en/1.10/ref/settings/
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
||||||
|
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||||
|
|
||||||
|
|
||||||
|
# Quick-start development settings - unsuitable for production
|
||||||
|
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
|
||||||
|
|
||||||
|
# SECURITY WARNING: keep the secret key used in production secret!
|
||||||
|
SECRET_KEY = 'd7r(p-9=$3a@bbt%*+$p@4)cej13nzd0gmnt8+m0bitb=-umj#'
|
||||||
|
|
||||||
|
# SECURITY WARNING: don't run with debug turned on in production!
|
||||||
|
DEBUG = True
|
||||||
|
|
||||||
|
ALLOWED_HOSTS = []
|
||||||
|
|
||||||
|
|
||||||
|
# Application definition
|
||||||
|
|
||||||
|
INSTALLED_APPS = [
|
||||||
|
'django.contrib.admin',
|
||||||
|
'django.contrib.auth',
|
||||||
|
'django.contrib.contenttypes',
|
||||||
|
'django.contrib.sessions',
|
||||||
|
'django.contrib.messages',
|
||||||
|
'django.contrib.staticfiles',
|
||||||
|
'rest_framework',
|
||||||
|
'rest_framework.authtoken',
|
||||||
|
'journal.apps.JournalConfig',
|
||||||
|
]
|
||||||
|
|
||||||
|
MIDDLEWARE = [
|
||||||
|
'django.middleware.security.SecurityMiddleware',
|
||||||
|
'django.contrib.sessions.middleware.SessionMiddleware',
|
||||||
|
'django.middleware.common.CommonMiddleware',
|
||||||
|
'django.middleware.csrf.CsrfViewMiddleware',
|
||||||
|
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||||
|
'django.contrib.messages.middleware.MessageMiddleware',
|
||||||
|
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||||
|
]
|
||||||
|
|
||||||
|
ROOT_URLCONF = 'etesync_server.urls'
|
||||||
|
|
||||||
|
TEMPLATES = [
|
||||||
|
{
|
||||||
|
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||||
|
'DIRS': [],
|
||||||
|
'APP_DIRS': True,
|
||||||
|
'OPTIONS': {
|
||||||
|
'context_processors': [
|
||||||
|
'django.template.context_processors.debug',
|
||||||
|
'django.template.context_processors.request',
|
||||||
|
'django.contrib.auth.context_processors.auth',
|
||||||
|
'django.contrib.messages.context_processors.messages',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
WSGI_APPLICATION = 'etesync_server.wsgi.application'
|
||||||
|
|
||||||
|
|
||||||
|
# Database
|
||||||
|
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
|
||||||
|
|
||||||
|
DATABASES = {
|
||||||
|
'default': {
|
||||||
|
'ENGINE': 'django.db.backends.sqlite3',
|
||||||
|
'NAME': os.environ.get('ETESYNC_DB_PATH',
|
||||||
|
os.path.join(BASE_DIR, 'db.sqlite3')),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Password validation
|
||||||
|
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
|
||||||
|
|
||||||
|
AUTH_PASSWORD_VALIDATORS = [
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', # noqa
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', # noqa
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', # noqa
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', # noqa
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
# Internationalization
|
||||||
|
# https://docs.djangoproject.com/en/1.10/topics/i18n/
|
||||||
|
|
||||||
|
LANGUAGE_CODE = 'en-us'
|
||||||
|
|
||||||
|
TIME_ZONE = 'UTC'
|
||||||
|
|
||||||
|
USE_I18N = True
|
||||||
|
|
||||||
|
USE_L10N = True
|
||||||
|
|
||||||
|
USE_TZ = True
|
||||||
|
|
||||||
|
|
||||||
|
# Static files (CSS, JavaScript, Images)
|
||||||
|
# https://docs.djangoproject.com/en/1.10/howto/static-files/
|
||||||
|
|
||||||
|
STATIC_URL = '/static/'
|
||||||
41
tests/storage/etesync/etesync_server/etesync_server/urls.py
Normal file
41
tests/storage/etesync/etesync_server/etesync_server/urls.py
Normal file
|
|
@ -0,0 +1,41 @@
|
||||||
|
"""etesync_server URL Configuration
|
||||||
|
|
||||||
|
The `urlpatterns` list routes URLs to views. For more information please see:
|
||||||
|
https://docs.djangoproject.com/en/1.10/topics/http/urls/
|
||||||
|
Examples:
|
||||||
|
Function views
|
||||||
|
1. Add an import: from my_app import views
|
||||||
|
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
|
||||||
|
Class-based views
|
||||||
|
1. Add an import: from other_app.views import Home
|
||||||
|
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
|
||||||
|
Including another URLconf
|
||||||
|
1. Import the include() function: from django.conf.urls import url, include
|
||||||
|
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
|
||||||
|
"""
|
||||||
|
from django.conf.urls import include, url
|
||||||
|
|
||||||
|
from rest_framework_nested import routers
|
||||||
|
|
||||||
|
from journal import views
|
||||||
|
|
||||||
|
router = routers.DefaultRouter()
|
||||||
|
router.register(r'journals', views.JournalViewSet)
|
||||||
|
router.register(r'journal/(?P<journal_uid>[^/]+)', views.EntryViewSet)
|
||||||
|
router.register(r'user', views.UserInfoViewSet)
|
||||||
|
|
||||||
|
journals_router = routers.NestedSimpleRouter(router, r'journals',
|
||||||
|
lookup='journal')
|
||||||
|
journals_router.register(r'members', views.MembersViewSet,
|
||||||
|
base_name='journal-members')
|
||||||
|
journals_router.register(r'entries', views.EntryViewSet,
|
||||||
|
base_name='journal-entries')
|
||||||
|
|
||||||
|
|
||||||
|
urlpatterns = [
|
||||||
|
url(r'^api/v1/', include(router.urls)),
|
||||||
|
url(r'^api/v1/', include(journals_router.urls)),
|
||||||
|
]
|
||||||
|
|
||||||
|
# Adding this just for testing, this shouldn't be here normally
|
||||||
|
urlpatterns += url(r'^reset/$', views.reset, name='reset_debug'),
|
||||||
16
tests/storage/etesync/etesync_server/etesync_server/wsgi.py
Normal file
16
tests/storage/etesync/etesync_server/etesync_server/wsgi.py
Normal file
|
|
@ -0,0 +1,16 @@
|
||||||
|
"""
|
||||||
|
WSGI config for etesync_server project.
|
||||||
|
|
||||||
|
It exposes the WSGI callable as a module-level variable named ``application``.
|
||||||
|
|
||||||
|
For more information on this file, see
|
||||||
|
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
from django.core.wsgi import get_wsgi_application
|
||||||
|
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
|
||||||
|
|
||||||
|
application = get_wsgi_application()
|
||||||
22
tests/storage/etesync/etesync_server/manage.py
Executable file
22
tests/storage/etesync/etesync_server/manage.py
Executable file
|
|
@ -0,0 +1,22 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etesync_server.settings")
|
||||||
|
try:
|
||||||
|
from django.core.management import execute_from_command_line
|
||||||
|
except ImportError:
|
||||||
|
# The above import may fail for some other reason. Ensure that the
|
||||||
|
# issue is really that Django is missing to avoid masking other
|
||||||
|
# exceptions on Python 2.
|
||||||
|
try:
|
||||||
|
import django # noqa
|
||||||
|
except ImportError:
|
||||||
|
raise ImportError(
|
||||||
|
"Couldn't import Django. Are you sure it's installed and "
|
||||||
|
"available on your PYTHONPATH environment variable? Did you "
|
||||||
|
"forget to activate a virtual environment?"
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
execute_from_command_line(sys.argv)
|
||||||
1
tests/storage/etesync/test@localhost/auth_token
Normal file
1
tests/storage/etesync/test@localhost/auth_token
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
63ae6eec45b592d5c511f79b7b0c312d2c5f7d6a
|
||||||
BIN
tests/storage/etesync/test@localhost/key
Normal file
BIN
tests/storage/etesync/test@localhost/key
Normal file
Binary file not shown.
92
tests/storage/etesync/test_main.py
Normal file
92
tests/storage/etesync/test_main.py
Normal file
|
|
@ -0,0 +1,92 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import shutil
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from vdirsyncer.storage.etesync import EtesyncContacts, EtesyncCalendars
|
||||||
|
|
||||||
|
from .. import StorageTests
|
||||||
|
|
||||||
|
|
||||||
|
pytestmark = pytest.mark.skipif(os.getenv('ETESYNC_TESTS', '') != 'true',
|
||||||
|
reason='etesync tests disabled')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='session')
|
||||||
|
def etesync_app(tmpdir_factory):
|
||||||
|
sys.path.insert(0, os.path.join(os.path.dirname(__file__),
|
||||||
|
'etesync_server'))
|
||||||
|
|
||||||
|
db = tmpdir_factory.mktemp('etesync').join('etesync.sqlite')
|
||||||
|
shutil.copy(
|
||||||
|
os.path.join(os.path.dirname(__file__), 'etesync_server',
|
||||||
|
'db.sqlite3'),
|
||||||
|
str(db)
|
||||||
|
)
|
||||||
|
|
||||||
|
os.environ['ETESYNC_DB_PATH'] = str(db)
|
||||||
|
from etesync_server.wsgi import application
|
||||||
|
return application
|
||||||
|
|
||||||
|
|
||||||
|
class EtesyncTests(StorageTests):
|
||||||
|
|
||||||
|
supports_metadata = False
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(self, request, get_item, tmpdir, etesync_app):
|
||||||
|
import wsgi_intercept
|
||||||
|
import wsgi_intercept.requests_intercept
|
||||||
|
wsgi_intercept.requests_intercept.install()
|
||||||
|
wsgi_intercept.add_wsgi_intercept('127.0.0.1', 8000,
|
||||||
|
lambda: etesync_app)
|
||||||
|
|
||||||
|
def teardown():
|
||||||
|
wsgi_intercept.remove_wsgi_intercept('127.0.0.1', 8000)
|
||||||
|
wsgi_intercept.requests_intercept.uninstall()
|
||||||
|
|
||||||
|
request.addfinalizer(teardown)
|
||||||
|
|
||||||
|
with open(os.path.join(os.path.dirname(__file__),
|
||||||
|
'test@localhost/auth_token')) as f:
|
||||||
|
token = f.read().strip()
|
||||||
|
headers = {'Authorization': 'Token ' + token}
|
||||||
|
r = requests.post('http://127.0.0.1:8000/reset/', headers=headers,
|
||||||
|
allow_redirects=False)
|
||||||
|
assert r.status_code == 200
|
||||||
|
|
||||||
|
def inner(collection='test'):
|
||||||
|
rv = {
|
||||||
|
'email': 'test@localhost',
|
||||||
|
'db_path': str(tmpdir.join('etesync.db')),
|
||||||
|
'secrets_dir': os.path.dirname(__file__),
|
||||||
|
'server_url': 'http://127.0.0.1:8000/'
|
||||||
|
}
|
||||||
|
if collection is not None:
|
||||||
|
rv = self.storage_class.create_collection(
|
||||||
|
collection=collection,
|
||||||
|
**rv
|
||||||
|
)
|
||||||
|
return rv
|
||||||
|
return inner
|
||||||
|
|
||||||
|
|
||||||
|
class TestContacts(EtesyncTests):
|
||||||
|
storage_class = EtesyncContacts
|
||||||
|
|
||||||
|
@pytest.fixture(params=['VCARD'])
|
||||||
|
def item_type(self, request):
|
||||||
|
return request.param
|
||||||
|
|
||||||
|
|
||||||
|
class TestCalendars(EtesyncTests):
|
||||||
|
storage_class = EtesyncCalendars
|
||||||
|
|
||||||
|
@pytest.fixture(params=['VEVENT'])
|
||||||
|
def item_type(self, request):
|
||||||
|
return request.param
|
||||||
|
|
@ -0,0 +1 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
@ -1,38 +0,0 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
|
||||||
def get_storage_args(
|
|
||||||
self,
|
|
||||||
request,
|
|
||||||
tmpdir,
|
|
||||||
slow_create_collection,
|
|
||||||
baikal_server,
|
|
||||||
aio_connector,
|
|
||||||
):
|
|
||||||
async def inner(collection="test"):
|
|
||||||
base_url = "http://127.0.0.1:8002/"
|
|
||||||
args = {
|
|
||||||
"url": base_url,
|
|
||||||
"username": "baikal",
|
|
||||||
"password": "baikal",
|
|
||||||
"connector": aio_connector,
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.storage_class.fileext == ".vcf":
|
|
||||||
args["url"] = base_url + "card.php/"
|
|
||||||
else:
|
|
||||||
args["url"] = base_url + "cal.php/"
|
|
||||||
|
|
||||||
if collection is not None:
|
|
||||||
args = await slow_create_collection(
|
|
||||||
self.storage_class,
|
|
||||||
args,
|
|
||||||
collection,
|
|
||||||
)
|
|
||||||
return args
|
|
||||||
|
|
||||||
return inner
|
|
||||||
|
|
@ -1,50 +1,49 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import uuid
|
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
import uuid
|
||||||
|
|
||||||
try:
|
try:
|
||||||
caldav_args = {
|
caldav_args = {
|
||||||
# Those credentials are configured through the Travis UI
|
# Those credentials are configured through the Travis UI
|
||||||
"username": os.environ["DAVICAL_USERNAME"].strip(),
|
'username': os.environ['DAVICAL_USERNAME'].strip(),
|
||||||
"password": os.environ["DAVICAL_PASSWORD"].strip(),
|
'password': os.environ['DAVICAL_PASSWORD'].strip(),
|
||||||
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
|
'url': 'https://caesar.lostpackets.de/davical-test/caldav.php/',
|
||||||
}
|
}
|
||||||
except KeyError as e:
|
except KeyError as e:
|
||||||
pytestmark = pytest.mark.skip(f"Missing envkey: {e!s}")
|
caldav_args = None
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.flaky(reruns=5)
|
@pytest.mark.flaky(reruns=5)
|
||||||
class ServerMixin:
|
class ServerMixin(object):
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def davical_args(self):
|
def davical_args(self):
|
||||||
if self.storage_class.fileext == ".ics":
|
if caldav_args is None:
|
||||||
|
pytest.skip('Missing envkeys for davical')
|
||||||
|
if self.storage_class.fileext == '.ics':
|
||||||
return dict(caldav_args)
|
return dict(caldav_args)
|
||||||
elif self.storage_class.fileext == ".vcf":
|
elif self.storage_class.fileext == '.vcf':
|
||||||
pytest.skip("No carddav")
|
pytest.skip('No carddav')
|
||||||
else:
|
else:
|
||||||
raise RuntimeError
|
raise RuntimeError()
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, davical_args, request):
|
def get_storage_args(self, davical_args, request):
|
||||||
async def inner(collection="test"):
|
def inner(collection='test'):
|
||||||
if collection is None:
|
if collection is None:
|
||||||
return davical_args
|
return davical_args
|
||||||
|
|
||||||
assert collection.startswith("test")
|
assert collection.startswith('test')
|
||||||
|
|
||||||
for _ in range(4):
|
for _ in range(4):
|
||||||
args = self.storage_class.create_collection(
|
args = self.storage_class.create_collection(
|
||||||
collection + str(uuid.uuid4()), **davical_args
|
collection + str(uuid.uuid4()),
|
||||||
|
**davical_args
|
||||||
)
|
)
|
||||||
s = self.storage_class(**args)
|
s = self.storage_class(**args)
|
||||||
if not list(s.list()):
|
if not list(s.list()):
|
||||||
# See: https://stackoverflow.com/a/33984811
|
request.addfinalizer(
|
||||||
request.addfinalizer(lambda x=s: x.session.request("DELETE", ""))
|
lambda: s.session.request('DELETE', ''))
|
||||||
return args
|
return args
|
||||||
|
|
||||||
raise RuntimeError("Failed to find free collection.")
|
raise RuntimeError('Failed to find free collection.')
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
|
|
@ -1,42 +1,31 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
class ServerMixin:
|
username = os.environ.get('FASTMAIL_USERNAME', '').strip()
|
||||||
|
password = os.environ.get('FASTMAIL_PASSWORD', '').strip()
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin(object):
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, slow_create_collection, aio_connector, request):
|
def get_storage_args(self, slow_create_collection):
|
||||||
if (
|
if not username:
|
||||||
"item_type" in request.fixturenames
|
pytest.skip('Fastmail credentials not available')
|
||||||
and request.getfixturevalue("item_type") == "VTODO"
|
|
||||||
):
|
|
||||||
# Fastmail has non-standard support for TODOs
|
|
||||||
# See https://github.com/pimutils/vdirsyncer/issues/824
|
|
||||||
pytest.skip("Fastmail has non-standard VTODO support.")
|
|
||||||
|
|
||||||
async def inner(collection="test"):
|
def inner(collection='test'):
|
||||||
args = {
|
args = {'username': username, 'password': password}
|
||||||
"username": os.environ["FASTMAIL_USERNAME"],
|
|
||||||
"password": os.environ["FASTMAIL_PASSWORD"],
|
|
||||||
"connector": aio_connector,
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.storage_class.fileext == ".ics":
|
if self.storage_class.fileext == '.ics':
|
||||||
args["url"] = "https://caldav.fastmail.com/"
|
args['url'] = 'https://caldav.messagingengine.com/'
|
||||||
elif self.storage_class.fileext == ".vcf":
|
elif self.storage_class.fileext == '.vcf':
|
||||||
args["url"] = "https://carddav.fastmail.com/"
|
args['url'] = 'https://carddav.messagingengine.com/'
|
||||||
else:
|
else:
|
||||||
raise RuntimeError
|
raise RuntimeError()
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = await slow_create_collection(
|
args = slow_create_collection(self.storage_class, args,
|
||||||
self.storage_class,
|
collection)
|
||||||
args,
|
|
||||||
collection,
|
|
||||||
)
|
|
||||||
|
|
||||||
return args
|
return args
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
0
tests/storage/servers/fastmail/install.sh
Normal file
0
tests/storage/servers/fastmail/install.sh
Normal file
|
|
@ -1,33 +1,35 @@
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
username = os.environ.get('ICLOUD_USERNAME', '').strip()
|
||||||
|
password = os.environ.get('ICLOUD_PASSWORD', '').strip()
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin(object):
|
||||||
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(self, item_type, slow_create_collection):
|
def get_storage_args(self, item_type, slow_create_collection):
|
||||||
if item_type != "VEVENT":
|
if item_type != 'VEVENT':
|
||||||
# iCloud collections can either be calendars or task lists.
|
# iCloud collections can either be calendars or task lists.
|
||||||
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615
|
# See https://github.com/pimutils/vdirsyncer/pull/593#issuecomment-285941615 # noqa
|
||||||
pytest.skip("iCloud doesn't support anything else than VEVENT")
|
pytest.skip('iCloud doesn\'t support anything else than VEVENT')
|
||||||
|
|
||||||
async def inner(collection="test"):
|
if not username:
|
||||||
args = {
|
pytest.skip('iCloud credentials not available')
|
||||||
"username": os.environ["ICLOUD_USERNAME"],
|
|
||||||
"password": os.environ["ICLOUD_PASSWORD"],
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.storage_class.fileext == ".ics":
|
def inner(collection='test'):
|
||||||
args["url"] = "https://caldav.icloud.com/"
|
args = {'username': username, 'password': password}
|
||||||
elif self.storage_class.fileext == ".vcf":
|
|
||||||
args["url"] = "https://contacts.icloud.com/"
|
if self.storage_class.fileext == '.ics':
|
||||||
|
args['url'] = 'https://caldav.icloud.com/'
|
||||||
|
elif self.storage_class.fileext == '.vcf':
|
||||||
|
args['url'] = 'https://contacts.icloud.com/'
|
||||||
else:
|
else:
|
||||||
raise RuntimeError
|
raise RuntimeError()
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = slow_create_collection(self.storage_class, args, collection)
|
args = slow_create_collection(self.storage_class, args,
|
||||||
|
collection)
|
||||||
return args
|
return args
|
||||||
|
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
0
tests/storage/servers/icloud/install.sh
Normal file
0
tests/storage/servers/icloud/install.sh
Normal file
29
tests/storage/servers/nextcloud/__init__.py
Normal file
29
tests/storage/servers/nextcloud/__init__.py
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
import os
|
||||||
|
import requests
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
port = os.environ.get('NEXTCLOUD_HOST', None) or 'localhost:5000'
|
||||||
|
user = os.environ.get('NEXTCLOUD_USER', None) or 'asdf'
|
||||||
|
pwd = os.environ.get('NEXTCLOUD_PASS', None) or 'asdf'
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin(object):
|
||||||
|
storage_class = None
|
||||||
|
wsgi_teardown = None
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def get_storage_args(self, item_type,
|
||||||
|
slow_create_collection):
|
||||||
|
def inner(collection='test'):
|
||||||
|
args = {
|
||||||
|
'username': user,
|
||||||
|
'password': pwd,
|
||||||
|
'url': 'http://{}/remote.php/dav/'.format(port)
|
||||||
|
}
|
||||||
|
|
||||||
|
if collection is not None:
|
||||||
|
args = slow_create_collection(self.storage_class, args,
|
||||||
|
collection)
|
||||||
|
return args
|
||||||
|
return inner
|
||||||
0
tests/storage/servers/nextcloud/install.sh
Normal file
0
tests/storage/servers/nextcloud/install.sh
Normal file
|
|
@ -1,33 +1,59 @@
|
||||||
from __future__ import annotations
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import logging
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
|
import radicale
|
||||||
|
import radicale.config
|
||||||
|
|
||||||
|
from pkg_resources import parse_version as ver
|
||||||
|
|
||||||
|
import wsgi_intercept
|
||||||
|
import wsgi_intercept.requests_intercept
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ServerMixin(object):
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def setup(self, request, tmpdir):
|
||||||
|
if ver(radicale.VERSION) < ver('2.0.0-pre'):
|
||||||
|
raise RuntimeError('Testing against Radicale only works with '
|
||||||
|
'Radicale >= 2.0.0')
|
||||||
|
|
||||||
|
def get_app():
|
||||||
|
config = radicale.config.load(())
|
||||||
|
config.set('storage', 'filesystem_folder', str(tmpdir))
|
||||||
|
config.set('rights', 'type', 'owner_only')
|
||||||
|
|
||||||
|
app = radicale.Application(config, logger)
|
||||||
|
|
||||||
|
def is_authenticated(user, password):
|
||||||
|
return user == 'bob' and password == 'bob'
|
||||||
|
|
||||||
|
app.is_authenticated = is_authenticated
|
||||||
|
return app
|
||||||
|
|
||||||
|
wsgi_intercept.requests_intercept.install()
|
||||||
|
wsgi_intercept.add_wsgi_intercept('127.0.0.1', 80, get_app)
|
||||||
|
|
||||||
|
def teardown():
|
||||||
|
wsgi_intercept.remove_wsgi_intercept('127.0.0.1', 80)
|
||||||
|
wsgi_intercept.requests_intercept.uninstall()
|
||||||
|
request.addfinalizer(teardown)
|
||||||
|
|
||||||
class ServerMixin:
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def get_storage_args(
|
def get_storage_args(self, get_item):
|
||||||
self,
|
def inner(collection='test'):
|
||||||
request,
|
url = 'http://127.0.0.1/'
|
||||||
tmpdir,
|
rv = {'url': url, 'username': 'bob', 'password': 'bob'}
|
||||||
slow_create_collection,
|
|
||||||
radicale_server,
|
|
||||||
aio_connector,
|
|
||||||
):
|
|
||||||
async def inner(collection="test"):
|
|
||||||
url = "http://127.0.0.1:8001/"
|
|
||||||
args = {
|
|
||||||
"url": url,
|
|
||||||
"username": "radicale",
|
|
||||||
"password": "radicale",
|
|
||||||
"connector": aio_connector,
|
|
||||||
}
|
|
||||||
|
|
||||||
if collection is not None:
|
if collection is not None:
|
||||||
args = await slow_create_collection(
|
collection = collection + self.storage_class.fileext
|
||||||
self.storage_class,
|
rv = self.storage_class.create_collection(collection, **rv)
|
||||||
args,
|
s = self.storage_class(**rv)
|
||||||
collection,
|
assert not list(s.list())
|
||||||
)
|
|
||||||
return args
|
|
||||||
|
|
||||||
|
return rv
|
||||||
return inner
|
return inner
|
||||||
|
|
|
||||||
12
tests/storage/servers/radicale/install.sh
Normal file
12
tests/storage/servers/radicale/install.sh
Normal file
|
|
@ -0,0 +1,12 @@
|
||||||
|
#!/bin/sh
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ "$REQUIREMENTS" = "release" ] || [ "$REQUIREMENTS" = "minimal" ]; then
|
||||||
|
radicale_pkg="radicale"
|
||||||
|
elif [ "$REQUIREMENTS" = "devel" ]; then
|
||||||
|
radicale_pkg="git+https://github.com/Kozea/Radicale.git"
|
||||||
|
else
|
||||||
|
echo "Invalid requirements envvar"
|
||||||
|
false
|
||||||
|
fi
|
||||||
|
pip install wsgi_intercept $radicale_pkg
|
||||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue