Remove Python 2 support (#499)

* Discontinue Python 2.

See #219

* Remove Python 2 config option

* Remove coerce_native

* Remove PY2 variable

* s/text_type/str/g

* Flake8 fixes

* Remove str = str

* s/to_native/to_unicode/g

* Remove to_unicode = to_unicode

* Remove iteritems

* Remove itervalues

* Remove str import, flake8 fixes

* Remove urlparse compat code

* Remove with_metaclass

* Remove unused PY2 variable

* Remove getargspec_ish

* Remove to_bytes

* Remove compat module

* Remove Python 2 from Travis

* fixup! Remove urlparse compat code

* fixup! Remove urlparse compat code

* fixup! Remove compat module
This commit is contained in:
Markus Unterwaditzer 2016-09-08 12:18:36 +02:00 committed by GitHub
parent 696e53dc1f
commit 18d8bb9fc2
27 changed files with 107 additions and 334 deletions

View file

@ -14,70 +14,6 @@
"language": "python",
"matrix": {
"include": [
{
"env": "BUILD=style BUILD_PRS=true",
"python": "2.7"
},
{
"env": "BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=devel BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=release BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test REMOTESTORAGE_SERVER=mysteryshack REQUIREMENTS=minimal BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=owncloud REQUIREMENTS=devel BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=owncloud REQUIREMENTS=release BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=owncloud REQUIREMENTS=minimal BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=nextcloud REQUIREMENTS=devel BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=nextcloud REQUIREMENTS=release BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=nextcloud REQUIREMENTS=minimal BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=baikal REQUIREMENTS=devel BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=baikal REQUIREMENTS=release BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=baikal REQUIREMENTS=minimal BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=davical REQUIREMENTS=devel BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=davical REQUIREMENTS=release BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=test DAV_SERVER=davical REQUIREMENTS=minimal BUILD_PRS=false",
"python": "2.7"
},
{
"env": "BUILD=style BUILD_PRS=true",
"python": "3.3"
@ -186,10 +122,6 @@
"env": "BUILD=test DAV_SERVER=davical REQUIREMENTS=minimal BUILD_PRS=false",
"python": "3.5"
},
{
"env": "BUILD=style BUILD_PRS=true",
"python": "pypy"
},
{
"env": "BUILD=test",
"language": "generic",

View file

@ -9,6 +9,11 @@ Package maintainers and users who have to manually update their installation
may want to subscribe to `GitHub's tag feed
<https://github.com/pimutils/vdirsyncer/tags.atom>`_.
Version 0.13.0
==============
- Python 2 is no longer supported at all. See :gh:`219`.
Version 0.12.1
==============

View file

@ -35,7 +35,7 @@ If your distribution doesn't provide a package for vdirsyncer, you still can
use Python's package manager "pip". First, you'll have to check that the
following things are installed:
- A compatible version of Python (2.7+ or 3.3+) and the corresponding pip package
- Python 3.3+ and pip.
- ``libxml`` and ``libxslt``
- ``zlib``

View file

@ -38,7 +38,7 @@ cfg['script'] = [script("""
matrix = []
cfg['matrix'] = {'include': matrix}
for python in ("2.7", "3.3", "3.4", "3.5", "pypy"):
for python in ("3.3", "3.4", "3.5"):
matrix.append({
'python': python,
'env': 'BUILD=style BUILD_PRS=true'
@ -48,12 +48,6 @@ for python in ("2.7", "3.3", "3.4", "3.5", "pypy"):
dav_servers = ("radicale", "owncloud", "nextcloud", "baikal",
"davical")
rs_servers = ("mysteryshack",)
elif python == "2.7":
dav_servers = ("owncloud", "nextcloud", "baikal", "davical")
rs_servers = ("mysteryshack",)
elif python == "pypy":
dav_servers = ()
rs_servers = ()
else:
dav_servers = ("radicale",)
rs_servers = ()

View file

@ -9,7 +9,6 @@ import pytest
from vdirsyncer import exceptions
from vdirsyncer.cli.fetchparams import STRATEGIES, expand_fetch_params
from vdirsyncer.utils.compat import PY2
@pytest.fixture
@ -90,7 +89,6 @@ def test_key_conflict(monkeypatch, mystrategy):
assert 'Can\'t set foo.fetch and foo.' in str(excinfo.value)
@pytest.mark.skipif(PY2, reason='Don\'t care about Python 2')
@given(s=st.text(), t=st.text(min_size=1))
def test_fuzzing(s, t, mystrategy):
config = expand_fetch_params({

View file

@ -9,8 +9,6 @@ from hypothesis import example, given
import pytest
from vdirsyncer.utils.compat import PY2, to_native, to_unicode
def test_simple_run(tmpdir, runner):
runner.write_with_general(dedent('''
@ -277,7 +275,6 @@ def test_multiple_pairs(tmpdir, runner):
st.characters(
blacklist_characters=set(
u'./\x00' # Invalid chars on POSIX filesystems
+ (u';' if PY2 else u'') # https://bugs.python.org/issue16374
),
# Surrogates can't be encoded to utf-8 in Python
blacklist_categories=set(['Cs'])
@ -289,7 +286,6 @@ def test_multiple_pairs(tmpdir, runner):
))
@example(collections=[u'persönlich'])
def test_create_collections(subtest, collections):
collections = set(to_native(x, 'utf-8') for x in collections)
@subtest
def test_inner(tmpdir, runner):
@ -325,7 +321,7 @@ def test_create_collections(subtest, collections):
# Quoted from
# https://stackoverflow.com/questions/18137554/how-to-convert-path-to-mac-os-x-path-the-almost-nfd-normal-form # noqa
u = lambda xs: set(
unicodedata.normalize('NFKD', to_unicode(x, 'utf-8'))
unicodedata.normalize('NFKD', x)
for x in xs
)
assert u(x.basename for x in tmpdir.join('foo').listdir()) == \

View file

@ -1,31 +1,5 @@
from hypothesis import given
from hypothesis.strategies import (
binary,
booleans,
complex_numbers,
floats,
integers,
none,
one_of,
text
)
from vdirsyncer import exceptions
from vdirsyncer.cli.utils import coerce_native, handle_cli_error
@given(one_of(
binary(),
booleans(),
complex_numbers(),
floats(),
integers(),
none(),
text()
))
def test_coerce_native_fuzzing(s):
coerce_native(s)
from vdirsyncer.cli.utils import handle_cli_error
def test_handle_cli_error(capsys):

View file

@ -17,12 +17,6 @@ def setup_logging():
click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)
# XXX: Py2
@pytest.fixture(autouse=True)
def suppress_py2_warning(monkeypatch):
monkeypatch.setattr('vdirsyncer.cli._check_python2', lambda _: None)
try:
import pytest_benchmark
except ImportError:

View file

@ -3,6 +3,7 @@
import random
import textwrap
from urllib.parse import quote as urlquote, unquote as urlunquote
import hypothesis.strategies as st
from hypothesis import given
@ -11,7 +12,6 @@ import pytest
import vdirsyncer.exceptions as exceptions
from vdirsyncer.storage.base import Item, normalize_meta_value
from vdirsyncer.utils.compat import iteritems, text_type, urlquote, urlunquote
from .. import EVENT_TEMPLATE, TASK_TEMPLATE, VCARD_TEMPLATE, \
assert_item_equals, normalize_item, printable_characters_strategy
@ -80,8 +80,8 @@ class StorageTests(object):
hrefs.sort()
assert hrefs == sorted(s.list())
for href, etag in hrefs:
assert isinstance(href, (text_type, bytes))
assert isinstance(etag, (text_type, bytes))
assert isinstance(href, (str, bytes))
assert isinstance(etag, (str, bytes))
assert s.has(href)
item, etag2 = s.get(href)
assert etag == etag2
@ -114,7 +114,7 @@ class StorageTests(object):
new_item = get_item(uid=item.uid)
new_etag = s.update(href, new_item, etag)
# See https://github.com/pimutils/vdirsyncer/issues/48
assert isinstance(new_etag, (bytes, text_type))
assert isinstance(new_etag, (bytes, str))
assert_item_equals(s.get(href)[0], new_item)
def test_update_nonexisting(self, s, get_item):
@ -162,7 +162,7 @@ class StorageTests(object):
assert dict(
(href, etag) for href, item, etag
in s.get_multi(href for href, etag in iteritems(info))
in s.get_multi(href for href, etag in info.items())
) == info
def test_repr(self, s, get_storage_args):
@ -277,7 +277,7 @@ class StorageTests(object):
s.set_meta('displayname', x)
rv = s.get_meta('displayname')
assert rv == x
assert isinstance(rv, text_type)
assert isinstance(rv, str)
@given(value=st.one_of(
st.none(),

View file

@ -3,9 +3,9 @@
import os
import sys
import pytest
from urllib.parse import quote as urlquote
from vdirsyncer.utils.compat import urlquote
import pytest
import wsgi_intercept
import wsgi_intercept.requests_intercept

View file

@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
import logging
import platform
import click_log
@ -13,7 +12,6 @@ from vdirsyncer import utils
# These modules might be uninitialized and unavailable if not explicitly
# imported
import vdirsyncer.utils.compat # noqa
import vdirsyncer.utils.http # noqa
@ -43,11 +41,8 @@ def test_request_ssl(httpsserver):
def _fingerprints_broken():
from pkg_resources import parse_version as ver
tolerant_python = (
utils.compat.PY2 and platform.python_implementation() != 'PyPy'
)
broken_urllib3 = ver(requests.__version__) <= ver('2.5.1')
return broken_urllib3 and not tolerant_python
return broken_urllib3
@pytest.mark.skipif(_fingerprints_broken(),

View file

@ -19,6 +19,16 @@ except ImportError: # pragma: no cover
)
def _check_python_version(): # pragma: no cover
import sys
if sys.version_info[0] < 3:
print('vdirsyncer requires Python 3.')
sys.exit(1)
_check_python_version()
del _check_python_version
def _detect_faulty_requests(): # pragma: no cover
import requests
if 'dist-packages' not in requests.__file__:
@ -41,3 +51,4 @@ def _detect_faulty_requests(): # pragma: no cover
sys.exit(1)
_detect_faulty_requests()
del _detect_faulty_requests

View file

@ -8,8 +8,7 @@ import click
import click_log
from .. import PROJECT_HOME, __version__, exceptions
from ..utils.compat import PY2
from .. import __version__
cli_logger = logging.getLogger(__name__)
@ -38,29 +37,6 @@ def catch_errors(f):
return inner
def _check_python2(config):
# XXX: Py2
if not PY2:
return
msg = (
'Python 2 support will be dropped. Please switch '
'to at least Python 3.3 as soon as possible. See '
'{home}/issues/219 for more information.'
.format(home=PROJECT_HOME)
)
if not config.general.get('python2', False):
raise exceptions.UserError(
msg + (
'\nSet python2 = true in the [general] section to get rid of '
'this error for now.'
)
)
else:
cli_logger.warning(msg)
@click.group()
@click_log.init('vdirsyncer')
@click_log.simple_verbosity_option()
@ -76,7 +52,6 @@ def app(ctx, config):
if not ctx.config:
ctx.config = load_config(config)
_check_python2(ctx.config)
main = app

View file

@ -7,14 +7,13 @@ from . import cli_logger
from .fetchparams import expand_fetch_params
from .. import PROJECT_HOME, exceptions
from ..utils import cached_property, expand_path
from ..utils.compat import text_type
try:
from ConfigParser import RawConfigParser
except ImportError:
from configparser import RawConfigParser
GENERAL_ALL = frozenset(['status_path', 'python2']) # XXX: Py2
GENERAL_ALL = frozenset(['status_path'])
GENERAL_REQUIRED = frozenset(['status_path'])
SECTION_NAME_CHARS = frozenset(chain(string.ascii_letters, string.digits, '_'))
@ -68,7 +67,7 @@ def _validate_pair_section(pair_config):
for i, collection in enumerate(collections):
try:
if isinstance(collection, (text_type, bytes)):
if isinstance(collection, (str, bytes)):
collection_name = collection
elif isinstance(collection, list):
e = ValueError(
@ -78,11 +77,11 @@ def _validate_pair_section(pair_config):
if len(collection) != 3:
raise e
if not isinstance(collection[0], (text_type, bytes)):
if not isinstance(collection[0], (str, bytes)):
raise e
for x in collection[1:]:
if x is not None and not isinstance(x, (text_type, bytes)):
if x is not None and not isinstance(x, (str, bytes)):
raise e
collection_name = collection[0]

View file

@ -4,13 +4,12 @@ import functools
import json
from .config import CollectionConfig
from .utils import JobFailed, cli_logger, coerce_native, \
collections_for_pair, get_status_name, handle_cli_error, load_status, \
save_status, storage_class_from_config, storage_instance_from_config
from .utils import JobFailed, cli_logger, collections_for_pair, \
get_status_name, handle_cli_error, load_status, save_status, \
storage_class_from_config, storage_instance_from_config
from .. import exceptions
from ..sync import sync
from ..utils.compat import to_unicode
def prepare_pair(wq, pair_name, collections, config, callback, **kwargs):
@ -23,9 +22,6 @@ def prepare_pair(wq, pair_name, collections, config, callback, **kwargs):
# spawn one worker less because we can reuse the current one
new_workers = -1
for collection_name in (collections or all_collections):
# XXX: PY2 hack
if collection_name is not None:
collection_name = to_unicode(collection_name, 'utf-8')
try:
config_a, config_b = all_collections[collection_name]
except KeyError:
@ -51,12 +47,12 @@ def sync_collection(wq, collection, general, force_delete):
status_name = get_status_name(pair.name, collection.name)
try:
cli_logger.info('Syncing {}'.format(coerce_native(status_name)))
cli_logger.info('Syncing {}'.format(status_name))
status = load_status(general['status_path'], pair.name,
collection.name, data_type='items') or {}
cli_logger.debug('Loaded status for {}'
.format(coerce_native(status_name)))
.format(status_name))
a = storage_instance_from_config(collection.config_a)
b = storage_instance_from_config(collection.config_b)

View file

@ -19,7 +19,6 @@ from . import cli_logger
from .. import BUGTRACKER_HOME, DOCS_HOME, exceptions
from ..sync import IdentConflict, StorageEmpty, SyncConflict
from ..utils import expand_path, get_storage_init_args
from ..utils.compat import to_native
try:
import Queue as queue
@ -147,8 +146,7 @@ def handle_cli_error(status_name=None):
import traceback
tb = traceback.format_tb(tb)
if status_name:
msg = 'Unknown error occured for {}'.format(
coerce_native(status_name))
msg = 'Unknown error occured for {}'.format(status_name)
else:
msg = 'Unknown error occured'
@ -293,7 +291,7 @@ def _handle_collection_not_found(config, collection, e=None):
def _print_collections(base_config, discovered):
instance_name = base_config['instance_name']
cli_logger.info('{}:'.format(coerce_native(instance_name)))
cli_logger.info('{}:'.format(instance_name))
for args in discovered.values():
collection = args['collection']
if collection is None:
@ -308,7 +306,7 @@ def _print_collections(base_config, discovered):
cli_logger.info(' - {}{}'.format(
json.dumps(collection),
' ("{}")'.format(coerce_native(displayname))
' ("{}")'.format(displayname)
if displayname and displayname != collection
else ''
))
@ -465,7 +463,7 @@ def handle_storage_init_error(cls, config):
u'{} storage doesn\'t take the parameters: {}'
.format(cls.storage_name, u', '.join(invalid)))
if not problems: # XXX: Py2: Proper reraise
if not problems:
raise e
raise exceptions.UserError(
@ -585,18 +583,3 @@ def assert_permissions(path, wanted):
cli_logger.warning('Correcting permissions of {} from {:o} to {:o}'
.format(path, permissions, wanted))
os.chmod(path, wanted)
def coerce_native(x, encoding='utf-8'):
    # XXX: Remove with Python 3 only
    # Best-effort conversion of *x* to a native string: try str() first,
    # then to_native() with the given encoding, and finally fall back to
    # repr() so this never raises.
    for convert in (str, lambda value: to_native(value, encoding=encoding)):
        try:
            return convert(x)
        except UnicodeError:
            continue
    return repr(x)

View file

@ -5,7 +5,6 @@ import functools
from .. import exceptions, sync
from ..utils import uniq
from ..utils.compat import to_native, to_unicode, with_metaclass
from ..utils.vobject import Item # noqa
@ -25,7 +24,7 @@ class StorageMeta(type):
return super(StorageMeta, cls).__init__(name, bases, d)
class Storage(with_metaclass(StorageMeta)):
class Storage(metaclass=StorageMeta):
'''Superclass of all storages, mainly useful to summarize the interface to
implement.
@ -77,9 +76,7 @@ class Storage(with_metaclass(StorageMeta)):
self.read_only = bool(read_only)
if collection and instance_name:
# XXX: PY2 hack
instance_name = '{}/{}'.format(instance_name,
to_native(collection, 'utf-8'))
instance_name = '{}/{}'.format(instance_name, collection)
self.instance_name = instance_name
self.collection = collection
@ -241,4 +238,4 @@ class Storage(with_metaclass(StorageMeta)):
def normalize_meta_value(value):
return to_unicode(value or u'').strip()
return (value or u'').strip()

View file

@ -2,9 +2,11 @@
import datetime
import logging
import urllib.parse as urlparse
import xml.etree.ElementTree as etree
from inspect import getfullargspec
import requests
from requests.exceptions import HTTPError
@ -12,7 +14,6 @@ from .base import Item, Storage, normalize_meta_value
from .http import HTTP_STORAGE_PARAMETERS, USERAGENT, prepare_auth, \
prepare_client_cert, prepare_verify
from .. import exceptions, utils
from ..utils.compat import PY2, getargspec_ish, text_type, to_native
dav_logger = logging.getLogger(__name__)
@ -22,7 +23,7 @@ CALDAV_DT_FORMAT = '%Y%m%dT%H%M%SZ'
def _generate_path_reserved_chars():
for x in "/?#[]!$&'()*+,;":
x = utils.compat.urlquote(x, '')
x = urlparse.quote(x, '')
yield x.upper()
yield x.lower()
@ -42,13 +43,11 @@ def _normalize_href(base, href):
'''Normalize the href to be a path only relative to hostname and
schema.'''
orig_href = href
base = to_native(base, 'utf-8')
href = to_native(href, 'utf-8')
if not href:
raise ValueError(href)
x = utils.compat.urlparse.urljoin(base, href)
x = utils.compat.urlparse.urlsplit(x).path
x = urlparse.urljoin(base, href)
x = urlparse.urlsplit(x).path
# Encoding issues:
# - https://github.com/owncloud/contacts/issues/581
@ -58,9 +57,9 @@ def _normalize_href(base, href):
if _contains_quoted_reserved_chars(x):
break
old_x = x
x = utils.compat.urlunquote(x)
x = urlparse.unquote(x)
x = utils.compat.urlquote(x, '/@%:')
x = urlparse.quote(x, '/@%:')
if orig_href == x:
dav_logger.debug('Already normalized: {!r}'.format(x))
@ -129,7 +128,7 @@ class Discover(object):
@staticmethod
def _get_collection_from_url(url):
_, collection = url.rstrip('/').rsplit('/', 1)
return utils.compat.urlunquote(collection)
return urlparse.unquote(collection)
def find_dav(self):
try:
@ -166,7 +165,7 @@ class Discover(object):
rv = root.find('.//{DAV:}current-user-principal/{DAV:}href')
if rv is None:
raise InvalidXMLResponse()
return utils.compat.urlparse.urljoin(response.url, rv.text)
return urlparse.urljoin(response.url, rv.text)
def find_home(self, url=None):
if url is None:
@ -182,7 +181,7 @@ class Discover(object):
rv = root.find('.//' + self._homeset_tag + '/{DAV:}href')
if rv is None:
raise InvalidXMLResponse('Couldn\'t find home-set.')
return utils.compat.urlparse.urljoin(response.url, rv.text)
return urlparse.urljoin(response.url, rv.text)
def find_collections(self, url=None):
if url is None:
@ -202,7 +201,7 @@ class Discover(object):
if href is None:
raise InvalidXMLResponse('Missing href tag for collection '
'props.')
href = utils.compat.urlparse.urljoin(r.url, href.text)
href = urlparse.urljoin(r.url, href.text)
if href not in done:
done.add(href)
yield {'href': href}
@ -224,9 +223,9 @@ class Discover(object):
return c
home = self.find_home()
url = utils.compat.urlparse.urljoin(
url = urlparse.urljoin(
home,
utils.compat.urlquote(collection, '/@')
urlparse.quote(collection, '/@')
)
try:
@ -252,7 +251,8 @@ class Discover(object):
</D:set>
</D:mkcol>
'''.format(
to_native(etree.tostring(etree.Element(self._resourcetype)))
etree.tostring(etree.Element(self._resourcetype),
encoding='unicode')
)
response = self.session.request(
@ -299,7 +299,7 @@ class DavSession(object):
@classmethod
def init_and_remaining_args(cls, **kwargs):
argspec = getargspec_ish(cls.__init__)
argspec = getfullargspec(cls.__init__)
self_args, remainder = \
utils.split_dict(kwargs, argspec.args.__contains__)
@ -321,12 +321,12 @@ class DavSession(object):
@utils.cached_property
def parsed_url(self):
return utils.compat.urlparse.urlparse(self.url)
return urlparse.urlparse(self.url)
def request(self, method, path, **kwargs):
url = self.url
if path:
url = utils.compat.urlparse.urljoin(self.url, path)
url = urlparse.urljoin(self.url, path)
more = dict(self._settings)
more.update(kwargs)
@ -379,9 +379,8 @@ class DavStorage(Storage):
self.session_class.init_and_remaining_args(**kwargs)
super(DavStorage, self).__init__(**kwargs)
if not PY2:
import inspect
__init__.__signature__ = inspect.signature(session_class.__init__)
import inspect
__init__.__signature__ = inspect.signature(session_class.__init__)
@classmethod
def discover(cls, **kwargs):
@ -602,7 +601,7 @@ class DavStorage(Storage):
</D:prop>
</D:propfind>
'''.format(
to_native(etree.tostring(etree.Element(xpath)))
etree.tostring(etree.Element(xpath), encoding='unicode')
)
headers = self.session.get_default_headers()
@ -639,7 +638,7 @@ class DavStorage(Storage):
</D:prop>
</D:set>
</D:propertyupdate>
'''.format(to_native(etree.tostring(element)))
'''.format(etree.tostring(element, encoding='unicode'))
self.session.request(
'PROPPATCH', '',
@ -723,11 +722,11 @@ class CaldavStorage(DavStorage):
namespace = dict(datetime.__dict__)
namespace['start_date'] = self.start_date = \
(eval(start_date, namespace)
if isinstance(start_date, (bytes, text_type))
if isinstance(start_date, (bytes, str))
else start_date)
self.end_date = \
(eval(end_date, namespace)
if isinstance(end_date, (bytes, text_type))
if isinstance(end_date, (bytes, str))
else end_date)
@staticmethod

View file

@ -10,7 +10,6 @@ from atomicwrites import atomic_write
from .base import Item, Storage, normalize_meta_value
from .. import exceptions
from ..utils import checkdir, expand_path, generate_href, get_etag_from_file
from ..utils.compat import text_type, to_native
logger = logging.getLogger(__name__)
@ -42,7 +41,7 @@ class FilesystemStorage(Storage):
def __init__(self, path, fileext, encoding='utf-8', post_hook=None,
**kwargs):
super(FilesystemStorage, self).__init__(**kwargs)
path = expand_path(to_native(path, encoding))
path = expand_path(path)
checkdir(path, create=False)
self.path = path
self.encoding = encoding
@ -70,11 +69,9 @@ class FilesystemStorage(Storage):
@classmethod
def create_collection(cls, collection, **kwargs):
kwargs = dict(kwargs)
encoding = kwargs.get('encoding', 'utf-8')
path = to_native(kwargs['path'], encoding)
path = kwargs['path']
if collection is not None:
collection = to_native(collection, encoding)
path = os.path.join(path, collection)
checkdir(expand_path(path), create=True)
@ -84,7 +81,7 @@ class FilesystemStorage(Storage):
return kwargs
def _get_filepath(self, href):
return os.path.join(self.path, to_native(href, self.encoding))
return os.path.join(self.path, href)
def _get_href(self, ident):
return generate_href(ident) + self.fileext
@ -108,7 +105,7 @@ class FilesystemStorage(Storage):
raise
def upload(self, item):
if not isinstance(item.raw, text_type):
if not isinstance(item.raw, str):
raise TypeError('item.raw must be a unicode string.')
try:
@ -151,7 +148,7 @@ class FilesystemStorage(Storage):
if etag != actual_etag:
raise exceptions.WrongEtagError(etag, actual_etag)
if not isinstance(item.raw, text_type):
if not isinstance(item.raw, str):
raise TypeError('item.raw must be a unicode string.')
with atomic_write(fpath, mode='wb', overwrite=True) as f:

View file

@ -2,6 +2,7 @@
import json
import logging
import urllib.parse as urlparse
from atomicwrites import atomic_write
@ -10,7 +11,8 @@ import click
from click_threading import get_ui_worker
from . import base, dav
from .. import exceptions, utils
from .. import exceptions
from ..utils import expand_path, open_graphical_browser
logger = logging.getLogger(__name__)
@ -37,7 +39,7 @@ class GoogleSession(dav.DavSession):
if not have_oauth2:
raise exceptions.UserError('requests-oauthlib not installed')
token_file = utils.expand_path(token_file)
token_file = expand_path(token_file)
ui_worker = get_ui_worker()
f = lambda: self._init_token(token_file, client_id, client_secret)
ui_worker.put(f)
@ -75,7 +77,7 @@ class GoogleSession(dav.DavSession):
access_type='offline', approval_prompt='force')
click.echo('Opening {} ...'.format(authorization_url))
try:
utils.open_graphical_browser(authorization_url)
open_graphical_browser(authorization_url)
except Exception as e:
logger.warning(str(e))
@ -118,7 +120,7 @@ class GoogleCalendarStorage(dav.CaldavStorage):
parts = url.rstrip('/').split('/')
parts.pop()
collection = parts.pop()
return utils.compat.urlunquote(collection)
return urlparse.unquote(collection)
storage_name = 'google_calendar'

View file

@ -1,9 +1,10 @@
# -*- coding: utf-8 -*-
import urllib.parse as urlparse
from .base import Item, Storage
from .. import exceptions
from ..utils import expand_path
from ..utils.compat import iteritems, text_type, urlparse
from ..utils.http import request
from ..utils.vobject import split_collection
@ -39,7 +40,7 @@ def prepare_auth(auth, username, password):
def prepare_verify(verify, verify_fingerprint):
if isinstance(verify, (text_type, bytes)):
if isinstance(verify, (str, bytes)):
verify = expand_path(verify)
elif not isinstance(verify, bool):
raise exceptions.UserError('Invalid value for verify ({}), '
@ -47,7 +48,7 @@ def prepare_verify(verify, verify_fingerprint):
.format(verify))
if verify_fingerprint is not None:
if not isinstance(verify_fingerprint, (bytes, text_type)):
if not isinstance(verify_fingerprint, (bytes, str)):
raise exceptions.UserError('Invalid value for verify_fingerprint '
'({}), must be a string or null.'
.format(verify_fingerprint))
@ -64,7 +65,7 @@ def prepare_verify(verify, verify_fingerprint):
def prepare_client_cert(cert):
if isinstance(cert, (text_type, bytes)):
if isinstance(cert, (str, bytes)):
cert = expand_path(cert)
elif isinstance(cert, list):
cert = tuple(map(prepare_client_cert, cert))
@ -154,7 +155,7 @@ class HttpStorage(Storage):
etag = item.hash
self._items[item.ident] = item, etag
return ((href, etag) for href, (item, etag) in iteritems(self._items))
return ((href, etag) for href, (item, etag) in self._items.items())
def get(self, href):
if self._items is None:

View file

@ -7,6 +7,7 @@ things, and plugging in an account "just works".
'''
import logging
from urllib.parse import quote as urlquote, urljoin
import click
@ -21,9 +22,6 @@ DRAFT_VERSION = '05'
logger = logging.getLogger(__name__)
urljoin = utils.compat.urlparse.urljoin
urlquote = utils.compat.urlquote
def _ensure_slash(dir):
return dir.rstrip('/') + '/'
@ -33,7 +31,7 @@ def _iter_listing(json):
new_listing = '@context' in json # draft-02 and beyond
if new_listing:
json = json['items']
for name, info in utils.compat.iteritems(json):
for name, info in json.items():
if not new_listing:
info = {'ETag': info}
yield name, info

View file

@ -12,7 +12,6 @@ from atomicwrites import atomic_write
from .base import Item, Storage
from .. import exceptions
from ..utils import checkfile, expand_path
from ..utils.compat import iteritems, itervalues
from ..utils.vobject import join_collection, split_collection
logger = logging.getLogger(__name__)
@ -167,7 +166,7 @@ class SingleFileStorage(Storage):
etag = item.hash
self._items[item.ident] = item, etag
return ((href, etag) for href, (item, etag) in iteritems(self._items))
return ((href, etag) for href, (item, etag) in self._items.items())
def get(self, href):
if self._items is None or not self._at_once:
@ -218,7 +217,7 @@ class SingleFileStorage(Storage):
'synchronization and make sure absolutely no other program is '
'writing into the same file.'.format(self.path))
text = join_collection(
(item.raw for item, etag in itervalues(self._items)),
item.raw for item, etag in self._items.values()
)
try:
with atomic_write(self.path, mode='wb', overwrite=True) as f:

View file

@ -14,7 +14,6 @@ import logging
from . import exceptions
from .utils import uniq
from .utils.compat import iteritems, text_type
sync_logger = logging.getLogger(__name__)
@ -92,7 +91,7 @@ class StorageSyncer(object):
def prepare_idents(self):
href_to_status = dict((meta['href'], (ident, meta))
for ident, meta
in iteritems(self.status))
in self.status.items())
prefetch = {}
self.idents = {}
@ -206,11 +205,11 @@ def sync(storage_a, storage_b, status, conflict_resolution=None,
a_info = storage_a.syncer_class(storage_a, dict(
(ident, meta_a)
for ident, (meta_a, meta_b) in iteritems(status)
for ident, (meta_a, meta_b) in status.items()
))
b_info = storage_b.syncer_class(storage_b, dict(
(ident, meta_b)
for ident, (meta_a, meta_b) in iteritems(status)
for ident, (meta_a, meta_b) in status.items()
))
a_info.prepare_idents()
@ -275,7 +274,7 @@ def _action_update(ident, source, dest):
dest_href = dest_meta['href']
dest_etag = dest.storage.update(dest_href, source_meta['item'],
dest_meta['etag'])
assert isinstance(dest_etag, (bytes, text_type))
assert isinstance(dest_etag, (bytes, str))
source.status[ident] = _compress_meta(source_meta)
dest.status[ident] = {

View file

@ -5,7 +5,8 @@ import os
import sys
import uuid
from .compat import getargspec_ish, iteritems, to_unicode
from inspect import getfullargspec
from .. import exceptions
@ -25,7 +26,7 @@ def expand_path(p):
def split_dict(d, f):
'''Puts key into first dict if f(key), otherwise in second dict'''
a, b = split_sequence(iteritems(d), lambda item: f(item[0]))
a, b = split_sequence(d.items(), lambda item: f(item[0]))
return dict(a), dict(b)
@ -77,7 +78,7 @@ def get_storage_init_specs(cls, stop_at=object):
if cls is stop_at:
return ()
spec = getargspec_ish(cls.__init__)
spec = getfullargspec(cls.__init__)
traverse_superclass = getattr(cls.__init__, '_traverse_superclass', True)
if traverse_superclass:
if traverse_superclass is True: # noqa
@ -178,7 +179,7 @@ def generate_href(ident=None, safe=SAFE_UID_CHARS):
UUID.
'''
if not ident or not href_safe(ident, safe):
return to_unicode(uuid.uuid4().hex)
return str(uuid.uuid4())
else:
return ident

View file

@ -1,66 +0,0 @@
# -*- coding: utf-8 -*-
import functools
import sys
PY2 = sys.version_info[0] == 2
if sys.version_info < (3, 3) and \
sys.version_info[:2] != (2, 7): # pragma: no cover
raise RuntimeError(
'vdirsyncer only works on Python versions 2.7.x and 3.3+'
)
def to_unicode(x, encoding='ascii'):
    """Return *x* as text, decoding it with *encoding* if it is bytes."""
    if isinstance(x, text_type):
        return x
    return x.decode(encoding)
def to_bytes(x, encoding='ascii'):
    """Return *x* as bytes, encoding it with *encoding* if it is text."""
    if isinstance(x, bytes):
        return x
    return x.encode(encoding)
def _wrap_native(f, encoding='utf-8'):
    # Wrap *f*, which operates on native strings, so the wrapper accepts
    # either bytes or text and returns the same string type it was given.
    @functools.wraps(f)
    def wrapper(x, *args, **kwargs):
        restore = to_unicode if isinstance(x, text_type) else to_bytes
        result = f(to_native(x, encoding), *args, **kwargs)
        return restore(result, encoding)
    return wrapper
if PY2: # pragma: no cover
import urlparse
import urllib as _urllib
from inspect import getargspec as getargspec_ish # noqa
# Horrible hack to make urllib play nice with u'...' urls from requests
urlquote = _wrap_native(_urllib.quote)
urlunquote = _wrap_native(_urllib.unquote)
text_type = unicode # noqa
iteritems = lambda x: x.iteritems()
itervalues = lambda x: x.itervalues()
to_native = to_bytes
else: # pragma: no cover
import urllib.parse as urlparse
from inspect import getfullargspec as getargspec_ish # noqa
urlquote = urlparse.quote
urlunquote = urlparse.unquote
text_type = str
iteritems = lambda x: x.items()
itervalues = lambda x: x.values()
to_native = to_unicode
def with_metaclass(meta, *bases):
    '''Original code from six, by Benjamin Peterson.'''
    # The temporary class replaces itself with a real class built by *meta*
    # (with *bases* as its bases) as soon as it is subclassed.
    class _TemporaryMeta(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(_TemporaryMeta, 'temporary_class', (), {})

View file

@ -4,14 +4,9 @@ import hashlib
from itertools import chain, tee
from . import cached_property, uniq
from .compat import itervalues, text_type, to_unicode
def _prepare_props(*x):
return tuple(map(to_unicode, x))
IGNORE_PROPS = _prepare_props(
IGNORE_PROPS = (
# PRODID is changed by radicale for some reason after upload
'PRODID',
# X-RADICALE-NAME is used by radicale, because hrefs don't really exist in
@ -34,7 +29,6 @@ IGNORE_PROPS = _prepare_props(
'DTSTAMP',
'UID',
)
del _prepare_props
class Item(object):
@ -43,7 +37,7 @@ class Item(object):
VCARD'''
def __init__(self, raw):
assert isinstance(raw, text_type)
assert isinstance(raw, str)
self._raw = raw
@cached_property
@ -111,7 +105,7 @@ def hash_item(text):
def split_collection(text):
assert isinstance(text, text_type)
assert isinstance(text, str)
inline = []
items = {} # uid => item
ungrouped_items = []
@ -141,7 +135,7 @@ def split_collection(text):
for main in _Component.parse(text, multiple=True):
inner(main, main)
for item in chain(itervalues(items), ungrouped_items):
for item in chain(items.values(), ungrouped_items):
item.subcomponents.extend(inline)
yield u'\r\n'.join(item.dump_lines())
@ -240,7 +234,7 @@ class _Component(object):
def parse(cls, lines, multiple=False):
if isinstance(lines, bytes):
lines = lines.decode('utf-8')
if isinstance(lines, text_type):
if isinstance(lines, str):
lines = lines.splitlines()
stack = []
@ -301,7 +295,7 @@ class _Component(object):
self.props = new_lines
def __setitem__(self, key, val):
assert isinstance(val, text_type)
assert isinstance(val, str)
assert u'\n' not in val
del self[key]
line = u'{}:{}'.format(key, val)