Merge pull request #925 from pimutils/pyupgrade

Set up pyupgrade
Hugo Osvaldo Barrera 2021-08-04 20:43:50 +02:00 committed by GitHub
commit ecb181d9d7
19 changed files with 40 additions and 62 deletions


@@ -22,6 +22,11 @@ repos:
     hooks:
       - id: isort
         name: isort (python)
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.23.3
+    hooks:
+      - id: pyupgrade
+        args: [--py37-plus]
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: "v0.910"
     hooks:
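Note (not part of the diff): once installed, the new hook runs on every commit, and it can be applied to the whole tree with "pre-commit run pyupgrade --all-files". As a rough sketch of what pyupgrade does with --py37-plus, the hypothetical before/after below mirrors the two most common rewrites in the hunks that follow (dropping the redundant open() mode and converting str.format() to f-strings); the function and variable names are made up for illustration.

# Hypothetical example only, not vdirsyncer code.
# Before pyupgrade:
def describe_before(path, count):
    with open(path, "r") as f:          # "r" is already the default mode
        first = f.readline()
    return "{} has {} entries, first: {}".format(path, count, first)

# After pyupgrade --py37-plus:
def describe_after(path, count):
    with open(path) as f:               # redundant mode argument removed
        first = f.readline()
    return f"{path} has {count} entries, first: {first}"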


@@ -1,6 +1,3 @@
-[wheel]
-universal = 1
-
 [tool:pytest]
 addopts =
     --tb=short


@@ -11,7 +11,7 @@ try:
         "url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
     }
 except KeyError as e:
-    pytestmark = pytest.mark.skip("Missing envkey: {}".format(str(e)))
+    pytestmark = pytest.mark.skip(f"Missing envkey: {str(e)}")
 @pytest.mark.flaky(reruns=5)


@@ -65,7 +65,7 @@ class TestHttpStorage(StorageTests):
         """
         assert headers["User-Agent"].startswith("vdirsyncer/")
-        with open(self.tmpfile, "r") as f:
+        with open(self.tmpfile) as f:
             body = f.read()
         return CallbackResult(


@@ -39,7 +39,7 @@ def test_basic(storage, runner, collection):
     assert not result.exception
     assert "No UID" in result.output
     assert "'toobroken.txt' is malformed beyond repair" in result.output
-    (new_fname,) = [x for x in storage.listdir() if "toobroken" not in str(x)]
+    (new_fname,) = (x for x in storage.listdir() if "toobroken" not in str(x))
     assert "UID:" in new_fname.read()


@@ -561,7 +561,7 @@ class SyncMachine(RuleBasedStateMachine):
         _old_update = s.update
         async def upload(item):
-            return ((await _old_upload(item)))[0], "NULL"
+            return (await _old_upload(item))[0], "NULL"
         async def update(href, item, etag):
             return await _old_update(href, item, etag) and "NULL"


@@ -88,9 +88,7 @@ def _validate_collections_param(collections):
                 raise ValueError("Duplicate value.")
             collection_names.add(collection_name)
         except ValueError as e:
-            raise ValueError(
-                "`collections` parameter, position {i}: {e}".format(i=i, e=str(e))
-            )
+            raise ValueError(f"`collections` parameter, position {i}: {str(e)}")
 class _ConfigReader:
@@ -135,7 +133,7 @@ class _ConfigReader:
                 dict(_parse_options(self._parser.items(section), section=section)),
             )
         except ValueError as e:
-            raise exceptions.UserError('Section "{}": {}'.format(section, str(e)))
+            raise exceptions.UserError(f'Section "{section}": {str(e)}')
         _validate_general_section(self._general)
         if getattr(self._file, "name", None):
@@ -152,7 +150,7 @@ def _parse_options(items, section=None):
         try:
             yield key, json.loads(value)
         except ValueError as e:
-            raise ValueError('Section "{}", option "{}": {}'.format(section, key, e))
+            raise ValueError(f'Section "{section}", option "{key}": {e}')
 class Config:
@@ -190,9 +188,7 @@ class Config:
             with open(fname) as f:
                 return cls.from_fileobject(f)
         except Exception as e:
-            raise exceptions.UserError(
-                "Error during reading config {}: {}".format(fname, e)
-            )
+            raise exceptions.UserError(f"Error during reading config {fname}: {e}")
     def get_storage_args(self, storage_name):
         try:


@@ -74,7 +74,7 @@ async def collections_for_pair(
             " before synchronization.".format(pair.name)
         )
-    logger.info("Discovering collections for pair {}".format(pair.name))
+    logger.info(f"Discovering collections for pair {pair.name}")
     a_discovered = _DiscoverResult(pair.config_a, connector=connector)
     b_discovered = _DiscoverResult(pair.config_b, connector=connector)


@@ -30,9 +30,7 @@ def expand_fetch_params(config):
 @synchronized()
 def _fetch_value(opts, key):
     if not isinstance(opts, list):
-        raise ValueError(
-            "Invalid value for {}: Expected a list, found {!r}.".format(key, opts)
-        )
+        raise ValueError(f"Invalid value for {key}: Expected a list, found {opts!r}.")
     if not opts:
         raise ValueError("Expected list of length > 0.")
@@ -58,7 +56,7 @@ def _fetch_value(opts, key):
     except KeyError:
         raise exceptions.UserError(f"Unknown strategy: {strategy}")
-    logger.debug("Fetching value for {} with {} strategy.".format(key, strategy))
+    logger.debug(f"Fetching value for {key} with {strategy} strategy.")
     try:
         rv = strategy_fn(*opts[1:])
     except (click.Abort, KeyboardInterrupt) as e:


@@ -93,9 +93,7 @@ async def discover_collections(pair, **kwargs):
     collections = [c for c, (a, b) in rv]
     if collections == [None]:
         collections = None
-    cli_logger.info(
-        "Saved for {}: collections = {}".format(pair.name, json.dumps(collections))
-    )
+    cli_logger.info(f"Saved for {pair.name}: collections = {json.dumps(collections)}")
 async def repair_collection(


@@ -169,9 +169,7 @@ def get_status_path(base_path, pair, collection=None, data_type=None):
     if os.path.isfile(path) and data_type == "items":
         new_path = path + ".items"
         # XXX: Legacy migration
-        cli_logger.warning(
-            "Migrating statuses: Renaming {} to {}".format(path, new_path)
-        )
+        cli_logger.warning(f"Migrating statuses: Renaming {path} to {new_path}")
         os.rename(path, new_path)
     path += "." + data_type


@@ -53,7 +53,7 @@ def prepare_auth(auth, username, password):
             else:
                 return GuessAuth(username, password)
         else:
-            raise exceptions.UserError("Unknown authentication method: {}".format(auth))
+            raise exceptions.UserError(f"Unknown authentication method: {auth}")
     elif auth:
         raise exceptions.UserError(
             "You need to specify username and password "


@@ -18,7 +18,7 @@ async def repair_storage(storage, repair_unsafe_uid):
     all_hrefs = await aiostream.stream.list(storage.list())
     for i, (href, _) in enumerate(all_hrefs):
         item, etag = await storage.get(href)
-        logger.info("[{}/{}] Processing {}".format(i, len(all_hrefs), href))
+        logger.info(f"[{i}/{len(all_hrefs)}] Processing {href}")
         try:
             new_item = repair_item(href, item, seen_uids, repair_unsafe_uid)


@@ -89,7 +89,7 @@ def _normalize_href(base, href):
     if orig_href == x:
         dav_logger.debug(f"Already normalized: {x!r}")
     else:
-        dav_logger.debug("Normalized URL from {!r} to {!r}".format(orig_href, x))
+        dav_logger.debug(f"Normalized URL from {orig_href!r} to {x!r}")
     return x
@@ -554,9 +554,7 @@ class DAVStorage(Storage):
         for href, etag, prop in self._parse_prop_responses(root):
             raw = prop.find(self.get_multi_data_query)
             if raw is None:
-                dav_logger.warning(
-                    "Skipping {}, the item content is missing.".format(href)
-                )
+                dav_logger.warning(f"Skipping {href}, the item content is missing.")
                 continue
             raw = raw.text or ""
@@ -570,11 +568,9 @@ class DAVStorage(Storage):
                 hrefs_left.remove(href)
             except KeyError:
                 if href in hrefs:
-                    dav_logger.warning("Server sent item twice: {}".format(href))
+                    dav_logger.warning(f"Server sent item twice: {href}")
                 else:
-                    dav_logger.warning(
-                        "Server sent unsolicited item: {}".format(href)
-                    )
+                    dav_logger.warning(f"Server sent unsolicited item: {href}")
             else:
                 rv.append((href, Item(raw), etag))
         for href in hrefs_left:
@@ -652,12 +648,12 @@ class DAVStorage(Storage):
                 # https://github.com/pimutils/vdirsyncer/issues/88
                 # - Davmail
                 # https://github.com/pimutils/vdirsyncer/issues/144
-                dav_logger.warning("Skipping identical href: {!r}".format(href))
+                dav_logger.warning(f"Skipping identical href: {href!r}")
                 continue
             props = response.findall("{DAV:}propstat/{DAV:}prop")
             if props is None or not len(props):
-                dav_logger.debug("Skipping {!r}, properties are missing.".format(href))
+                dav_logger.debug(f"Skipping {href!r}, properties are missing.")
                 continue
             else:
                 props = _merge_xml(props)
@@ -668,9 +664,7 @@ class DAVStorage(Storage):
             etag = getattr(props.find("{DAV:}getetag"), "text", "")
             if not etag:
-                dav_logger.debug(
-                    "Skipping {!r}, etag property is missing.".format(href)
-                )
+                dav_logger.debug(f"Skipping {href!r}, etag property is missing.")
                 continue
             contenttype = getattr(props.find("{DAV:}getcontenttype"), "text", None)


@@ -56,7 +56,7 @@ class _Session:
         auth_token = self._get_auth_token()
         if not auth_token:
             password = click.prompt(
-                "Enter service password for {}".format(self.email), hide_input=True
+                f"Enter service password for {self.email}", hide_input=True
             )
             auth_token = etesync.Authenticator(server_url).get_auth_token(
                 self.email, password


@@ -29,7 +29,7 @@ class FilesystemStorage(Storage):
         encoding="utf-8",
         post_hook=None,
         fileignoreext=".tmp",
-        **kwargs
+        **kwargs,
     ):
         super().__init__(**kwargs)
         path = expand_path(path)
@@ -168,13 +168,11 @@ class FilesystemStorage(Storage):
             os.remove(fpath)
     def _run_post_hook(self, fpath):
-        logger.info(
-            "Calling post_hook={} with argument={}".format(self.post_hook, fpath)
-        )
+        logger.info(f"Calling post_hook={self.post_hook} with argument={fpath}")
         try:
             subprocess.call([self.post_hook, fpath])
         except OSError as e:
-            logger.warning("Error executing external hook: {}".format(str(e)))
+            logger.warning(f"Error executing external hook: {str(e)}")
     async def get_meta(self, key):
         fpath = os.path.join(self.path, key)


@@ -227,7 +227,7 @@ class Update(Action):
             meta = ItemMetadata(hash=self.item.hash)
         else:
             sync_logger.info(
-                "Copying (updating) item {} to {}".format(self.ident, self.dest.storage)
+                f"Copying (updating) item {self.ident} to {self.dest.storage}"
             )
             meta = self.dest.status.get_new(self.ident)
             meta.etag = await self.dest.storage.update(meta.href, self.item, meta.etag)
@@ -243,9 +243,7 @@ class Delete(Action):
     async def _run_impl(self, a, b):
         meta = self.dest.status.get_new(self.ident)
         if not self.dest.storage.read_only:
-            sync_logger.info(
-                "Deleting item {} from {}".format(self.ident, self.dest.storage)
-            )
+            sync_logger.info(f"Deleting item {self.ident} from {self.dest.storage}")
             await self.dest.storage.delete(meta.href, meta.etag)
         self.dest.status.remove_ident(self.ident)
@@ -257,9 +255,7 @@ class ResolveConflict(Action):
     async def run(self, a, b, conflict_resolution, partial_sync):
         with self.auto_rollback(a, b):
-            sync_logger.info(
-                "Doing conflict resolution for item {}...".format(self.ident)
-            )
+            sync_logger.info(f"Doing conflict resolution for item {self.ident}...")
             meta_a = a.status.get_new(self.ident)
             meta_b = b.status.get_new(self.ident)
@@ -290,7 +286,7 @@ class ResolveConflict(Action):
                 )
             else:
                 raise UserError(
-                    "Invalid conflict resolution mode: {!r}".format(conflict_resolution)
+                    f"Invalid conflict resolution mode: {conflict_resolution!r}"
                 )


@@ -123,9 +123,7 @@ def checkdir(path, create=False, mode=0o750):
         if create:
             os.makedirs(path, mode)
         else:
-            raise exceptions.CollectionNotFound(
-                "Directory {} does not exist.".format(path)
-            )
+            raise exceptions.CollectionNotFound(f"Directory {path} does not exist.")
 def checkfile(path, create=False):
@@ -143,7 +141,7 @@ def checkfile(path, create=False):
             with open(path, "wb"):
                 pass
         else:
-            raise exceptions.CollectionNotFound("File {} does not exist.".format(path))
+            raise exceptions.CollectionNotFound(f"File {path} does not exist.")
 class cached_property:


@@ -168,7 +168,7 @@ def _split_collection_impl(item, main, inline, items, ungrouped_items):
         for subitem in item.subcomponents:
             _split_collection_impl(subitem, item, inline, items, ungrouped_items)
     else:
-        raise ValueError("Unknown component: {}".format(item.name))
+        raise ValueError(f"Unknown component: {item.name}")
 _default_join_wrappers = {
@@ -287,12 +287,12 @@ class _Component:
                 if line.strip():
                     stack[-1].props.append(line)
             except IndexError:
-                raise ValueError("Parsing error at line {}".format(_i + 1))
+                raise ValueError(f"Parsing error at line {_i + 1}")
         if multiple:
             return rv
         elif len(rv) != 1:
-            raise ValueError("Found {} components, expected one.".format(len(rv)))
+            raise ValueError(f"Found {len(rv)} components, expected one.")
         else:
             return rv[0]