Merge pull request #925 from pimutils/pyupgrade

Set up pyupgrade
This commit is contained in:
Hugo Osvaldo Barrera 2021-08-04 20:43:50 +02:00 committed by GitHub
commit ecb181d9d7
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 40 additions and 62 deletions

View file

@@ -22,6 +22,11 @@ repos:
hooks:
- id: isort
name: isort (python)
- repo: https://github.com/asottile/pyupgrade
rev: v2.23.3
hooks:
- id: pyupgrade
args: [--py37-plus]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v0.910"
hooks:

View file

@@ -1,6 +1,3 @@
[wheel]
universal = 1
[tool:pytest]
addopts =
--tb=short

View file

@@ -11,7 +11,7 @@ try:
"url": "https://brutus.lostpackets.de/davical-test/caldav.php/",
}
except KeyError as e:
pytestmark = pytest.mark.skip("Missing envkey: {}".format(str(e)))
pytestmark = pytest.mark.skip(f"Missing envkey: {str(e)}")
@pytest.mark.flaky(reruns=5)

View file

@@ -65,7 +65,7 @@ class TestHttpStorage(StorageTests):
"""
assert headers["User-Agent"].startswith("vdirsyncer/")
with open(self.tmpfile, "r") as f:
with open(self.tmpfile) as f:
body = f.read()
return CallbackResult(

View file

@@ -39,7 +39,7 @@ def test_basic(storage, runner, collection):
assert not result.exception
assert "No UID" in result.output
assert "'toobroken.txt' is malformed beyond repair" in result.output
(new_fname,) = [x for x in storage.listdir() if "toobroken" not in str(x)]
(new_fname,) = (x for x in storage.listdir() if "toobroken" not in str(x))
assert "UID:" in new_fname.read()

View file

@@ -561,7 +561,7 @@ class SyncMachine(RuleBasedStateMachine):
_old_update = s.update
async def upload(item):
return ((await _old_upload(item)))[0], "NULL"
return (await _old_upload(item))[0], "NULL"
async def update(href, item, etag):
return await _old_update(href, item, etag) and "NULL"

View file

@@ -88,9 +88,7 @@ def _validate_collections_param(collections):
raise ValueError("Duplicate value.")
collection_names.add(collection_name)
except ValueError as e:
raise ValueError(
"`collections` parameter, position {i}: {e}".format(i=i, e=str(e))
)
raise ValueError(f"`collections` parameter, position {i}: {str(e)}")
class _ConfigReader:
@@ -135,7 +133,7 @@ class _ConfigReader:
dict(_parse_options(self._parser.items(section), section=section)),
)
except ValueError as e:
raise exceptions.UserError('Section "{}": {}'.format(section, str(e)))
raise exceptions.UserError(f'Section "{section}": {str(e)}')
_validate_general_section(self._general)
if getattr(self._file, "name", None):
@@ -152,7 +150,7 @@ def _parse_options(items, section=None):
try:
yield key, json.loads(value)
except ValueError as e:
raise ValueError('Section "{}", option "{}": {}'.format(section, key, e))
raise ValueError(f'Section "{section}", option "{key}": {e}')
class Config:
@@ -190,9 +188,7 @@ class Config:
with open(fname) as f:
return cls.from_fileobject(f)
except Exception as e:
raise exceptions.UserError(
"Error during reading config {}: {}".format(fname, e)
)
raise exceptions.UserError(f"Error during reading config {fname}: {e}")
def get_storage_args(self, storage_name):
try:

View file

@@ -74,7 +74,7 @@ async def collections_for_pair(
" before synchronization.".format(pair.name)
)
logger.info("Discovering collections for pair {}".format(pair.name))
logger.info(f"Discovering collections for pair {pair.name}")
a_discovered = _DiscoverResult(pair.config_a, connector=connector)
b_discovered = _DiscoverResult(pair.config_b, connector=connector)

View file

@@ -30,9 +30,7 @@ def expand_fetch_params(config):
@synchronized()
def _fetch_value(opts, key):
if not isinstance(opts, list):
raise ValueError(
"Invalid value for {}: Expected a list, found {!r}.".format(key, opts)
)
raise ValueError(f"Invalid value for {key}: Expected a list, found {opts!r}.")
if not opts:
raise ValueError("Expected list of length > 0.")
@@ -58,7 +56,7 @@ def _fetch_value(opts, key):
except KeyError:
raise exceptions.UserError(f"Unknown strategy: {strategy}")
logger.debug("Fetching value for {} with {} strategy.".format(key, strategy))
logger.debug(f"Fetching value for {key} with {strategy} strategy.")
try:
rv = strategy_fn(*opts[1:])
except (click.Abort, KeyboardInterrupt) as e:

View file

@@ -93,9 +93,7 @@ async def discover_collections(pair, **kwargs):
collections = [c for c, (a, b) in rv]
if collections == [None]:
collections = None
cli_logger.info(
"Saved for {}: collections = {}".format(pair.name, json.dumps(collections))
)
cli_logger.info(f"Saved for {pair.name}: collections = {json.dumps(collections)}")
async def repair_collection(

View file

@@ -169,9 +169,7 @@ def get_status_path(base_path, pair, collection=None, data_type=None):
if os.path.isfile(path) and data_type == "items":
new_path = path + ".items"
# XXX: Legacy migration
cli_logger.warning(
"Migrating statuses: Renaming {} to {}".format(path, new_path)
)
cli_logger.warning(f"Migrating statuses: Renaming {path} to {new_path}")
os.rename(path, new_path)
path += "." + data_type

View file

@@ -53,7 +53,7 @@ def prepare_auth(auth, username, password):
else:
return GuessAuth(username, password)
else:
raise exceptions.UserError("Unknown authentication method: {}".format(auth))
raise exceptions.UserError(f"Unknown authentication method: {auth}")
elif auth:
raise exceptions.UserError(
"You need to specify username and password "

View file

@@ -18,7 +18,7 @@ async def repair_storage(storage, repair_unsafe_uid):
all_hrefs = await aiostream.stream.list(storage.list())
for i, (href, _) in enumerate(all_hrefs):
item, etag = await storage.get(href)
logger.info("[{}/{}] Processing {}".format(i, len(all_hrefs), href))
logger.info(f"[{i}/{len(all_hrefs)}] Processing {href}")
try:
new_item = repair_item(href, item, seen_uids, repair_unsafe_uid)

View file

@@ -89,7 +89,7 @@ def _normalize_href(base, href):
if orig_href == x:
dav_logger.debug(f"Already normalized: {x!r}")
else:
dav_logger.debug("Normalized URL from {!r} to {!r}".format(orig_href, x))
dav_logger.debug(f"Normalized URL from {orig_href!r} to {x!r}")
return x
@@ -554,9 +554,7 @@ class DAVStorage(Storage):
for href, etag, prop in self._parse_prop_responses(root):
raw = prop.find(self.get_multi_data_query)
if raw is None:
dav_logger.warning(
"Skipping {}, the item content is missing.".format(href)
)
dav_logger.warning(f"Skipping {href}, the item content is missing.")
continue
raw = raw.text or ""
@@ -570,11 +568,9 @@
hrefs_left.remove(href)
except KeyError:
if href in hrefs:
dav_logger.warning("Server sent item twice: {}".format(href))
dav_logger.warning(f"Server sent item twice: {href}")
else:
dav_logger.warning(
"Server sent unsolicited item: {}".format(href)
)
dav_logger.warning(f"Server sent unsolicited item: {href}")
else:
rv.append((href, Item(raw), etag))
for href in hrefs_left:
@@ -652,12 +648,12 @@
# https://github.com/pimutils/vdirsyncer/issues/88
# - Davmail
# https://github.com/pimutils/vdirsyncer/issues/144
dav_logger.warning("Skipping identical href: {!r}".format(href))
dav_logger.warning(f"Skipping identical href: {href!r}")
continue
props = response.findall("{DAV:}propstat/{DAV:}prop")
if props is None or not len(props):
dav_logger.debug("Skipping {!r}, properties are missing.".format(href))
dav_logger.debug(f"Skipping {href!r}, properties are missing.")
continue
else:
props = _merge_xml(props)
@@ -668,9 +664,7 @@
etag = getattr(props.find("{DAV:}getetag"), "text", "")
if not etag:
dav_logger.debug(
"Skipping {!r}, etag property is missing.".format(href)
)
dav_logger.debug(f"Skipping {href!r}, etag property is missing.")
continue
contenttype = getattr(props.find("{DAV:}getcontenttype"), "text", None)

View file

@@ -56,7 +56,7 @@ class _Session:
auth_token = self._get_auth_token()
if not auth_token:
password = click.prompt(
"Enter service password for {}".format(self.email), hide_input=True
f"Enter service password for {self.email}", hide_input=True
)
auth_token = etesync.Authenticator(server_url).get_auth_token(
self.email, password

View file

@@ -29,7 +29,7 @@ class FilesystemStorage(Storage):
encoding="utf-8",
post_hook=None,
fileignoreext=".tmp",
**kwargs
**kwargs,
):
super().__init__(**kwargs)
path = expand_path(path)
@@ -168,13 +168,11 @@ class FilesystemStorage(Storage):
os.remove(fpath)
def _run_post_hook(self, fpath):
logger.info(
"Calling post_hook={} with argument={}".format(self.post_hook, fpath)
)
logger.info(f"Calling post_hook={self.post_hook} with argument={fpath}")
try:
subprocess.call([self.post_hook, fpath])
except OSError as e:
logger.warning("Error executing external hook: {}".format(str(e)))
logger.warning(f"Error executing external hook: {str(e)}")
async def get_meta(self, key):
fpath = os.path.join(self.path, key)

View file

@@ -227,7 +227,7 @@ class Update(Action):
meta = ItemMetadata(hash=self.item.hash)
else:
sync_logger.info(
"Copying (updating) item {} to {}".format(self.ident, self.dest.storage)
f"Copying (updating) item {self.ident} to {self.dest.storage}"
)
meta = self.dest.status.get_new(self.ident)
meta.etag = await self.dest.storage.update(meta.href, self.item, meta.etag)
@@ -243,9 +243,7 @@ class Delete(Action):
async def _run_impl(self, a, b):
meta = self.dest.status.get_new(self.ident)
if not self.dest.storage.read_only:
sync_logger.info(
"Deleting item {} from {}".format(self.ident, self.dest.storage)
)
sync_logger.info(f"Deleting item {self.ident} from {self.dest.storage}")
await self.dest.storage.delete(meta.href, meta.etag)
self.dest.status.remove_ident(self.ident)
@@ -257,9 +255,7 @@ class ResolveConflict(Action):
async def run(self, a, b, conflict_resolution, partial_sync):
with self.auto_rollback(a, b):
sync_logger.info(
"Doing conflict resolution for item {}...".format(self.ident)
)
sync_logger.info(f"Doing conflict resolution for item {self.ident}...")
meta_a = a.status.get_new(self.ident)
meta_b = b.status.get_new(self.ident)
@@ -290,7 +286,7 @@ class ResolveConflict(Action):
)
else:
raise UserError(
"Invalid conflict resolution mode: {!r}".format(conflict_resolution)
f"Invalid conflict resolution mode: {conflict_resolution!r}"
)

View file

@@ -123,9 +123,7 @@ def checkdir(path, create=False, mode=0o750):
if create:
os.makedirs(path, mode)
else:
raise exceptions.CollectionNotFound(
"Directory {} does not exist.".format(path)
)
raise exceptions.CollectionNotFound(f"Directory {path} does not exist.")
def checkfile(path, create=False):
@@ -143,7 +141,7 @@ def checkfile(path, create=False):
with open(path, "wb"):
pass
else:
raise exceptions.CollectionNotFound("File {} does not exist.".format(path))
raise exceptions.CollectionNotFound(f"File {path} does not exist.")
class cached_property:

View file

@@ -168,7 +168,7 @@ def _split_collection_impl(item, main, inline, items, ungrouped_items):
for subitem in item.subcomponents:
_split_collection_impl(subitem, item, inline, items, ungrouped_items)
else:
raise ValueError("Unknown component: {}".format(item.name))
raise ValueError(f"Unknown component: {item.name}")
_default_join_wrappers = {
@@ -287,12 +287,12 @@ class _Component:
if line.strip():
stack[-1].props.append(line)
except IndexError:
raise ValueError("Parsing error at line {}".format(_i + 1))
raise ValueError(f"Parsing error at line {_i + 1}")
if multiple:
return rv
elif len(rv) != 1:
raise ValueError("Found {} components, expected one.".format(len(rv)))
raise ValueError(f"Found {len(rv)} components, expected one.")
else:
return rv[0]