pre-commit run --all

This commit is contained in:
Hugo Osvaldo Barrera 2023-09-24 12:39:19 +02:00
parent dcd3b7a359
commit 301aa0e16f
17 changed files with 81 additions and 112 deletions

View file

@@ -383,7 +383,7 @@ class StorageTests:
uid = str(uuid.uuid4()) uid = str(uuid.uuid4())
item = Item( item = Item(
textwrap.dedent( textwrap.dedent(
""" f"""
BEGIN:VCALENDAR BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
BEGIN:VEVENT BEGIN:VEVENT
@@ -417,9 +417,7 @@ class StorageTests:
TRANSP:OPAQUE TRANSP:OPAQUE
END:VEVENT END:VEVENT
END:VCALENDAR END:VCALENDAR
""".format( """
uid=uid
)
).strip() ).strip()
) )

View file

@@ -39,8 +39,8 @@ def test_xml_utilities():
def test_xml_specialchars(char): def test_xml_specialchars(char):
x = _parse_xml( x = _parse_xml(
'<?xml version="1.0" encoding="UTF-8" ?>' '<?xml version="1.0" encoding="UTF-8" ?>'
"<foo>ye{}s\r\n" f"<foo>ye{chr(char)}s\r\n"
"hello</foo>".format(chr(char)).encode("ascii") "hello</foo>".encode("ascii")
) )
if char in _BAD_XML_CHARS: if char in _BAD_XML_CHARS:

View file

@@ -152,7 +152,7 @@ def test_discover_direct_path(tmpdir, runner):
def test_null_collection_with_named_collection(tmpdir, runner): def test_null_collection_with_named_collection(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@@ -160,15 +160,13 @@ def test_null_collection_with_named_collection(tmpdir, runner):
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{base}/foo/" path = "{str(tmpdir)}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "singlefile" type = "singlefile"
path = "{base}/bar.txt" path = "{str(tmpdir)}/bar.txt"
""".format( """
base=str(tmpdir)
)
) )
) )
@@ -221,7 +219,7 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@@ -229,14 +227,12 @@ def test_collection_required(a_requires, b_requires, tmpdir, runner, monkeypatch
[storage foo] [storage foo]
type = "test" type = "test"
require_collection = {a} require_collection = {json.dumps(a_requires)}
[storage bar] [storage bar]
type = "test" type = "test"
require_collection = {b} require_collection = {json.dumps(b_requires)}
""".format( """
a=json.dumps(a_requires), b=json.dumps(b_requires)
)
) )
) )

View file

@@ -4,7 +4,7 @@ from textwrap import dedent
def test_get_password_from_command(tmpdir, runner): def test_get_password_from_command(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@@ -12,16 +12,14 @@ def test_get_password_from_command(tmpdir, runner):
[storage foo] [storage foo]
type.fetch = ["shell", "echo filesystem"] type.fetch = ["shell", "echo filesystem"]
path = "{base}/foo/" path = "{str(tmpdir)}/foo/"
fileext.fetch = ["command", "echo", ".txt"] fileext.fetch = ["command", "echo", ".txt"]
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{str(tmpdir)}/bar/"
fileext.fetch = ["prompt", "Fileext for bar"] fileext.fetch = ["prompt", "Fileext for bar"]
""".format( """
base=str(tmpdir)
)
) )
) )

View file

@@ -280,24 +280,22 @@ def test_multiple_pairs(tmpdir, runner):
def test_create_collections(collections, tmpdir, runner): def test_create_collections(collections, tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = {colls} collections = {json.dumps(list(collections))}
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{base}/foo/" path = "{str(tmpdir)}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{str(tmpdir)}/bar/"
fileext = ".txt" fileext = ".txt"
""".format( """
base=str(tmpdir), colls=json.dumps(list(collections))
)
) )
) )
@@ -315,7 +313,7 @@ def test_create_collections(collections, tmpdir, runner):
def test_ident_conflict(tmpdir, runner): def test_ident_conflict(tmpdir, runner):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@@ -323,16 +321,14 @@ def test_ident_conflict(tmpdir, runner):
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
path = "{base}/foo/" path = "{str(tmpdir)}/foo/"
fileext = ".txt" fileext = ".txt"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
path = "{base}/bar/" path = "{str(tmpdir)}/bar/"
fileext = ".txt" fileext = ".txt"
""".format( """
base=str(tmpdir)
)
) )
) )
@@ -371,7 +367,7 @@ def test_ident_conflict(tmpdir, runner):
def test_unknown_storage(tmpdir, runner, existing, missing): def test_unknown_storage(tmpdir, runner, existing, missing):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
@@ -379,11 +375,9 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
[storage {existing}] [storage {existing}]
type = "filesystem" type = "filesystem"
path = "{base}/{existing}/" path = "{str(tmpdir)}/{existing}/"
fileext = ".txt" fileext = ".txt"
""".format( """
base=str(tmpdir), existing=existing
)
) )
) )
@@ -393,10 +387,8 @@ def test_unknown_storage(tmpdir, runner, existing, missing):
assert result.exception assert result.exception
assert ( assert (
"Storage '{missing}' not found. " f"Storage '{missing}' not found. "
"These are the configured storages: ['{existing}']".format( f"These are the configured storages: ['{existing}']"
missing=missing, existing=existing
)
) in result.output ) in result.output
@@ -416,25 +408,23 @@ def test_no_configured_pairs(tmpdir, runner, cmd):
def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar): def test_conflict_resolution(tmpdir, runner, resolution, expect_foo, expect_bar):
runner.write_with_general( runner.write_with_general(
dedent( dedent(
""" f"""
[pair foobar] [pair foobar]
a = "foo" a = "foo"
b = "bar" b = "bar"
collections = null collections = null
conflict_resolution = {val} conflict_resolution = {json.dumps(resolution)}
[storage foo] [storage foo]
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
path = "{base}/foo" path = "{str(tmpdir)}/foo"
[storage bar] [storage bar]
type = "filesystem" type = "filesystem"
fileext = ".txt" fileext = ".txt"
path = "{base}/bar" path = "{str(tmpdir)}/bar"
""".format( """
base=str(tmpdir), val=json.dumps(resolution)
)
) )
) )
@@ -526,13 +516,11 @@ def test_fetch_only_necessary_params(tmpdir, runner):
fetch_script = tmpdir.join("fetch_script") fetch_script = tmpdir.join("fetch_script")
fetch_script.write( fetch_script.write(
dedent( dedent(
""" f"""
set -e set -e
touch "{}" touch "{str(fetched_file)}"
echo ".txt" echo ".txt"
""".format( """
str(fetched_file)
)
) )
) )

View file

@@ -17,10 +17,10 @@ except ImportError: # pragma: no cover
) )
def _check_python_version(): # pragma: no cover def _check_python_version():
import sys import sys
if sys.version_info < (3, 7, 0): if sys.version_info < (3, 7, 0): # noqa: UP036
print("vdirsyncer requires at least Python 3.7.") print("vdirsyncer requires at least Python 3.7.")
sys.exit(1) sys.exit(1)

View file

@@ -54,7 +54,7 @@ def app(ctx, config: str):
cli_logger.warning( cli_logger.warning(
"Vdirsyncer currently does not support Windows. " "Vdirsyncer currently does not support Windows. "
"You will likely encounter bugs. " "You will likely encounter bugs. "
"See {}/535 for more information.".format(BUGTRACKER_HOME) f"See {BUGTRACKER_HOME}/535 for more information."
) )
if not ctx.config: if not ctx.config:

View file

@@ -27,9 +27,9 @@ def validate_section_name(name, section_type):
if invalid: if invalid:
chars_display = "".join(sorted(SECTION_NAME_CHARS)) chars_display = "".join(sorted(SECTION_NAME_CHARS))
raise exceptions.UserError( raise exceptions.UserError(
'The {}-section "{}" contains invalid characters. Only ' f'The {section_type}-section "{name}" contains invalid characters. Only '
"the following characters are allowed for storage and " "the following characters are allowed for storage and "
"pair names:\n{}".format(section_type, name, chars_display) f"pair names:\n{chars_display}"
) )
@@ -51,7 +51,7 @@ def _validate_general_section(general_config: dict[str, str]):
if problems: if problems:
raise exceptions.UserError( raise exceptions.UserError(
"Invalid general section. Copy the example " "Invalid general section. Copy the example "
"config from the repository and edit it: {}".format(PROJECT_HOME), f"config from the repository and edit it: {PROJECT_HOME}",
problems=problems, problems=problems,
) )
@@ -210,10 +210,8 @@ class Config:
args = self.storages[storage_name] args = self.storages[storage_name]
except KeyError: except KeyError:
raise exceptions.UserError( raise exceptions.UserError(
"Storage {!r} not found. " f"Storage {storage_name!r} not found. "
"These are the configured storages: {}".format( f"These are the configured storages: {list(self.storages)}"
storage_name, list(self.storages)
)
) )
else: else:
return expand_fetch_params(args) return expand_fetch_params(args)

View file

@@ -66,12 +66,12 @@ async def collections_for_pair(
elif rv: elif rv:
raise exceptions.UserError( raise exceptions.UserError(
"Detected change in config file, " "Detected change in config file, "
"please run `vdirsyncer discover {}`.".format(pair.name) f"please run `vdirsyncer discover {pair.name}`."
) )
else: else:
raise exceptions.UserError( raise exceptions.UserError(
"Please run `vdirsyncer discover {}` " f"Please run `vdirsyncer discover {pair.name}` "
" before synchronization.".format(pair.name) " before synchronization."
) )
logger.info(f"Discovering collections for pair {pair.name}") logger.info(f"Discovering collections for pair {pair.name}")
@@ -271,8 +271,8 @@ async def _print_collections(
logger.debug("".join(traceback.format_tb(sys.exc_info()[2]))) logger.debug("".join(traceback.format_tb(sys.exc_info()[2])))
logger.warning( logger.warning(
"Failed to discover collections for {}, use `-vdebug` " f"Failed to discover collections for {instance_name}, use `-vdebug` "
"to see the full traceback.".format(instance_name) "to see the full traceback."
) )
return return
logger.info(f"{instance_name}:") logger.info(f"{instance_name}:")

View file

@@ -65,8 +65,7 @@ def _fetch_value(opts, key):
else: else:
if not rv: if not rv:
raise exceptions.UserError( raise exceptions.UserError(
"Empty value for {}, this most likely " f"Empty value for {key}, this most likely indicates an error."
"indicates an error.".format(key)
) )
password_cache[cache_key] = rv password_cache[cache_key] = rv
return rv return rv

View file

@@ -88,23 +88,19 @@ def handle_cli_error(status_name=None, e=None):
) )
except PartialSync as e: except PartialSync as e:
cli_logger.error( cli_logger.error(
"{status_name}: Attempted change on {storage}, which is read-only" f"{status_name}: Attempted change on {e.storage}, which is read-only"
". Set `partial_sync` in your pair section to `ignore` to ignore " ". Set `partial_sync` in your pair section to `ignore` to ignore "
"those changes, or `revert` to revert them on the other side.".format( "those changes, or `revert` to revert them on the other side."
status_name=status_name, storage=e.storage
)
) )
except SyncConflict as e: except SyncConflict as e:
cli_logger.error( cli_logger.error(
"{status_name}: One item changed on both sides. Resolve this " f"{status_name}: One item changed on both sides. Resolve this "
"conflict manually, or by setting the `conflict_resolution` " "conflict manually, or by setting the `conflict_resolution` "
"parameter in your config file.\n" "parameter in your config file.\n"
"See also {docs}/config.html#pair-section\n" f"See also {DOCS_HOME}/config.html#pair-section\n"
"Item ID: {e.ident}\n" f"Item ID: {e.ident}\n"
"Item href on side A: {e.href_a}\n" f"Item href on side A: {e.href_a}\n"
"Item href on side B: {e.href_b}\n".format( f"Item href on side B: {e.href_b}\n"
status_name=status_name, e=e, docs=DOCS_HOME
)
) )
except IdentConflict as e: except IdentConflict as e:
cli_logger.error( cli_logger.error(
@@ -125,17 +121,17 @@ def handle_cli_error(status_name=None, e=None):
pass pass
except exceptions.PairNotFound as e: except exceptions.PairNotFound as e:
cli_logger.error( cli_logger.error(
"Pair {pair_name} does not exist. Please check your " f"Pair {e.pair_name} does not exist. Please check your "
"configuration file and make sure you've typed the pair name " "configuration file and make sure you've typed the pair name "
"correctly".format(pair_name=e.pair_name) "correctly"
) )
except exceptions.InvalidResponse as e: except exceptions.InvalidResponse as e:
cli_logger.error( cli_logger.error(
"The server returned something vdirsyncer doesn't understand. " "The server returned something vdirsyncer doesn't understand. "
"Error message: {!r}\n" f"Error message: {e!r}\n"
"While this is most likely a serverside problem, the vdirsyncer " "While this is most likely a serverside problem, the vdirsyncer "
"devs are generally interested in such bugs. Please report it in " "devs are generally interested in such bugs. Please report it in "
"the issue tracker at {}".format(e, BUGTRACKER_HOME) f"the issue tracker at {BUGTRACKER_HOME}"
) )
except exceptions.CollectionRequired: except exceptions.CollectionRequired:
cli_logger.error( cli_logger.error(
@@ -367,7 +363,7 @@ async def handle_collection_not_found(config, collection, e=None):
cli_logger.error(e) cli_logger.error(e)
raise exceptions.UserError( raise exceptions.UserError(
'Unable to find or create collection "{collection}" for ' f'Unable to find or create collection "{collection}" for '
'storage "{storage}". Please create the collection ' f'storage "{storage_name}". Please create the collection '
"yourself.".format(collection=collection, storage=storage_name) "yourself."
) )

View file

@@ -57,8 +57,7 @@ def prepare_auth(auth, username, password):
raise exceptions.UserError(f"Unknown authentication method: {auth}") raise exceptions.UserError(f"Unknown authentication method: {auth}")
elif auth: elif auth:
raise exceptions.UserError( raise exceptions.UserError(
"You need to specify username and password " f"You need to specify username and password for {auth} authentication."
"for {} authentication.".format(auth)
) )
return None return None

View file

@@ -24,9 +24,9 @@ async def repair_storage(storage, repair_unsafe_uid):
new_item = repair_item(href, item, seen_uids, repair_unsafe_uid) new_item = repair_item(href, item, seen_uids, repair_unsafe_uid)
except IrreparableItem: except IrreparableItem:
logger.error( logger.error(
"Item {!r} is malformed beyond repair. " f"Item {href!r} is malformed beyond repair. "
"The PRODID property may indicate which software " "The PRODID property may indicate which software "
"created this item.".format(href) "created this item."
) )
logger.error(f"Item content: {item.raw!r}") logger.error(f"Item content: {item.raw!r}")
continue continue

View file

@@ -92,8 +92,7 @@ def _parse_xml(content):
return etree.XML(_clean_body(content)) return etree.XML(_clean_body(content))
except etree.ParseError as e: except etree.ParseError as e:
raise InvalidXMLResponse( raise InvalidXMLResponse(
"Invalid XML encountered: {}\n" f"Invalid XML encountered: {e}\nDouble-check the URLs in your config."
"Double-check the URLs in your config.".format(e)
) )

View file

@@ -106,8 +106,8 @@ class GoogleSession(dav.DAVSession):
pass pass
except ValueError as e: except ValueError as e:
raise exceptions.UserError( raise exceptions.UserError(
"Failed to load token file {}, try deleting it. " f"Failed to load token file {self._token_file}, try deleting it. "
"Original error: {}".format(self._token_file, e) f"Original error: {e}"
) )
if not self._token: if not self._token:

View file

@@ -177,11 +177,9 @@ class SingleFileStorage(Storage):
self.path self.path
): ):
raise exceptions.PreconditionFailed( raise exceptions.PreconditionFailed(
( f"Some other program modified the file {self.path!r}. Re-run the "
"Some other program modified the file {!r}. Re-run the " "synchronization and make sure absolutely no other program is "
"synchronization and make sure absolutely no other program is " "writing into the same file."
"writing into the same file."
).format(self.path)
) )
text = join_collection(item.raw for item, etag in self._items.values()) text = join_collection(item.raw for item, etag in self._items.values())
try: try:

View file

@@ -247,10 +247,10 @@ class SqliteStatus(_StatusBase):
def _get_impl(self, ident, side, table): def _get_impl(self, ident, side, table):
res = self._c.execute( res = self._c.execute(
"SELECT href_{side} AS href," f"SELECT href_{side} AS href,"
" hash_{side} AS hash," f" hash_{side} AS hash,"
" etag_{side} AS etag " f" etag_{side} AS etag "
"FROM {table} WHERE ident=?".format(side=side, table=table), f"FROM {table} WHERE ident=?",
(ident,), (ident,),
).fetchone() ).fetchone()
if res is None: if res is None:
@@ -304,8 +304,8 @@ class SqliteStatus(_StatusBase):
def _get_by_href_impl(self, href, default=(None, None), side=None): def _get_by_href_impl(self, href, default=(None, None), side=None):
res = self._c.execute( res = self._c.execute(
"SELECT ident, hash_{side} AS hash, etag_{side} AS etag " f"SELECT ident, hash_{side} AS hash, etag_{side} AS etag "
"FROM status WHERE href_{side}=?".format(side=side), f"FROM status WHERE href_{side}=?",
(href,), (href,),
).fetchone() ).fetchone()
if not res: if not res: