Refactor: More f-string for tools
parent 9f250fdf26
commit a22f37b919
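The change is mechanical throughout: string concatenation and str.format() calls in the build and release tools are rewritten as f-strings, and a few '+'-built paths become os.path.join calls. A minimal sketch of the pattern (placeholder values, not code lifted from the repository):

    # Illustrative only: placeholder values.
    python = '/usr/bin/python3'
    module = 'example'

    # Before: explicit formatting / concatenation
    cmd_old = '{} setup.py build --verbose'.format(python)
    so_old = module + '.so'

    # After: f-strings interpolate the same values inline
    cmd_new = f'{python} setup.py build --verbose'
    so_new = f'{module}.so'

    assert cmd_old == cmd_new and so_old == so_new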

.github/workflows/ci.py (vendored, 14 lines changed)
@@ -48,7 +48,7 @@ def install_deps():
 
 def build_kitty():
     python = shutil.which('python3') if is_bundle else sys.executable
-    cmd = '{} setup.py build --verbose'.format(python)
+    cmd = f'{python} setup.py build --verbose'
     if os.environ.get('KITTY_SANITIZE') == '1':
         cmd += ' --debug --sanitize'
     run(cmd)
@@ -59,8 +59,8 @@ def test_kitty():
 
 
 def package_kitty():
-    py = 'python3' if is_macos else 'python'
-    run(py + ' setup.py linux-package --update-check-interval=0 --verbose')
+    python = 'python3' if is_macos else 'python'
+    run(f'{python} setup.py linux-package --update-check-interval=0 --verbose')
     if is_macos:
         run('python3 setup.py kitty.app --update-check-interval=0 --verbose')
         run('kitty.app/Contents/MacOS/kitty +runpy "from kitty.constants import *; print(kitty_exe())"')
@@ -76,11 +76,11 @@ def replace_in_file(path, src, dest):
 def setup_bundle_env():
     global SW
     os.environ['SW'] = SW = '/Users/Shared/kitty-build/sw/sw' if is_macos else os.path.join(os.environ['GITHUB_WORKSPACE'], 'sw')
-    os.environ['PKG_CONFIG_PATH'] = SW + '/lib/pkgconfig'
+    os.environ['PKG_CONFIG_PATH'] = os.path.join(SW, 'lib', 'pkgconfig')
     if is_macos:
         os.environ['PATH'] = '{}:{}'.format('/usr/local/opt/sphinx-doc/bin', os.environ['PATH'])
     else:
-        os.environ['LD_LIBRARY_PATH'] = SW + '/lib'
+        os.environ['LD_LIBRARY_PATH'] = os.path.join(SW, 'lib')
         os.environ['PYTHONHOME'] = SW
     os.environ['PATH'] = '{}:{}'.format(os.path.join(SW, 'bin'), os.environ['PATH'])
 
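This hunk also replaces '+'-based path building with os.path.join. On the POSIX runners involved here, both spellings produce identical strings as long as the prefix has no trailing slash; a tiny standalone check (made-up SW value, not from ci.py):

    import os

    SW = '/tmp/kitty-build/sw'  # made-up prefix, for illustration only
    assert os.path.join(SW, 'lib', 'pkgconfig') == SW + '/lib/pkgconfig'
    assert os.path.join(SW, 'lib') == SW + '/lib'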
@@ -111,7 +111,7 @@ def main():
         setup_bundle_env()
     else:
         if not is_macos and 'pythonLocation' in os.environ:
-            os.environ['LD_LIBRARY_PATH'] = '{}/lib'.format(os.environ['pythonLocation'])
+            os.environ['LD_LIBRARY_PATH'] = os.path.join(os.environ['pythonLocation'], 'lib')
     action = sys.argv[-1]
     if action in ('build', 'package'):
         install_deps()
@@ -122,7 +122,7 @@ def main():
     elif action == 'test':
         test_kitty()
     else:
-        raise SystemExit('Unknown action: ' + action)
+        raise SystemExit(f'Unknown action: {action}')
 
 
 if __name__ == '__main__':

(another changed file in this commit; its header is missing here)

@@ -13,7 +13,7 @@ import tempfile
 
 def compile_terminfo(base):
     with tempfile.TemporaryDirectory() as tdir:
-        proc = subprocess.run(['tic', '-x', '-o' + tdir, 'terminfo/kitty.terminfo'], check=True, stderr=subprocess.PIPE)
+        proc = subprocess.run(['tic', '-x', f'-o{tdir}', 'terminfo/kitty.terminfo'], check=True, stderr=subprocess.PIPE)
         regex = '^"terminfo/kitty.terminfo", line [0-9]+, col [0-9]+, terminal \'xterm-kitty\': older tic versions may treat the description field as an alias$'
         for error in proc.stderr.decode('utf-8').splitlines():
             if not re.match(regex, error):

docs/conf.py (16 lines changed)
@@ -217,7 +217,8 @@ if you specify a program-to-run you can use the special placeholder
    from kitty.remote_control import cli_msg, global_options_spec
    with open('generated/cli-kitty-at.rst', 'w') as f:
        p = partial(print, file=f)
-        p('kitty @\n' + '-' * 80)
+        p('kitty @')
+        p('-' * 80)
        p('.. program::', 'kitty @')
        p('\n\n' + as_rst(
            global_options_spec, message=cli_msg, usage='command ...', appname='kitty @'))
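In this and the next docs/conf.py hunk, a single p() call with an embedded '\n' is split into two calls. Because p is partial(print, file=f), every call ends its own line, so the generated RST is unchanged; a quick standalone check of that equivalence (not the conf.py code itself):

    import io
    from functools import partial

    old, new = io.StringIO(), io.StringIO()

    p = partial(print, file=old)
    p('kitty @\n' + '-' * 80)   # old style: one call, newline embedded

    p = partial(print, file=new)
    p('kitty @')                # new style: two calls, one line each
    p('-' * 80)

    assert old.getvalue() == new.getvalue()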
@@ -225,7 +226,8 @@ if you specify a program-to-run you can use the special placeholder
        for cmd_name in sorted(all_command_names()):
            func = command_for_name(cmd_name)
            p(f'.. _at_{func.name}:\n')
-            p('kitty @', func.name + '\n' + '-' * 120)
+            p('kitty @', func.name)
+            p('-' * 120)
            p('.. program::', 'kitty @', func.name)
            p('\n\n' + as_rst(*cli_params_for(func)))
    from kittens.runner import get_kitten_cli_docs
@@ -234,12 +236,12 @@ if you specify a program-to-run you can use the special placeholder
        if data:
            with open(f'generated/cli-kitten-{kitten}.rst', 'w') as f:
                p = partial(print, file=f)
-                p('.. program::', f'kitty +kitten {kitten}')
-                p(f'\nSource code for {kitten}')
+                p('.. program::', 'kitty +kitten', kitten)
+                p('\nSource code for', kitten)
                p('-' * 72)
                p(f'\nThe source code for this kitten is `available on GitHub <https://github.com/kovidgoyal/kitty/tree/master/kittens/{kitten}>`_.')
                p('\nCommand Line Interface')
-                p('-' * 72, file=f)
+                p('-' * 72)
                p('\n\n' + option_spec_as_rst(
                    data['options'], message=data['help_text'], usage=data['usage'], appname=f'kitty +kitten {kitten}',
                    heading_char='^'))
@@ -370,8 +372,8 @@ def expand_opt_references(conf_name: str, text: str) -> str:
     def expand(m: Match[str]) -> str:
         ref = m.group(1)
         if '<' not in ref and '.' not in ref:
-            full_ref = conf_name + ref
-            return f':opt:`{ref} <{full_ref}>`'
+            # full ref
+            return f':opt:`{ref} <{conf_name}{ref}>`'
         return str(m.group())
 
     return re.sub(r':opt:`(.+?)`', expand, text)

(another changed file in this commit; its header is missing here)

@@ -67,7 +67,7 @@ def cmd_for_report(report_name: str, keymap: KeymapType, type_map: Dict[str, Any
     flag_fmt, flag_attrs = [], []
     cv = {'flag': 'c', 'int': 'i', 'uint': 'I'}[atype]
     for ch in type_map[atype]:
-        flag_fmt.append('s' + cv)
+        flag_fmt.append(f's{cv}')
         attr = keymap[ch][0]
         flag_attrs.append(f'"{attr}", {conv}g.{attr}')
     return ' '.join(flag_fmt), ', '.join(flag_attrs)
@@ -238,7 +238,7 @@ def patch_file(path: str, what: str, text: str, start_marker: str = '/* ', end_m
            end = raw.index(end_q)
        except ValueError:
            raise SystemExit(f'Failed to find "{end_q}" in {path}')
-        raw = raw[:start] + start_q + '\n' + text + '\n' + raw[end:]
+        raw = f'{raw[:start]}{start_q}\n{text}\n{raw[end:]}'
        f.seek(0)
        f.truncate(0)
        f.write(raw)
@@ -368,7 +368,7 @@ def generate_ctrl_mapping() -> None:
         val = str(ctrl_mapping[k])
         items.append(val)
         if k in "\\'":
-            k = '\\' + k
+            k = f'\\{k}'
         mi.append(f" case '{k}': return {val};")
 
     for line_items in chunks(items, 6):

publish.py (35 lines changed)
@@ -40,7 +40,7 @@ def echo_cmd(cmd: Iterable[str]) -> None:
     isatty = sys.stdout.isatty()
     end = '\n'
     if isatty:
-        end = '\x1b[m' + end
+        end = f'\x1b[m{end}'
         print('\x1b[92m', end='')
     print(shlex.join(cmd), end=end, flush=True)
 
@@ -146,11 +146,11 @@ def run_website(args: Any) -> None:
 
 
 def sign_file(path: str) -> None:
-    dest = path + '.sig'
+    dest = f'{path}.sig'
     with suppress(FileNotFoundError):
         os.remove(dest)
     subprocess.check_call([
-        os.environ['PENV'] + '/gpg-as-kovid', '--output', path + '.sig',
+        os.environ['PENV'] + '/gpg-as-kovid', '--output', f'{path}.sig',
         '--detach-sig', path
     ])
 
@@ -159,7 +159,7 @@ def run_sdist(args: Any) -> None:
     with tempfile.TemporaryDirectory() as tdir:
         base = os.path.join(tdir, f'kitty-{version}')
         os.mkdir(base)
-        subprocess.check_call('git archive HEAD | tar -x -C ' + base, shell=True)
+        subprocess.check_call(f'git archive HEAD | tar -x -C {base}', shell=True)
         dest = os.path.join(base, 'docs', '_build')
         os.mkdir(dest)
         for x in 'html man'.split():
@@ -167,9 +167,9 @@ def run_sdist(args: Any) -> None:
         dest = os.path.abspath(os.path.join('build', f'kitty-{version}.tar'))
         subprocess.check_call(['tar', '-cf', dest, os.path.basename(base)], cwd=tdir)
         with suppress(FileNotFoundError):
-            os.remove(dest + '.xz')
+            os.remove(f'{dest}.xz')
         subprocess.check_call(['xz', '-9', dest])
-        sign_file(dest + '.xz')
+        sign_file(f'{dest}.xz')
 
 
 class ReadFileWithProgressReporting(io.FileIO): # {{{
@@ -231,7 +231,7 @@ class Base: # {{{
 
 class GitHub(Base): # {{{
 
-    API = 'https://api.github.com/'
+    API = 'https://api.github.com'
 
     def __init__(
         self,
@@ -244,12 +244,12 @@ class GitHub(Base): # {{{
     ):
         self.files, self.reponame, self.version, self.username, self.password, self.replace = (
             files, reponame, version, username, password, replace)
-        self.current_tag_name = self.version if self.version == 'nightly' else ('v' + self.version)
+        self.current_tag_name = self.version if self.version == 'nightly' else f'v{self.version}'
         self.is_nightly = self.current_tag_name == 'nightly'
         self.requests = s = requests.Session()
         s.auth = (self.username, self.password)
         s.headers.update({'Accept': 'application/vnd.github.v3+json'})
-        self.url_base = f'{self.API}repos/{self.username}/{self.reponame}/releases/'
+        self.url_base = f'{self.API}/repos/{self.username}/{self.reponame}/releases'
 
     def patch(self, url: str, fail_msg: str, **data: Any) -> None:
         rdata = json.dumps(data)
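With the trailing slash dropped from both API and url_base, every release endpoint in this class is now derived from url_base with an explicit '/' separator. A small sketch of how the URLs touched later in this diff compose (placeholder owner, repo, tag and ids):

    API = 'https://api.github.com'
    username, reponame = 'example-user', 'example-repo'   # placeholders
    url_base = f'{API}/repos/{username}/{reponame}/releases'

    release_id, tag, asset_id = 42, 'v0.0.1', 7            # placeholders
    print(f'{url_base}/{release_id}')          # release by id (update_nightly_description)
    print(f'{url_base}/tags/{tag}')            # release looked up by tag (create_release)
    print(f'{url_base}/{release_id}/assets')   # assets of a release (existing_assets)
    print(f'{url_base}/assets/{asset_id}')     # a single asset (delete)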
@@ -262,7 +262,7 @@ class GitHub(Base): # {{{
             self.fail(r, fail_msg)
 
     def update_nightly_description(self, release_id: int) -> None:
-        url = self.url_base + str(release_id)
+        url = f'{self.url_base}/{release_id}'
         now = str(datetime.datetime.utcnow()).split('.')[0] + ' UTC'
         with open('.git/refs/heads/master') as f:
             commit = f.read().strip()
@@ -276,7 +276,7 @@ class GitHub(Base): # {{{
         # self.clean_older_releases(releases)
         release = self.create_release()
         upload_url = release['upload_url'].partition('{')[0]
-        asset_url = self.url_base + 'assets/{}'
+        asset_url = f'{self.url_base}/assets/{{}}'
         existing_assets = self.existing_assets(release['id'])
         if self.is_nightly:
             for fname in existing_assets:
@@ -308,7 +308,7 @@ class GitHub(Base): # {{{
         self.info(f'\nDeleting old released installers from: {release["tag_name"]}')
         for asset in release['assets']:
             r = self.requests.delete(
-                f'{self.API}repos/{self.username}/{self.reponame}/releases/assets/{asset["id"]}')
+                f'{self.url_base}/assets/{asset["id"]}')
             if r.status_code != 204:
                 self.fail(r, f'Failed to delete obsolete asset: {asset["name"]} for release: {release["tag_name"]}')
 
@@ -336,7 +336,7 @@ class GitHub(Base): # {{{
         return bool(error_code == 'already_exists')
 
     def existing_assets(self, release_id: str) -> Dict[str, str]:
-        url = f'{self.API}repos/{self.username}/{self.reponame}/releases/{release_id}/assets'
+        url = f'{self.url_base}/{release_id}/assets'
         r = self.requests.get(url)
         if r.status_code != 200:
             self.fail(r, 'Failed to get assets for release')
@@ -345,15 +345,14 @@ class GitHub(Base): # {{{
     def create_release(self) -> Dict[str, Any]:
         ' Create a release on GitHub or if it already exists, return the existing release '
         # Check for existing release
-        url = f'{self.API}repos/{self.username}/{self.reponame}/releases/tags/{self.current_tag_name}'
+        url = f'{self.url_base}/tags/{self.current_tag_name}'
         r = self.requests.get(url)
         if r.status_code == 200:
             return dict(r.json())
         if self.is_nightly:
             raise SystemExit('No existing nightly release found on GitHub')
-        url = f'{self.API}repos/{self.username}/{self.reponame}/releases'
         r = self.requests.post(
-            url,
+            self.url_base,
             data=json.dumps({
                 'tag_name': self.current_tag_name,
                 'target_commitish': 'master',
@@ -394,7 +393,7 @@ def files_for_upload() -> Dict[str, str]:
     files[f'build/kitty-{version}.tar.xz.sig'] = 'Source code GPG signature'
     for path, desc in signatures.items():
         sign_file(path)
-        files[path + '.sig'] = desc
+        files[f'{path}.sig'] = desc
     for f in files:
         if not os.path.exists(f):
             raise SystemExit(f'The release artifact {f} does not exist')
@@ -460,7 +459,7 @@ def exec_actions(actions: Iterable[str], args: Any) -> None:
     for action in actions:
         print('Running', action)
         cwd = os.getcwd()
-        globals()['run_' + action](args)
+        globals()[f'run_{action}'](args)
         os.chdir(cwd)
 
 

setup.py (4 lines changed)
@@ -710,7 +710,7 @@ def compile_c_extension(
     def on_success() -> None:
         os.rename(dest, real_dest)
 
-    compilation_database.add_command(desc, cmd, partial(newer, real_dest, *objects), on_success=on_success, key=CompileKey('', module + '.so'))
+    compilation_database.add_command(desc, cmd, partial(newer, real_dest, *objects), on_success=on_success, key=CompileKey('', f'{module}.so'))
 
 
 def find_c_files() -> Tuple[List[str], List[str]]:
@@ -1112,7 +1112,7 @@ def create_macos_app_icon(where: str = 'Resources') -> None:
             'iconutil', '-c', 'icns', iconset_dir, '-o', icns_dir
         ])
     except FileNotFoundError:
-        print(error('iconutil not found') + ', using png2icns (without retina support) to convert the logo', file=sys.stderr)
+        print(f'{error("iconutil not found")}, using png2icns (without retina support) to convert the logo', file=sys.stderr)
         subprocess.check_call([
             'png2icns', icns_dir
         ] + [os.path.join(iconset_dir, logo) for logo in [
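One detail in this last hunk: once the error('iconutil not found') call moves inside the f-string, the inner string literal switches to double quotes, because an f-string cannot reuse its own delimiting quote character before Python 3.12. A standalone illustration with a stand-in error() helper (the real one lives in setup.py):

    def error(msg: str) -> str:   # stand-in helper, for illustration only
        return f'*** {msg} ***'

    old = error('iconutil not found') + ', using png2icns'
    new = f'{error("iconutil not found")}, using png2icns'
    assert old == new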