All defs are now typed
parent d3f37eeba4
commit 5956277863
@@ -4,12 +4,12 @@
 import subprocess
 from collections import defaultdict
-from typing import DefaultDict, Dict, FrozenSet, List, Tuple, Union
+from typing import Any, DefaultDict, Dict, FrozenSet, List, Tuple, Union

 KeymapType = Dict[str, Tuple[str, Union[FrozenSet[str], str]]]


-def resolve_keys(keymap: KeymapType):
+def resolve_keys(keymap: KeymapType) -> DefaultDict[str, List[str]]:
     ans: DefaultDict[str, List[str]] = defaultdict(list)
     for ch, (attr, atype) in keymap.items():
         if isinstance(atype, str) and atype in ('int', 'uint'):
@@ -20,7 +20,7 @@ def resolve_keys(keymap: KeymapType):
     return ans


-def enum(keymap: KeymapType):
+def enum(keymap: KeymapType) -> str:
     lines = []
     for ch, (attr, atype) in keymap.items():
         lines.append(f"{attr}='{ch}'")
@@ -31,7 +31,7 @@ def enum(keymap: KeymapType):
     '''.format(',\n'.join(lines))


-def parse_key(keymap: KeymapType):
+def parse_key(keymap: KeymapType) -> str:
     lines = []
     for attr, atype in keymap.values():
         vs = atype.upper() if isinstance(atype, str) and atype in ('uint', 'int') else 'FLAG'
@@ -39,7 +39,7 @@ def parse_key(keymap: KeymapType):
     return ' \n'.join(lines)


-def parse_flag(keymap: KeymapType, type_map, command_class):
+def parse_flag(keymap: KeymapType, type_map: Dict[str, Any], command_class: str) -> str:
     lines = []
     for ch in type_map['flag']:
         attr, allowed_values = keymap[ch]
@@ -57,14 +57,14 @@ def parse_flag(keymap: KeymapType, type_map, command_class):
     return ' \n'.join(lines)


-def parse_number(keymap: KeymapType):
+def parse_number(keymap: KeymapType) -> Tuple[str, str]:
     int_keys = [f'I({attr})' for attr, atype in keymap.values() if atype == 'int']
     uint_keys = [f'U({attr})' for attr, atype in keymap.values() if atype == 'uint']
     return '; '.join(int_keys), '; '.join(uint_keys)


-def cmd_for_report(report_name, keymap: KeymapType, type_map, payload_allowed):
-    def group(atype, conv):
+def cmd_for_report(report_name: str, keymap: KeymapType, type_map: Dict[str, Any], payload_allowed: bool) -> str:
+    def group(atype: str, conv: str) -> Tuple[str, str]:
         flag_fmt, flag_attrs = [], []
         cv = {'flag': 'c', 'int': 'i', 'uint': 'I'}[atype]
         for ch in type_map[atype]:
@@ -89,7 +89,15 @@ def cmd_for_report(report_name, keymap: KeymapType, type_map, payload_allowed):
     return '\n'.join(ans)


-def generate(function_name, callback_name, report_name, keymap: KeymapType, command_class, initial_key='a', payload_allowed=True):
+def generate(
+    function_name: str,
+    callback_name: str,
+    report_name: str,
+    keymap: KeymapType,
+    command_class: str,
+    initial_key: str = 'a',
+    payload_allowed: bool = True
+) -> str:
     type_map = resolve_keys(keymap)
     keys_enum = enum(keymap)
     handle_key = parse_key(keymap)
@@ -230,7 +238,7 @@ static inline void
     '''


-def write_header(text, path):
+def write_header(text: str, path: str) -> None:
     with open(path, 'w') as f:
         print(f'// This file is generated by {__file__} do not edit!', file=f, end='\n\n')
         print('#pragma once', file=f)
@@ -238,7 +246,7 @@ def write_header(text, path):
     subprocess.check_call(['clang-format', '-i', path])


-def graphics_parser():
+def graphics_parser() -> None:
     flag = frozenset
     keymap: KeymapType = {
         'a': ('action', flag('tTqpd')),
@@ -13,7 +13,8 @@ from html.entities import html5
 from itertools import groupby
 from operator import itemgetter
 from typing import (
-    DefaultDict, Dict, Generator, Iterable, List, Optional, Set, Tuple, Union
+    Callable, DefaultDict, Dict, FrozenSet, Generator, Iterable, List,
+    Optional, Set, Tuple, Union
 )
 from urllib.request import urlopen

@@ -27,7 +28,7 @@ if len(non_characters) != 66:
 emoji_skin_tone_modifiers = frozenset(range(0x1f3fb, 0x1F3FF + 1))


-def get_data(fname, folder='UCD'):
+def get_data(fname: str, folder: str = 'UCD') -> Iterable[str]:
     url = f'https://www.unicode.org/Public/{folder}/latest/{fname}'
     bn = os.path.basename(url)
     local = os.path.join('/tmp', bn)
@@ -54,9 +55,9 @@ marks = set(emoji_skin_tone_modifiers) | {zwj}
 not_assigned = set(range(0, sys.maxunicode))


-def parse_ucd():
+def parse_ucd() -> None:

-    def add_word(w, c):
+    def add_word(w: str, c: int) -> None:
         if c <= 32 or c == 127 or 128 <= c <= 159:
             return
         if len(w) > 1:
@@ -102,7 +103,7 @@ def parse_ucd():
     word_search_map['lamda'] |= word_search_map['lambda']


-def split_two(line):
+def split_two(line: str) -> Tuple[Set[int], str]:
     spec, rest = line.split(';', 1)
     spec, rest = spec.strip(), rest.strip().split(' ', 1)[0].strip()
     if '..' in spec:
@@ -118,26 +119,27 @@ emoji_categories: Dict[str, Set[int]] = {}
 emoji_presentation_bases: Set[int] = set()


-def parse_emoji():
+def parse_emoji() -> None:
     for line in get_data('emoji-data.txt', 'emoji'):
         chars, rest = split_two(line)
         s = emoji_categories.setdefault(rest, set())
         s.update(chars)
         all_emoji.update(chars)
     for line in get_data('emoji-variation-sequences.txt', 'emoji'):
-        base, var, *rest = line.split()
+        parts = line.split()
+        base, var = parts[0], parts[1]
         if base.startswith('#'):
             continue
-        base = int(base, 16)
         if var.upper() == 'FE0F':
-            emoji_presentation_bases.add(base)
+            ibase = int(base, 16)
+            emoji_presentation_bases.add(ibase)


 doublewidth: Set[int] = set()
 ambiguous: Set[int] = set()


-def parse_eaw():
+def parse_eaw() -> None:
     global doublewidth, ambiguous
     seen: Set[int] = set()
     for line in get_data('ucd/EastAsianWidth.txt'):
@@ -166,7 +168,7 @@ def get_ranges(items: List[int]) -> Generator[Union[int, Tuple[int, int]], None,
         yield a, b


-def write_case(spec, p):
+def write_case(spec: Union[Tuple, int], p: Callable) -> None:
     if isinstance(spec, tuple):
         p('\t\tcase 0x{:x} ... 0x{:x}:'.format(*spec))
     else:
@@ -174,7 +176,7 @@ def write_case(spec, p):


 @contextmanager
-def create_header(path, include_data_types=True):
+def create_header(path: str, include_data_types: bool = True) -> Generator[Callable, None, None]:
     with open(path, 'w') as f:
         p = partial(print, file=f)
         p('// unicode data, built from the unicode standard on:', date.today())
@@ -191,7 +193,7 @@ def create_header(path, include_data_types=True):
         p('END_ALLOW_CASE_RANGE')


-def gen_emoji():
+def gen_emoji() -> None:
     with create_header('kitty/emoji.h') as p:
         p('static inline bool\nis_emoji(char_type code) {')
         p('\tswitch(code) {')
@@ -221,8 +223,16 @@ def gen_emoji():
         p('\treturn false;\n}')


-def category_test(name, p, classes, comment, static=False, extra_chars=frozenset(), exclude=frozenset()):
-    static = 'static inline ' if static else ''
+def category_test(
+    name: str,
+    p: Callable,
+    classes: Iterable[str],
+    comment: str,
+    use_static: bool = False,
+    extra_chars: Union[FrozenSet[int], Set[int]] = frozenset(),
+    exclude: Union[Set[int], FrozenSet[int]] = frozenset()
+) -> None:
+    static = 'static inline ' if use_static else ''
     chars: Set[int] = set()
     for c in classes:
         chars |= class_maps[c]
@@ -238,7 +248,7 @@ def category_test(name, p, classes, comment, static=False, extra_chars=frozenset
     p('\treturn false;\n}\n')


-def codepoint_to_mark_map(p, mark_map):
+def codepoint_to_mark_map(p: Callable, mark_map: List[int]) -> Dict[int, int]:
     p('\tswitch(c) { // {{{')
     rmap = {c: m for m, c in enumerate(mark_map)}
     for spec in get_ranges(mark_map):
@@ -253,14 +263,14 @@ def codepoint_to_mark_map(p, mark_map):
     return rmap


-def classes_to_regex(classes, exclude=''):
+def classes_to_regex(classes: Iterable[str], exclude: str = '') -> Iterable[str]:
     chars: Set[int] = set()
     for c in classes:
         chars |= class_maps[c]
-    for c in map(ord, exclude):
-        chars.discard(c)
+    for x in map(ord, exclude):
+        chars.discard(x)

-    def as_string(codepoint):
+    def as_string(codepoint: int) -> str:
         if codepoint < 256:
             return r'\x{:02x}'.format(codepoint)
         if codepoint <= 0xffff:
@@ -274,7 +284,7 @@ def classes_to_regex(classes, exclude=''):
         yield as_string(spec)


-def gen_ucd():
+def gen_ucd() -> None:
     cz = {c for c in class_maps if c[0] in 'CZ'}
     with create_header('kitty/unicode-data.c') as p:
         p('#include "unicode-data.h"')
@@ -313,7 +323,7 @@ def gen_ucd():
         f.write("url_delimiters = '{}' # noqa".format(''.join(classes_to_regex(cz, exclude='\n'))))


-def gen_names():
+def gen_names() -> None:
     with create_header('kittens/unicode_input/names.h') as p:
         mark_to_cp = list(sorted(name_map))
         cp_to_mark = {cp: m for m, cp in enumerate(mark_to_cp)}
@@ -372,24 +382,24 @@ def gen_names():

         class TrieNode:

-            def __init__(self):
+            def __init__(self) -> None:
                 self.match_offset = 0
                 self.children_offset = 0
-                self.children = {}
+                self.children: Dict[int, int] = {}

-            def add_letter(self, letter):
+            def add_letter(self, letter: int) -> int:
                 if letter not in self.children:
                     self.children[letter] = len(all_trie_nodes)
                     all_trie_nodes.append(TrieNode())
                 return self.children[letter]

-            def __str__(self):
+            def __str__(self) -> str:
                 return f'{{ .children_offset={self.children_offset}, .match_offset={self.match_offset} }}'

         root = TrieNode()
         all_trie_nodes.append(root)

-        def add_word(word_idx, word):
+        def add_word(word_idx: int, word: str) -> None:
             parent = root
             for letter in map(ord, word):
                 idx = parent.add_letter(letter)
@@ -414,10 +424,10 @@ def gen_names():
         p('}; // }}}\n')


-def gen_wcwidth():
+def gen_wcwidth() -> None:
     seen: Set[int] = set()

-    def add(p, comment, chars_, ret):
+    def add(p: Callable, comment: str, chars_: Set[int], ret: int) -> None:
         chars = chars_ - seen
         seen.update(chars)
         p(f'\t\t// {comment} ({len(chars)} codepoints)' + ' {{' '{')
glfw/glfw.py (56 changes)

@@ -6,18 +6,54 @@ import json
 import os
 import re
 import sys
+from typing import Callable, List, Optional, Tuple

 _plat = sys.platform.lower()
 is_linux = 'linux' in _plat
 base = os.path.dirname(os.path.abspath(__file__))


-def wayland_protocol_file_name(base, ext='c'):
+class Env:
+
+    cc: str = ''
+    cppflags: List[str] = []
+    cflags: List[str] = []
+    ldflags: List[str] = []
+    ldpaths: List[str] = []
+    ccver: Tuple[int, int]
+
+    # glfw stuff
+    all_headers: List[str] = []
+    sources: List[str] = []
+    wayland_packagedir: str = ''
+    wayland_scanner: str = ''
+    wayland_scanner_code: str = ''
+    wayland_protocols: Tuple[str, ...] = ()
+
+    def __init__(
+        self, cc: str = '', cppflags: List[str] = [], cflags: List[str] = [], ldflags: List[str] = [],
+        ldpaths: Optional[List[str]] = None, ccver: Tuple[int, int] = (0, 0)
+    ):
+        self.cc, self.cppflags, self.cflags, self.ldflags, self.ldpaths = cc, cppflags, cflags, ldflags, [] if ldpaths is None else ldpaths
+        self.ccver = ccver
+
+    def copy(self) -> 'Env':
+        ans = Env(self.cc, list(self.cppflags), list(self.cflags), list(self.ldflags), list(self.ldpaths), self.ccver)
+        ans.all_headers = list(self.all_headers)
+        ans.sources = list(self.sources)
+        ans.wayland_packagedir = self.wayland_packagedir
+        ans.wayland_scanner = self.wayland_scanner
+        ans.wayland_scanner_code = self.wayland_scanner_code
+        ans.wayland_protocols = self.wayland_protocols
+        return ans
+
+
+def wayland_protocol_file_name(base: str, ext: str = 'c') -> str:
     base = os.path.basename(base).rpartition('.')[0]
     return 'wayland-{}-client-protocol.{}'.format(base, ext)


-def init_env(env, pkg_config, at_least_version, test_compile, module='x11'):
+def init_env(env: Env, pkg_config: Callable, at_least_version: Callable, test_compile: Callable, module: str = 'x11') -> Env:
     ans = env.copy()
     ans.cflags.append('-fpic')
     ans.cppflags.append('-D_GLFW_' + module.upper())
@@ -74,7 +110,7 @@ def init_env(env, pkg_config, at_least_version, test_compile, module='x11'):
     return ans


-def build_wayland_protocols(env, Command, parallel_run, emphasis, newer, dest_dir):
+def build_wayland_protocols(env: Env, Command: Callable, parallel_run: Callable, emphasis: Callable, newer: Callable, dest_dir: str) -> None:
     items = []
     for protocol in env.wayland_protocols:
         src = os.path.join(env.wayland_packagedir, protocol)
@@ -95,7 +131,7 @@ def build_wayland_protocols(env, Command, parallel_run, emphasis, newer, dest_di

 class Arg:

-    def __init__(self, decl):
+    def __init__(self, decl: str):
         self.type, self.name = decl.rsplit(' ', 1)
         self.type = self.type.strip()
         self.name = self.name.strip()
@@ -103,13 +139,13 @@ class Arg:
             self.name = self.name[1:]
             self.type = self.type + '*'

-    def __repr__(self):
+    def __repr__(self) -> str:
         return 'Arg({}, {})'.format(self.type, self.name)


 class Function:

-    def __init__(self, declaration, check_fail=True):
+    def __init__(self, declaration: str, check_fail: bool = True):
         self.check_fail = check_fail
         m = re.match(
             r'(.+?)\s+(glfw[A-Z][a-zA-Z0-9]+)[(](.+)[)]$', declaration
@@ -128,14 +164,14 @@ class Function:
         if not self.args:
             self.args = [Arg('void v')]

-    def declaration(self):
+    def declaration(self) -> str:
         return 'typedef {restype} (*{name}_func)({args});\n{name}_func {name}_impl;\n#define {name} {name}_impl'.format(
             restype=self.restype,
             name=self.name,
             args=', '.join(a.type for a in self.args)
         )

-    def load(self):
+    def load(self) -> str:
         ans = '*(void **) (&{name}_impl) = dlsym(handle, "{name}");'.format(
             name=self.name
         )
@@ -146,7 +182,7 @@ class Function:
         return ans


-def generate_wrappers(glfw_header):
+def generate_wrappers(glfw_header: str) -> None:
     with open(glfw_header) as f:
         src = f.read()
     functions = []
@@ -244,7 +280,7 @@ unload_glfw(void) {
         f.write(code)


-def main():
+def main() -> None:
     os.chdir(os.path.dirname(os.path.abspath(__file__)))
     generate_wrappers('glfw3.h')

publish.py (77 changes)

@@ -16,7 +16,7 @@ import sys
 import tempfile
 import time
 from contextlib import suppress
-from typing import IO, Optional, cast
+from typing import IO, Any, Dict, Iterable, List, Optional, cast

 import requests

@@ -44,7 +44,7 @@ def call(*cmd: str, cwd: Optional[str] = None) -> None:
         raise SystemExit(ret)


-def run_build(args):
+def run_build(args: Any) -> None:
     os.chdir(build_path)
     call('./linux 64 kitty')
     call('./osx kitty --sign-installers')
@@ -52,21 +52,21 @@ def run_build(args):
     call('./linux 32 kitty')


-def run_tag(args):
+def run_tag(args: Any) -> None:
     call('git push')
     call('git tag -s v{0} -m version-{0}'.format(version))
     call('git push origin v{0}'.format(version))


-def run_man(args):
+def run_man(args: Any) -> None:
     call('make FAIL_WARN=-W man', cwd=docs_dir)


-def run_html(args):
+def run_html(args: Any) -> None:
     call('make FAIL_WARN=-W html', cwd=docs_dir)


-def add_analytics():
+def add_analytics() -> None:
     analytics = '''
 <!-- Google Analytics -->
 <script>
@@ -87,7 +87,7 @@ ga('send', 'pageview');
         f.write(html.encode('utf-8'))


-def run_website(args):
+def run_website(args: Any) -> None:
     if os.path.exists(publish_dir):
         shutil.rmtree(publish_dir)
     shutil.copytree(os.path.join(docs_dir, '_build', 'html'), publish_dir)
@@ -105,7 +105,7 @@ def run_website(args):
     subprocess.check_call(['git', 'push'])


-def run_sdist(args):
+def run_sdist(args: Any) -> None:
     with tempfile.TemporaryDirectory() as tdir:
         base = os.path.join(tdir, f'kitty-{version}')
         os.mkdir(base)
@@ -122,7 +122,7 @@ def run_sdist(args):


 class ReadFileWithProgressReporting(io.FileIO): # {{{
-    def __init__(self, path):
+    def __init__(self, path: str):
         io.FileIO.__init__(self, path, 'rb')
         self.seek(0, os.SEEK_END)
         self._total = self.tell()
@@ -166,14 +166,12 @@ class ReadFileWithProgressReporting(io.FileIO): # {{{


 class Base(object): # {{{
-    def __init__(self):
-        pass

-    def info(self, *args, **kwargs):
+    def info(self, *args: Any, **kwargs: Any) -> None:
         print(*args, **kwargs)
         sys.stdout.flush()

-    def warn(self, *args, **kwargs):
+    def warn(self, *args: Any, **kwargs: Any) -> None:
         print('\n' + '_' * 20, 'WARNING', '_' * 20)
         print(*args, **kwargs)
         print('_' * 50)
@@ -187,13 +185,15 @@ class GitHub(Base): # {{{

     API = 'https://api.github.com/'

-    def __init__(self,
-                 files,
-                 reponame,
-                 version,
-                 username,
-                 password,
-                 replace=False):
+    def __init__(
+        self,
+        files: Dict[str, str],
+        reponame: str,
+        version: str,
+        username: str,
+        password: str,
+        replace: bool = False
+    ):
         self.files, self.reponame, self.version, self.username, self.password, self.replace = (
             files, reponame, version, username, password, replace)
         self.current_tag_name = 'v' + self.version
@@ -201,7 +201,7 @@ class GitHub(Base): # {{{
         s.auth = (self.username, self.password)
         s.headers.update({'Accept': 'application/vnd.github.v3+json'})

-    def __call__(self):
+    def __call__(self) -> None:
         releases = self.releases()
         # self.clean_older_releases(releases)
         release = self.create_release(releases)
@@ -239,7 +239,7 @@ class GitHub(Base): # {{{
             if r.status_code != 200:
                 self.fail(r, 'Failed to set label for %s' % fname)

-    def clean_older_releases(self, releases):
+    def clean_older_releases(self, releases: Iterable[Dict[str, Any]]) -> None:
         for release in releases:
             if release.get(
                 'assets',
@@ -256,7 +256,7 @@ class GitHub(Base): # {{{
                     'Failed to delete obsolete asset: %s for release: %s'
                     % (asset['name'], release['tag_name']))

-    def do_upload(self, url, path, desc, fname):
+    def do_upload(self, url: str, path: str, desc: str, fname: str) -> requests.Response:
         mime_type = mimetypes.guess_type(fname)[0]
         self.info('Uploading to GitHub: %s (%s)' % (fname, mime_type))
         with ReadFileWithProgressReporting(path) as f:
@@ -269,17 +269,17 @@ class GitHub(Base): # {{{
                 params={'name': fname},
                 data=cast(IO[bytes], f))

-    def fail(self, r, msg):
+    def fail(self, r: requests.Response, msg: str) -> None:
         print(msg, ' Status Code: %s' % r.status_code, file=sys.stderr)
         print("JSON from response:", file=sys.stderr)
         pprint.pprint(dict(r.json()), stream=sys.stderr)
         raise SystemExit(1)

-    def already_exists(self, r):
+    def already_exists(self, r: requests.Response) -> bool:
         error_code = r.json().get('errors', [{}])[0].get('code', None)
-        return error_code == 'already_exists'
+        return bool(error_code == 'already_exists')

-    def existing_assets(self, release_id):
+    def existing_assets(self, release_id: str) -> Dict[str, str]:
         url = self.API + 'repos/%s/%s/releases/%s/assets' % (
             self.username, self.reponame, release_id)
         r = self.requests.get(url)
@@ -287,22 +287,21 @@ class GitHub(Base): # {{{
             self.fail(r, 'Failed to get assets for release')
         return {asset['name']: asset['id'] for asset in r.json()}

-    def releases(self):
+    def releases(self) -> List[Dict[str, Any]]:
         url = self.API + 'repos/%s/%s/releases' % (self.username, self.reponame
         )
         r = self.requests.get(url)
         if r.status_code != 200:
             self.fail(r, 'Failed to list releases')
-        return r.json()
+        return list(r.json())

-    def create_release(self, releases):
+    def create_release(self, releases: Iterable[Dict[str, str]]) -> Dict[str, Any]:
         ' Create a release on GitHub or if it already exists, return the existing release '
         for release in releases:
             # Check for existing release
             if release['tag_name'] == self.current_tag_name:
                 return release
-        url = self.API + 'repos/%s/%s/releases' % (self.username, self.reponame
-        )
+        url = self.API + 'repos/%s/%s/releases' % (self.username, self.reponame)
         r = self.requests.post(
             url,
             data=json.dumps({
@@ -316,19 +315,17 @@ class GitHub(Base): # {{{
         if r.status_code != 201:
             self.fail(r, 'Failed to create release for version: %s' %
                       self.version)
-        return r.json()
+        return dict(r.json())


 # }}}


-def get_github_data():
+def get_github_data() -> Dict[str, str]:
     with open(os.environ['PENV'] + '/github-token') as f:
         un, pw = f.read().strip().split(':')
     return {'username': un, 'password': pw}


-def run_upload(args):
+def run_upload(args: Any) -> None:
     files = {
         os.path.join(build_path, 'build', f.format(version)): desc
         for f, desc in {
@@ -346,18 +343,18 @@ def run_upload(args):
     gh()


-def require_git_master(branch='master'):
+def require_git_master(branch: str = 'master') -> None:
     b = subprocess.check_output(['git', 'symbolic-ref', '--short', 'HEAD']).decode('utf-8').strip()
     if b != branch:
         raise SystemExit('You must be in the {} git branch'.format(branch))


-def require_penv():
+def require_penv() -> None:
     if 'PENV' not in os.environ:
         raise SystemExit('The PENV env var is not present, required for uploading releases')


-def main():
+def main() -> None:
     require_git_master()
     require_penv()
     parser = argparse.ArgumentParser(description='Publish kitty')
|
|||||||
232
setup.py
232
setup.py
@ -14,20 +14,20 @@ import subprocess
|
|||||||
import sys
|
import sys
|
||||||
import sysconfig
|
import sysconfig
|
||||||
import time
|
import time
|
||||||
from collections import namedtuple
|
|
||||||
from contextlib import suppress
|
from contextlib import suppress
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import (
|
from typing import (
|
||||||
Callable, Dict, Iterator, List, NamedTuple, Optional, Sequence, Tuple,
|
Callable, Dict, Iterable, Iterator, List, NamedTuple, Optional,
|
||||||
Union
|
Sequence, Set, Tuple, Union
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from glfw import glfw # noqa
|
||||||
|
|
||||||
if sys.version_info[:2] < (3, 6):
|
if sys.version_info[:2] < (3, 6):
|
||||||
raise SystemExit('kitty requires python >= 3.6')
|
raise SystemExit('kitty requires python >= 3.6')
|
||||||
base = os.path.dirname(os.path.abspath(__file__))
|
base = os.path.dirname(os.path.abspath(__file__))
|
||||||
sys.path.insert(0, base)
|
sys.path.insert(0, base)
|
||||||
from glfw import glfw # noqa
|
|
||||||
del sys.path[0]
|
del sys.path[0]
|
||||||
|
|
||||||
verbose = False
|
verbose = False
|
||||||
@ -46,25 +46,28 @@ version = tuple(
|
|||||||
)
|
)
|
||||||
_plat = sys.platform.lower()
|
_plat = sys.platform.lower()
|
||||||
is_macos = 'darwin' in _plat
|
is_macos = 'darwin' in _plat
|
||||||
|
Env = glfw.Env
|
||||||
|
|
||||||
class Env:
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, cc: str = '', cppflags: List[str] = [], cflags: List[str] = [], ldflags: List[str] = [],
|
|
||||||
ldpaths: Optional[List[str]] = None, ccver: Optional[Tuple[int, int]] = None
|
|
||||||
):
|
|
||||||
self.cc, self.cppflags, self.cflags, self.ldflags, self.ldpaths = cc, cppflags, cflags, ldflags, [] if ldpaths is None else ldpaths
|
|
||||||
self.ccver = ccver
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
return Env(self.cc, list(self.cppflags), list(self.cflags), list(self.ldflags), list(self.ldpaths), self.ccver)
|
|
||||||
|
|
||||||
|
|
||||||
env = Env()
|
env = Env()
|
||||||
|
|
||||||
PKGCONFIG = os.environ.get('PKGCONFIG_EXE', 'pkg-config')
|
PKGCONFIG = os.environ.get('PKGCONFIG_EXE', 'pkg-config')
|
||||||
CompileKey = namedtuple('CompileKey', 'src dest')
|
|
||||||
|
|
||||||
|
class Options(argparse.Namespace):
|
||||||
|
action: str = 'build'
|
||||||
|
debug: bool = False
|
||||||
|
verbose: int = 0
|
||||||
|
sanitize: bool = False
|
||||||
|
prefix: str = './linux-package'
|
||||||
|
incremental: bool = True
|
||||||
|
profile: bool = False
|
||||||
|
for_freeze: bool = False
|
||||||
|
libdir_name: str = 'lib'
|
||||||
|
extra_logging: List[str] = []
|
||||||
|
update_check_interval: float = 24
|
||||||
|
|
||||||
|
|
||||||
|
class CompileKey(NamedTuple):
|
||||||
|
src: str
|
||||||
|
dest: str
|
||||||
|
|
||||||
|
|
||||||
class Command(NamedTuple):
|
class Command(NamedTuple):
|
||||||
@ -76,19 +79,19 @@ class Command(NamedTuple):
|
|||||||
keyfile: Optional[str]
|
keyfile: Optional[str]
|
||||||
|
|
||||||
|
|
||||||
def emphasis(text):
|
def emphasis(text: str) -> str:
|
||||||
if sys.stdout.isatty():
|
if sys.stdout.isatty():
|
||||||
text = '\033[32m' + text + '\033[39m'
|
text = '\033[32m' + text + '\033[39m'
|
||||||
return text
|
return text
|
||||||
|
|
||||||
|
|
||||||
def error(text):
|
def error(text: str) -> str:
|
||||||
if sys.stdout.isatty():
|
if sys.stdout.isatty():
|
||||||
text = '\033[91m' + text + '\033[39m'
|
text = '\033[91m' + text + '\033[39m'
|
||||||
return text
|
return text
|
||||||
|
|
||||||
|
|
||||||
def pkg_config(pkg, *args):
|
def pkg_config(pkg: str, *args: str) -> List[str]:
|
||||||
try:
|
try:
|
||||||
return list(
|
return list(
|
||||||
filter(
|
filter(
|
||||||
@ -103,7 +106,7 @@ def pkg_config(pkg, *args):
|
|||||||
raise SystemExit('The package {} was not found on your system'.format(error(pkg)))
|
raise SystemExit('The package {} was not found on your system'.format(error(pkg)))
|
||||||
|
|
||||||
|
|
||||||
def at_least_version(package, major, minor=0):
|
def at_least_version(package: str, major: int, minor: int = 0) -> None:
|
||||||
q = '{}.{}'.format(major, minor)
|
q = '{}.{}'.format(major, minor)
|
||||||
if subprocess.run([PKGCONFIG, package, '--atleast-version=' + q]
|
if subprocess.run([PKGCONFIG, package, '--atleast-version=' + q]
|
||||||
).returncode != 0:
|
).returncode != 0:
|
||||||
@ -146,15 +149,19 @@ def cc_version() -> Tuple[str, Tuple[int, int]]:
|
|||||||
return cc, ver
|
return cc, ver
|
||||||
|
|
||||||
|
|
||||||
def get_python_include_paths():
|
def get_python_include_paths() -> List[str]:
|
||||||
ans = []
|
ans = []
|
||||||
for name in sysconfig.get_path_names():
|
for name in sysconfig.get_path_names():
|
||||||
if 'include' in name:
|
if 'include' in name:
|
||||||
ans.append(name)
|
ans.append(name)
|
||||||
return sorted(frozenset(map(sysconfig.get_path, sorted(ans))))
|
|
||||||
|
def gp(x: str) -> Optional[str]:
|
||||||
|
return sysconfig.get_path(x)
|
||||||
|
|
||||||
|
return sorted(frozenset(filter(None, map(gp, sorted(ans)))))
|
||||||
|
|
||||||
|
|
||||||
def get_python_flags(cflags):
|
def get_python_flags(cflags: List[str]) -> List[str]:
|
||||||
cflags.extend('-I' + x for x in get_python_include_paths())
|
cflags.extend('-I' + x for x in get_python_include_paths())
|
||||||
libs: List[str] = []
|
libs: List[str] = []
|
||||||
libs += (sysconfig.get_config_var('LIBS') or '').split()
|
libs += (sysconfig.get_config_var('LIBS') or '').split()
|
||||||
@ -186,7 +193,7 @@ def get_python_flags(cflags):
|
|||||||
return libs
|
return libs
|
||||||
|
|
||||||
|
|
||||||
def get_sanitize_args(cc, ccver):
|
def get_sanitize_args(cc: str, ccver: Tuple[int, int]) -> List[str]:
|
||||||
sanitize_args = ['-fsanitize=address']
|
sanitize_args = ['-fsanitize=address']
|
||||||
if ccver >= (5, 0):
|
if ccver >= (5, 0):
|
||||||
sanitize_args.append('-fsanitize=undefined')
|
sanitize_args.append('-fsanitize=undefined')
|
||||||
@ -209,7 +216,7 @@ def test_compile(cc: str, *cflags: str, src: Optional[str] = None) -> bool:
|
|||||||
return p.wait() == 0
|
return p.wait() == 0
|
||||||
|
|
||||||
|
|
||||||
def first_successful_compile(cc, *cflags, src=None):
|
def first_successful_compile(cc: str, *cflags: str, src: Optional[str] = None) -> str:
|
||||||
for x in cflags:
|
for x in cflags:
|
||||||
if test_compile(cc, *shlex.split(x), src=src):
|
if test_compile(cc, *shlex.split(x), src=src):
|
||||||
return x
|
return x
|
||||||
@ -217,9 +224,12 @@ def first_successful_compile(cc, *cflags, src=None):
|
|||||||
|
|
||||||
|
|
||||||
def init_env(
|
def init_env(
|
||||||
debug=False, sanitize=False, native_optimizations=True, profile=False,
|
debug: bool = False,
|
||||||
extra_logging=()
|
sanitize: bool = False,
|
||||||
):
|
native_optimizations: bool = True,
|
||||||
|
profile: bool = False,
|
||||||
|
extra_logging: Iterable[str] = ()
|
||||||
|
) -> Env:
|
||||||
native_optimizations = native_optimizations and not sanitize and not debug
|
native_optimizations = native_optimizations and not sanitize and not debug
|
||||||
cc, ccver = cc_version()
|
cc, ccver = cc_version()
|
||||||
print('CC:', cc, ccver)
|
print('CC:', cc, ccver)
|
||||||
@ -279,7 +289,7 @@ def init_env(
|
|||||||
return Env(cc, cppflags, cflags, ldflags, ccver=ccver)
|
return Env(cc, cppflags, cflags, ldflags, ccver=ccver)
|
||||||
|
|
||||||
|
|
||||||
def kitty_env():
|
def kitty_env() -> Env:
|
||||||
ans = env.copy()
|
ans = env.copy()
|
||||||
cflags = ans.cflags
|
cflags = ans.cflags
|
||||||
cflags.append('-pthread')
|
cflags.append('-pthread')
|
||||||
@ -318,11 +328,11 @@ def kitty_env():
|
|||||||
return ans
|
return ans
|
||||||
|
|
||||||
|
|
||||||
def define(x):
|
def define(x: str) -> str:
|
||||||
return '-D' + x
|
return '-D' + x
|
||||||
|
|
||||||
|
|
||||||
def run_tool(cmd, desc=None):
|
def run_tool(cmd: Union[str, List[str]], desc: Optional[str] = None) -> None:
|
||||||
if isinstance(cmd, str):
|
if isinstance(cmd, str):
|
||||||
cmd = shlex.split(cmd[0])
|
cmd = shlex.split(cmd[0])
|
||||||
if verbose:
|
if verbose:
|
||||||
@ -336,7 +346,7 @@ def run_tool(cmd, desc=None):
|
|||||||
raise SystemExit(ret)
|
raise SystemExit(ret)
|
||||||
|
|
||||||
|
|
||||||
def get_vcs_rev_defines():
|
def get_vcs_rev_defines() -> List[str]:
|
||||||
ans = []
|
ans = []
|
||||||
if os.path.exists('.git'):
|
if os.path.exists('.git'):
|
||||||
try:
|
try:
|
||||||
@ -355,13 +365,13 @@ def get_vcs_rev_defines():
|
|||||||
return ans
|
return ans
|
||||||
|
|
||||||
|
|
||||||
SPECIAL_SOURCES: Dict[str, Tuple[str, Union[List[str], Callable[[], Iterator[str]]]]] = {
|
SPECIAL_SOURCES: Dict[str, Tuple[str, Union[List[str], Callable[[], Union[List[str], Iterator[str]]]]]] = {
|
||||||
'kitty/parser_dump.c': ('kitty/parser.c', ['DUMP_COMMANDS']),
|
'kitty/parser_dump.c': ('kitty/parser.c', ['DUMP_COMMANDS']),
|
||||||
'kitty/data-types.c': ('kitty/data-types.c', get_vcs_rev_defines),
|
'kitty/data-types.c': ('kitty/data-types.c', get_vcs_rev_defines),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def newer(dest, *sources):
|
def newer(dest: str, *sources: str) -> bool:
|
||||||
try:
|
try:
|
||||||
dtime = os.path.getmtime(dest)
|
dtime = os.path.getmtime(dest)
|
||||||
except OSError:
|
except OSError:
|
||||||
@ -373,7 +383,7 @@ def newer(dest, *sources):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def dependecies_for(src, obj, all_headers):
|
def dependecies_for(src: str, obj: str, all_headers: Iterable[str]) -> Iterable[str]:
|
||||||
dep_file = obj.rpartition('.')[0] + '.d'
|
dep_file = obj.rpartition('.')[0] + '.d'
|
||||||
try:
|
try:
|
||||||
with open(dep_file) as f:
|
with open(dep_file) as f:
|
||||||
@ -398,7 +408,7 @@ def dependecies_for(src, obj, all_headers):
|
|||||||
yield path
|
yield path
|
||||||
|
|
||||||
|
|
||||||
def parallel_run(items):
|
def parallel_run(items: List[Command]) -> None:
|
||||||
try:
|
try:
|
||||||
num_workers = max(2, os.cpu_count() or 1)
|
num_workers = max(2, os.cpu_count() or 1)
|
||||||
except Exception:
|
except Exception:
|
||||||
@ -408,7 +418,7 @@ def parallel_run(items):
|
|||||||
failed = None
|
failed = None
|
||||||
num, total = 0, len(items)
|
num, total = 0, len(items)
|
||||||
|
|
||||||
def wait():
|
def wait() -> None:
|
||||||
nonlocal failed
|
nonlocal failed
|
||||||
if not workers:
|
if not workers:
|
||||||
return
|
return
|
||||||
@ -441,22 +451,33 @@ def parallel_run(items):
|
|||||||
print(' done')
|
print(' done')
|
||||||
if failed:
|
if failed:
|
||||||
print(failed.desc)
|
print(failed.desc)
|
||||||
run_tool(failed.cmd)
|
run_tool(list(failed.cmd))
|
||||||
|
|
||||||
|
|
||||||
class CompilationDatabase:
|
class CompilationDatabase:
|
||||||
|
|
||||||
def __init__(self, incremental):
|
def __init__(self, incremental: bool):
|
||||||
self.incremental = incremental
|
self.incremental = incremental
|
||||||
self.compile_commands = []
|
self.compile_commands: List[Command] = []
|
||||||
self.link_commands = []
|
self.link_commands: List[Command] = []
|
||||||
|
|
||||||
|
def add_command(
|
||||||
|
self,
|
||||||
|
desc: str,
|
||||||
|
cmd: List[str],
|
||||||
|
is_newer_func: Callable,
|
||||||
|
key: Optional[CompileKey] = None,
|
||||||
|
on_success: Optional[Callable] = None,
|
||||||
|
keyfile: Optional[str] = None
|
||||||
|
) -> None:
|
||||||
|
def no_op() -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
def add_command(self, desc, cmd, is_newer_func, key=None, on_success=None, keyfile=None):
|
|
||||||
queue = self.link_commands if keyfile is None else self.compile_commands
|
queue = self.link_commands if keyfile is None else self.compile_commands
|
||||||
queue.append(Command(desc, cmd, is_newer_func, on_success, key, keyfile))
|
queue.append(Command(desc, cmd, is_newer_func, on_success or no_op, key, keyfile))
|
||||||
|
|
||||||
def build_all(self):
|
def build_all(self) -> None:
|
||||||
def sort_key(compile_cmd):
|
def sort_key(compile_cmd: Command) -> int:
|
||||||
if compile_cmd.keyfile:
|
if compile_cmd.keyfile:
|
||||||
return os.path.getsize(compile_cmd.keyfile)
|
return os.path.getsize(compile_cmd.keyfile)
|
||||||
return 0
|
return 0
|
||||||
@ -474,12 +495,12 @@ class CompilationDatabase:
|
|||||||
items.append(compile_cmd)
|
items.append(compile_cmd)
|
||||||
parallel_run(items)
|
parallel_run(items)
|
||||||
|
|
||||||
def cmd_changed(self, compile_cmd):
|
def cmd_changed(self, compile_cmd: Command) -> bool:
|
||||||
key, cmd = compile_cmd.key, compile_cmd.cmd
|
key, cmd = compile_cmd.key, compile_cmd.cmd
|
||||||
return self.db.get(key) != cmd
|
return bool(self.db.get(key) != cmd)
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self) -> 'CompilationDatabase':
|
||||||
self.all_keys = set()
|
self.all_keys: Set[CompileKey] = set()
|
||||||
self.dbpath = os.path.abspath('compile_commands.json')
|
self.dbpath = os.path.abspath('compile_commands.json')
|
||||||
self.linkdbpath = os.path.join(os.path.dirname(self.dbpath), 'link_commands.json')
|
self.linkdbpath = os.path.join(os.path.dirname(self.dbpath), 'link_commands.json')
|
||||||
try:
|
try:
|
||||||
@ -499,12 +520,12 @@ class CompilationDatabase:
|
|||||||
self.linkdb = {k['output']: k['arguments'] for k in link_database}
|
self.linkdb = {k['output']: k['arguments'] for k in link_database}
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __exit__(self, *a):
|
def __exit__(self, *a: object) -> None:
|
||||||
cdb = self.db
|
cdb = self.db
|
||||||
for key in set(cdb) - self.all_keys:
|
for key in set(cdb) - self.all_keys:
|
||||||
del cdb[key]
|
del cdb[key]
|
||||||
compilation_database = [
|
compilation_database = [
|
||||||
{'file': c.key.src, 'arguments': c.cmd, 'directory': base, 'output': c.key.dest} for c in self.compile_commands
|
{'file': c.key.src, 'arguments': c.cmd, 'directory': base, 'output': c.key.dest} for c in self.compile_commands if c.key is not None
|
||||||
]
|
]
|
||||||
with open(self.dbpath, 'w') as f:
|
with open(self.dbpath, 'w') as f:
|
||||||
json.dump(compilation_database, f, indent=2, sort_keys=True)
|
json.dump(compilation_database, f, indent=2, sort_keys=True)
|
||||||
@ -512,7 +533,14 @@ class CompilationDatabase:
|
|||||||
json.dump([{'output': c.key, 'arguments': c.cmd, 'directory': base} for c in self.link_commands], f, indent=2, sort_keys=True)
|
json.dump([{'output': c.key, 'arguments': c.cmd, 'directory': base} for c in self.link_commands], f, indent=2, sort_keys=True)
|
||||||
|
|
||||||
|
|
||||||
def compile_c_extension(kenv, module, compilation_database, sources, headers, desc_prefix=''):
|
def compile_c_extension(
|
||||||
|
kenv: Env,
|
||||||
|
module: str,
|
||||||
|
compilation_database: CompilationDatabase,
|
||||||
|
sources: List[str],
|
||||||
|
headers: List[str],
|
||||||
|
desc_prefix: str = ''
|
||||||
|
) -> None:
|
||||||
prefix = os.path.basename(module)
|
prefix = os.path.basename(module)
|
||||||
objects = [
|
objects = [
|
||||||
os.path.join(build_dir, prefix + '-' + os.path.basename(src) + '.o')
|
os.path.join(build_dir, prefix + '-' + os.path.basename(src) + '.o')
|
||||||
@ -547,13 +575,13 @@ def compile_c_extension(kenv, module, compilation_database, sources, headers, de
|
|||||||
linker_cflags = list(filter(lambda x: x not in unsafe, kenv.cflags))
|
linker_cflags = list(filter(lambda x: x not in unsafe, kenv.cflags))
|
||||||
cmd = [kenv.cc] + linker_cflags + kenv.ldflags + objects + kenv.ldpaths + ['-o', dest]
|
cmd = [kenv.cc] + linker_cflags + kenv.ldflags + objects + kenv.ldpaths + ['-o', dest]
|
||||||
|
|
||||||
def on_success():
|
def on_success() -> None:
|
||||||
os.rename(dest, real_dest)
|
os.rename(dest, real_dest)
|
||||||
|
|
||||||
compilation_database.add_command(desc, cmd, partial(newer, real_dest, *objects), on_success=on_success, key=module + '.so')
|
compilation_database.add_command(desc, cmd, partial(newer, real_dest, *objects), on_success=on_success, key=CompileKey('', module + '.so'))
|
||||||
|
|
||||||
|
|
||||||
def find_c_files():
|
def find_c_files() -> Tuple[List[str], List[str]]:
|
||||||
ans, headers = [], []
|
ans, headers = [], []
|
||||||
d = 'kitty'
|
d = 'kitty'
|
||||||
exclude = {'fontconfig.c', 'freetype.c', 'desktop.c'} if is_macos else {'core_text.m', 'cocoa_window.m', 'macos_process_info.c'}
|
exclude = {'fontconfig.c', 'freetype.c', 'desktop.c'} if is_macos else {'core_text.m', 'cocoa_window.m', 'macos_process_info.c'}
|
||||||
@ -564,10 +592,10 @@ def find_c_files():
|
|||||||
elif ext == '.h':
|
elif ext == '.h':
|
||||||
headers.append(os.path.join('kitty', x))
|
headers.append(os.path.join('kitty', x))
|
||||||
ans.append('kitty/parser_dump.c')
|
ans.append('kitty/parser_dump.c')
|
||||||
return tuple(ans), tuple(headers)
|
return ans, headers
|
||||||
|
|
||||||
|
|
||||||
def compile_glfw(compilation_database):
|
def compile_glfw(compilation_database: CompilationDatabase) -> None:
|
||||||
modules = 'cocoa' if is_macos else 'x11 wayland'
|
modules = 'cocoa' if is_macos else 'x11 wayland'
|
||||||
for module in modules.split():
|
for module in modules.split():
|
||||||
try:
|
try:
|
||||||
@ -592,7 +620,7 @@ def compile_glfw(compilation_database):
|
|||||||
sources, all_headers, desc_prefix='[{}] '.format(module))
|
sources, all_headers, desc_prefix='[{}] '.format(module))
|
||||||
|
|
||||||
|
|
||||||
def kittens_env():
|
def kittens_env() -> Env:
|
||||||
kenv = env.copy()
|
kenv = env.copy()
|
||||||
cflags = kenv.cflags
|
cflags = kenv.cflags
|
||||||
cflags.append('-pthread')
|
cflags.append('-pthread')
|
||||||
@ -602,10 +630,10 @@ def kittens_env():
|
|||||||
return kenv
|
return kenv
|
||||||
|
|
||||||
|
|
||||||
def compile_kittens(compilation_database):
|
def compile_kittens(compilation_database: CompilationDatabase) -> None:
|
||||||
kenv = kittens_env()
|
kenv = kittens_env()
|
||||||
|
|
||||||
def list_files(q):
|
def list_files(q: str) -> List[str]:
|
||||||
return sorted(glob.glob(q))
|
return sorted(glob.glob(q))
|
||||||
|
|
||||||
def files(
|
def files(
|
||||||
@ -632,21 +660,22 @@ def compile_kittens(compilation_database):
|
|||||||
kenv, dest, compilation_database, sources, all_headers + ['kitty/data-types.h'])
|
kenv, dest, compilation_database, sources, all_headers + ['kitty/data-types.h'])
|
||||||
|
|
||||||
|
|
||||||
def build(args, native_optimizations=True):
|
def build(args: Options, native_optimizations: bool = True) -> None:
|
||||||
global env
|
global env
|
||||||
env = init_env(args.debug, args.sanitize, native_optimizations, args.profile, args.extra_logging)
|
env = init_env(args.debug, args.sanitize, native_optimizations, args.profile, args.extra_logging)
|
||||||
|
sources, headers = find_c_files()
|
||||||
compile_c_extension(
|
compile_c_extension(
|
||||||
kitty_env(), 'kitty/fast_data_types', args.compilation_database, *find_c_files()
|
kitty_env(), 'kitty/fast_data_types', args.compilation_database, sources, headers
|
||||||
)
|
)
|
||||||
compile_glfw(args.compilation_database)
|
compile_glfw(args.compilation_database)
|
||||||
compile_kittens(args.compilation_database)
|
compile_kittens(args.compilation_database)
|
||||||
|
|
||||||
|
|
||||||
def safe_makedirs(path):
|
def safe_makedirs(path: str) -> None:
|
||||||
os.makedirs(path, exist_ok=True)
|
os.makedirs(path, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
def build_launcher(args, launcher_dir='.', bundle_type='source'):
|
def build_launcher(args: Options, launcher_dir: str = '.', bundle_type: str = 'source') -> None:
|
||||||
cflags = '-Wall -Werror -fpie'.split()
|
cflags = '-Wall -Werror -fpie'.split()
|
||||||
cppflags = []
|
cppflags = []
|
||||||
libs: List[str] = []
|
libs: List[str] = []
|
||||||
@ -696,7 +725,7 @@ def build_launcher(args, launcher_dir='.', bundle_type='source'):
|
|||||||
# Packaging {{{
|
# Packaging {{{
|
||||||
|
|
||||||
|
|
||||||
def copy_man_pages(ddir):
|
def copy_man_pages(ddir: str) -> None:
|
||||||
mandir = os.path.join(ddir, 'share', 'man')
|
mandir = os.path.join(ddir, 'share', 'man')
|
||||||
safe_makedirs(mandir)
|
safe_makedirs(mandir)
|
||||||
with suppress(FileNotFoundError):
|
with suppress(FileNotFoundError):
|
||||||
@ -711,7 +740,7 @@ make && make docs
|
|||||||
shutil.copytree(src, os.path.join(mandir, 'man1'))
|
shutil.copytree(src, os.path.join(mandir, 'man1'))
|
||||||
|
|
||||||
|
|
||||||
def copy_html_docs(ddir):
|
def copy_html_docs(ddir: str) -> None:
|
||||||
htmldir = os.path.join(ddir, 'share', 'doc', appname, 'html')
|
htmldir = os.path.join(ddir, 'share', 'doc', appname, 'html')
|
||||||
safe_makedirs(os.path.dirname(htmldir))
|
safe_makedirs(os.path.dirname(htmldir))
|
||||||
with suppress(FileNotFoundError):
|
with suppress(FileNotFoundError):
|
||||||
@ -726,7 +755,7 @@ make && make docs
|
|||||||
shutil.copytree(src, htmldir)
|
shutil.copytree(src, htmldir)
|
||||||
|
|
||||||
|
|
||||||
def compile_python(base_path):
|
def compile_python(base_path: str) -> None:
|
||||||
import compileall
|
import compileall
|
||||||
import py_compile
|
import py_compile
|
||||||
try:
|
try:
|
||||||
@@ -738,18 +767,18 @@ def compile_python(base_path):
if f.rpartition('.')[-1] in ('pyc', 'pyo'):
os.remove(os.path.join(root, f))

-def c(base_path: str, **kw) -> None:
+def c(base_path: str, **kw: object) -> None:
try:
kw['invalidation_mode'] = py_compile.PycInvalidationMode.UNCHECKED_HASH
except AttributeError:
pass
-compileall.compile_dir(base_path, **kw)
+compileall.compile_dir(base_path, **kw) # type: ignore

for optimize in (0, 1, 2):
c(base_path, ddir='', force=True, optimize=optimize, quiet=1, workers=num_workers)


-def create_linux_bundle_gunk(ddir, libdir_name):
+def create_linux_bundle_gunk(ddir: str, libdir_name: str) -> None:
if not os.path.exists('docs/_build/html'):
run_tool(['make', 'docs'])
copy_man_pages(ddir)
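For reference, the compile_python hunk above boils down to one pattern: byte-compile the tree once per optimization level, opting into hash-based (UNCHECKED_HASH) .pyc invalidation when the interpreter supports it. A minimal, self-contained sketch of that pattern (the worker count, num_workers in the real script, is computed elsewhere and is just a parameter here):

import compileall
import py_compile


def compile_tree(base_path: str, workers: int = 1) -> None:
    # Byte-compile base_path at every optimization level. UNCHECKED_HASH
    # invalidation makes the generated .pyc files independent of source
    # timestamps, which suits packaged builds; interpreters older than 3.7
    # lack PycInvalidationMode and silently keep the default behaviour.
    def c(path: str, **kw: object) -> None:
        try:
            kw['invalidation_mode'] = py_compile.PycInvalidationMode.UNCHECKED_HASH
        except AttributeError:
            pass  # Python < 3.7
        compileall.compile_dir(path, **kw)  # type: ignore

    for optimize in (0, 1, 2):
        c(base_path, ddir='', force=True, optimize=optimize, quiet=1, workers=workers)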
@@ -774,16 +803,17 @@ Icon=kitty
Categories=System;TerminalEmulator;
'''
)
-ddir = Path(ddir)
-in_src_launcher = ddir / (libdir_name + '/kitty/kitty/launcher/kitty')
-launcher = ddir / 'bin/kitty'
+base = Path(ddir)
+in_src_launcher = base / (libdir_name + '/kitty/kitty/launcher/kitty')
+launcher = base / 'bin/kitty'
if os.path.exists(in_src_launcher):
os.remove(in_src_launcher)
os.makedirs(os.path.dirname(in_src_launcher), exist_ok=True)
os.symlink(os.path.relpath(launcher, os.path.dirname(in_src_launcher)), in_src_launcher)


-def macos_info_plist():
+def macos_info_plist() -> bytes:
import plistlib
VERSION = '.'.join(map(str, version))
pl = dict(
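The launcher-symlink lines in the hunk above use os.path.relpath so the in-source launcher becomes a relative symlink into bin/, which keeps the installed tree working if the whole prefix is moved. A small sketch of just that step, with a hypothetical link_in_source_launcher helper name:

import os


def link_in_source_launcher(in_src_launcher: str, launcher: str) -> None:
    # Replace any existing entry, then point it at the real launcher via a
    # path computed relative to the symlink's own directory, so the link
    # survives relocation of the install prefix.
    if os.path.exists(in_src_launcher):
        os.remove(in_src_launcher)
    os.makedirs(os.path.dirname(in_src_launcher), exist_ok=True)
    target = os.path.relpath(launcher, os.path.dirname(in_src_launcher))
    os.symlink(target, in_src_launcher)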
@@ -827,7 +857,7 @@ def macos_info_plist():
return plistlib.dumps(pl)


-def create_macos_app_icon(where='Resources'):
+def create_macos_app_icon(where: str = 'Resources') -> None:
logo_dir = os.path.abspath(os.path.join('logo', appname + '.iconset'))
subprocess.check_call([
'iconutil', '-c', 'icns', logo_dir, '-o',
@@ -835,7 +865,7 @@ def create_macos_app_icon(where='Resources'):
])


-def create_minimal_macos_bundle(args, where):
+def create_minimal_macos_bundle(args: Options, where: str) -> None:
if os.path.exists(where):
shutil.rmtree(where)
bin_dir = os.path.join(where, 'kitty.app/Contents/MacOS')
@@ -851,8 +881,8 @@ def create_minimal_macos_bundle(args, where):
create_macos_app_icon(resources_dir)


-def create_macos_bundle_gunk(ddir):
-ddir = Path(ddir)
+def create_macos_bundle_gunk(dest: str) -> None:
+ddir = Path(dest)
os.mkdir(ddir / 'Contents')
with open(ddir / 'Contents/Info.plist', 'wb') as fp:
fp.write(macos_info_plist())
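macos_info_plist() is annotated -> bytes because plistlib.dumps returns the serialized plist as bytes, which is also why Info.plist is opened in 'wb' mode above. A tiny illustration; the dictionary keys here are illustrative only, not the full set the script writes:

import plistlib

pl = dict(CFBundleName='kitty', CFBundleShortVersionString='1.0')  # illustrative keys
data: bytes = plistlib.dumps(pl)      # XML plist, returned as bytes
with open('Info.plist', 'wb') as fp:  # hence the 'wb' mode
    fp.write(data)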
@@ -869,7 +899,7 @@ def create_macos_bundle_gunk(ddir):
create_macos_app_icon(os.path.join(ddir, 'Contents', 'Resources'))


-def package(args, bundle_type):
+def package(args: Options, bundle_type: str) -> None:
ddir = args.prefix
if bundle_type == 'linux-freeze':
args.libdir_name = 'lib'
@@ -891,7 +921,7 @@ def package(args, bundle_type):
shutil.copy2('logo/beam-cursor.png', os.path.join(libdir, 'logo'))
shutil.copy2('logo/beam-cursor@2x.png', os.path.join(libdir, 'logo'))

-def src_ignore(parent, entries):
+def src_ignore(parent: str, entries: Iterable[str]) -> List[str]:
return [
x for x in entries
if '.' in x and x.rpartition('.')[2] not in
@@ -920,9 +950,9 @@ def package(args, bundle_type):
# }}}


-def clean():
+def clean() -> None:

-def safe_remove(*entries):
+def safe_remove(*entries: str) -> None:
for x in entries:
if os.path.exists(x):
if os.path.isdir(x):
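In the clean() hunk above, the annotation *entries: str types each individual positional argument as a str (inside the function, entries itself is a Tuple[str, ...]), which is all the helper needs. A standalone sketch of the same shape; the rmtree branch is assumed from the isdir check, since that part of the body is elided from the hunk:

import os
import shutil


def safe_remove(*entries: str) -> None:
    # Each x is typed as str; remove files with unlink and directories
    # with rmtree, skipping paths that do not exist.
    for x in entries:
        if os.path.exists(x):
            if os.path.isdir(x):
                shutil.rmtree(x)
            else:
                os.unlink(x)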
@@ -949,66 +979,66 @@ def clean():
os.unlink(x)


-def option_parser(): # {{{
+def option_parser() -> argparse.ArgumentParser: # {{{
p = argparse.ArgumentParser()
p.add_argument(
'action',
nargs='?',
-default='build',
+default=Options.action,
choices='build test linux-package kitty.app linux-freeze macos-freeze clean'.split(),
help='Action to perform (default is build)'
)
p.add_argument(
'--debug',
-default=False,
+default=Options.debug,
action='store_true',
help='Build extension modules with debugging symbols'
)
p.add_argument(
'-v', '--verbose',
-default=0,
+default=Options.verbose,
action='count',
help='Be verbose'
)
p.add_argument(
'--sanitize',
-default=False,
+default=Options.sanitize,
action='store_true',
help='Turn on sanitization to detect memory access errors and undefined behavior. This is a big performance hit.'
)
p.add_argument(
'--prefix',
-default='./linux-package',
+default=Options.prefix,
help='Where to create the linux package'
)
p.add_argument(
'--full',
dest='incremental',
-default=True,
+default=Options.incremental,
action='store_false',
help='Do a full build, even for unchanged files'
)
p.add_argument(
'--profile',
-default=False,
+default=Options.profile,
action='store_true',
help='Use the -pg compile flag to add profiling information'
)
p.add_argument(
'--for-freeze',
-default=False,
+default=Options.for_freeze,
action='store_true',
help='Internal use'
)
p.add_argument(
'--libdir-name',
-default='lib',
+default=Options.libdir_name,
help='The name of the directory inside --prefix in which to store compiled files. Defaults to "lib"'
)
p.add_argument(
'--extra-logging',
action='append',
-default=[],
+default=Options.extra_logging,
choices=('event-loop',),
help='Turn on extra logging for debugging in this build. Can be specified multiple times, to turn'
' on different types of logging.'
@@ -1016,7 +1046,7 @@ def option_parser(): # {{{
p.add_argument(
'--update-check-interval',
type=float,
-default=24,
+default=Options.update_check_interval,
help='When building a package, the default value for the update_check_interval setting will'
' be set to this number. Use zero to disable update checking.'
)
@@ -1024,11 +1054,11 @@ def option_parser(): # {{{
# }}}


-def main():
+def main() -> None:
global verbose
if sys.version_info < (3, 5):
raise SystemExit('python >= 3.5 required')
-args = option_parser().parse_args()
+args = option_parser().parse_args(namespace=Options())
verbose = args.verbose > 0
args.prefix = os.path.abspath(args.prefix)
os.chdir(base)
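The option_parser and main hunks above replace hard-coded argparse defaults with attributes of an Options class and then parse directly into an Options instance via parse_args(namespace=Options()), so attribute access on args has concrete types instead of a bare Namespace. The Options class itself is defined earlier in setup.py and is not shown in this diff; the sketch below is a hypothetical, much-reduced version of the pattern, with attribute names and defaults taken from the old hard-coded values in the hunks and everything else assumed:

import argparse


class Options(argparse.Namespace):
    # Class attributes double as the argparse defaults and give every
    # field a concrete type for the checker.
    action: str = 'build'
    debug: bool = False
    verbose: int = 0
    prefix: str = './linux-package'


def option_parser() -> argparse.ArgumentParser:
    p = argparse.ArgumentParser()
    p.add_argument('action', nargs='?', default=Options.action)
    p.add_argument('--debug', default=Options.debug, action='store_true')
    p.add_argument('-v', '--verbose', default=Options.verbose, action='count')
    p.add_argument('--prefix', default=Options.prefix)
    return p


args = option_parser().parse_args(['--debug', '-vv'], namespace=Options())
assert isinstance(args, Options)  # parse_args fills in and returns the namespace it was given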