Fix develop (#12039)
Fixes file encoding errors on Windows, and layouts not correctly merging into info.json.

* Force UTF-8 encoding
* Correctly merge layouts and layout aliases
* Show what aliases point to
parent 23ed6c4ec0
commit 1581ea48dc
10 changed files with 66 additions and 32 deletions
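Most of the diff below swaps bare read_text() and open() calls for variants that pass encoding='utf-8'. The reason, sketched below with a made-up file name: Python's text I/O defaults to the locale encoding, which on many Windows installs is a legacy code page rather than UTF-8, so reading a UTF-8 source file that contains non-ASCII characters can raise UnicodeDecodeError. This is only an illustrative sketch, not part of the commit.

from pathlib import Path

# Illustrative only: a header containing non-ASCII text, as keymaps sometimes do.
sample = Path('example_keymap.h')
sample.write_text('/* Umlaut test: äöü */\n#define LAYOUT_example(k00) {{k00}}\n', encoding='utf-8')

try:
    # Relies on the locale encoding; on a cp1252 Windows setup this may
    # raise UnicodeDecodeError or silently mis-decode the comment.
    text = sample.read_text()
except UnicodeDecodeError:
    text = None

# Forcing UTF-8 gives the same bytes-to-text mapping on every platform,
# which is what the changes in this commit do throughout the CLI.
text = sample.read_text(encoding='utf-8')
print(text.splitlines()[0])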
@@ -85,8 +85,16 @@
         "layout_aliases": {
             "type": "object",
             "additionalProperties": {
-                "type": "string",
-                "pattern": "^LAYOUT_[0-9a-z_]*$"
+                "oneOf": [
+                    {
+                        "type": "string",
+                        "enum": ["LAYOUT"]
+                    },
+                    {
+                        "type": "string",
+                        "pattern": "^LAYOUT_[0-9a-z_]*$"
+                    }
+                ]
             }
         },
         "layouts": {
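In practice the widened schema accepts alias maps where a target is either the bare LAYOUT macro or a LAYOUT_* name. A minimal sketch of such a map, with invented alias and target names:

# Hypothetical layout_aliases data that the updated schema would now accept.
layout_aliases = {
    'KEYMAP': 'LAYOUT',               # newly valid: plain LAYOUT as a target
    'LAYOUT_ansi': 'LAYOUT_60_ansi',  # still valid: LAYOUT_* targets
}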
@@ -46,7 +46,7 @@ def find_layouts(file):
     parsed_layouts = {}
 
     # Search the file for LAYOUT macros and aliases
-    file_contents = file.read_text()
+    file_contents = file.read_text(encoding='utf-8')
     file_contents = comment_remover(file_contents)
     file_contents = file_contents.replace('\\\n', '')
 
@@ -87,12 +87,7 @@ def find_layouts(file):
         except ValueError:
             continue
 
-    # Populate our aliases
-    for alias, text in aliases.items():
-        if text in parsed_layouts and 'KEYMAP' not in alias:
-            parsed_layouts[alias] = parsed_layouts[text]
-
-    return parsed_layouts
+    return parsed_layouts, aliases
 
 
 def parse_config_h_file(config_h_file, config_h=None):
@@ -104,7 +99,7 @@ def parse_config_h_file(config_h_file, config_h=None):
     config_h_file = Path(config_h_file)
 
     if config_h_file.exists():
-        config_h_text = config_h_file.read_text()
+        config_h_text = config_h_file.read_text(encoding='utf-8')
         config_h_text = config_h_text.replace('\\\n', '')
         config_h_text = strip_multiline_comment(config_h_text)
 
@@ -40,7 +40,7 @@ file_header = """\
 
 
 def collect_defines(filepath):
-    with open(filepath, 'r') as f:
+    with open(filepath, 'r', encoding='utf-8') as f:
         content = f.read()
     define_search = re.compile(r'(?m)^#\s*define\s+(?:.*\\\r?\n)*.*$', re.MULTILINE)
     value_search = re.compile(r'^#\s*define\s+(?P<name>[a-zA-Z0-9_]+(\([^\)]*\))?)\s*(?P<value>.*)', re.DOTALL)
@@ -146,17 +146,17 @@ def chibios_confmigrate(cli):
     if cli.args.input.name == "chconf.h" and ("CHCONF_H" in input_defs["dict"] or "_CHCONF_H_" in input_defs["dict"] or cli.args.force):
         migrate_chconf_h(to_override, outfile=sys.stdout)
         if cli.args.overwrite:
-            with open(cli.args.input, "w") as out_file:
+            with open(cli.args.input, "w", encoding='utf-8') as out_file:
                 migrate_chconf_h(to_override, outfile=out_file)
 
     elif cli.args.input.name == "halconf.h" and ("HALCONF_H" in input_defs["dict"] or "_HALCONF_H_" in input_defs["dict"] or cli.args.force):
         migrate_halconf_h(to_override, outfile=sys.stdout)
         if cli.args.overwrite:
-            with open(cli.args.input, "w") as out_file:
+            with open(cli.args.input, "w", encoding='utf-8') as out_file:
                 migrate_halconf_h(to_override, outfile=out_file)
 
     elif cli.args.input.name == "mcuconf.h" and ("MCUCONF_H" in input_defs["dict"] or "_MCUCONF_H_" in input_defs["dict"] or cli.args.force):
         migrate_mcuconf_h(to_override, outfile=sys.stdout)
         if cli.args.overwrite:
-            with open(cli.args.input, "w") as out_file:
+            with open(cli.args.input, "w", encoding='utf-8') as out_file:
                 migrate_mcuconf_h(to_override, outfile=out_file)
@@ -82,6 +82,10 @@ def generate_layouts(cli):
         layouts_h_lines.append(rows)
         layouts_h_lines.append('}')
 
+    for alias, target in kb_info_json.get('layout_aliases', {}).items():
+        layouts_h_lines.append('')
+        layouts_h_lines.append('#define %s %s' % (alias, target))
+
     # Show the results
     layouts_h = '\n'.join(layouts_h_lines) + '\n'
 
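With the loop added above, the generated layouts header also carries one #define per alias, so keymaps written against an alias keep compiling. A runnable sketch of what that loop appends, using the hypothetical alias map from earlier:

# Hypothetical data; mirrors the new loop in generate_layouts.
kb_info_json = {'layout_aliases': {'KEYMAP': 'LAYOUT', 'LAYOUT_ansi': 'LAYOUT_60_ansi'}}

layouts_h_lines = []
for alias, target in kb_info_json.get('layout_aliases', {}).items():
    layouts_h_lines.append('')
    layouts_h_lines.append('#define %s %s' % (alias, target))

print('\n'.join(layouts_h_lines))
# Prints (one blank line before each define):
#   #define KEYMAP LAYOUT
#   #define LAYOUT_ansi LAYOUT_60_ansi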
@@ -29,7 +29,7 @@ def show_keymap(kb_info_json, title_caps=True):
     else:
         cli.echo('{fg_blue}keymap_%s{fg_reset}:', cli.config.info.keymap)
 
-    keymap_data = json.load(keymap_path.open())
+    keymap_data = json.load(keymap_path.open(encoding='utf-8'))
     layout_name = keymap_data['layout']
 
     for layer_num, layer in enumerate(keymap_data['layers']):
@@ -57,7 +57,7 @@ def show_matrix(kb_info_json, title_caps=True):
     # Build our label list
     labels = []
     for key in layout['layout']:
-        if key['matrix']:
+        if 'matrix' in key:
             row = ROW_LETTERS[key['matrix'][0]]
             col = COL_LETTERS[key['matrix'][1]]
 
@@ -91,6 +91,9 @@ def print_friendly_output(kb_info_json):
     cli.echo('{fg_blue}Size{fg_reset}: %s x %s' % (kb_info_json['width'], kb_info_json['height']))
     cli.echo('{fg_blue}Processor{fg_reset}: %s', kb_info_json.get('processor', 'Unknown'))
     cli.echo('{fg_blue}Bootloader{fg_reset}: %s', kb_info_json.get('bootloader', 'Unknown'))
+    if 'layout_aliases' in kb_info_json:
+        aliases = [f'{key}={value}' for key, value in kb_info_json['layout_aliases'].items()]
+        cli.echo('{fg_blue}Layout aliases:{fg_reset} %s' % (', '.join(aliases),))
 
     if cli.config.info.layouts:
         show_layouts(kb_info_json, True)
@@ -27,7 +27,7 @@ def kle2json(cli):
         cli.log.error('File {fg_cyan}%s{style_reset_all} was not found.', file_path)
         return False
     out_path = file_path.parent
-    raw_code = file_path.open().read()
+    raw_code = file_path.read_text(encoding='utf-8')
     # Check if info.json exists, allow overwrite with force
     if Path(out_path, "info.json").exists() and not cli.args.force:
         cli.log.error('File {fg_cyan}%s/info.json{style_reset_all} already exists, use -f or --force to overwrite.', out_path)
@@ -45,7 +45,12 @@ def info_json(keyboard):
         info_data['keymaps'][keymap.name] = {'url': f'https://raw.githubusercontent.com/qmk/qmk_firmware/master/{keymap}/keymap.json'}
 
     # Populate layout data
-    for layout_name, layout_json in _find_all_layouts(info_data, keyboard).items():
+    layouts, aliases = _find_all_layouts(info_data, keyboard)
+
+    if aliases:
+        info_data['layout_aliases'] = aliases
+
+    for layout_name, layout_json in layouts.items():
         if not layout_name.startswith('LAYOUT_kc'):
             layout_json['c_macro'] = True
             info_data['layouts'][layout_name] = layout_json
@@ -92,7 +97,7 @@ def _json_load(json_file):
     Note: file must be a Path object.
     """
     try:
-        return hjson.load(json_file.open())
+        return hjson.load(json_file.open(encoding='utf-8'))
 
     except json.decoder.JSONDecodeError as e:
         cli.log.error('Invalid JSON encountered attempting to load {fg_cyan}%s{fg_reset}:\n\t{fg_red}%s', json_file, e)
@@ -415,21 +420,28 @@ def _merge_layouts(info_data, new_info_data):
 
 def _search_keyboard_h(path):
     current_path = Path('keyboards/')
+    aliases = {}
     layouts = {}
+
     for directory in path.parts:
         current_path = current_path / directory
         keyboard_h = '%s.h' % (directory,)
         keyboard_h_path = current_path / keyboard_h
         if keyboard_h_path.exists():
-            layouts.update(find_layouts(keyboard_h_path))
+            new_layouts, new_aliases = find_layouts(keyboard_h_path)
+            layouts.update(new_layouts)
 
-    return layouts
+            for alias, alias_text in new_aliases.items():
+                if alias_text in layouts:
+                    aliases[alias] = alias_text
+
+    return layouts, aliases
 
 
 def _find_all_layouts(info_data, keyboard):
     """Looks for layout macros associated with this keyboard.
     """
-    layouts = _search_keyboard_h(Path(keyboard))
+    layouts, aliases = _search_keyboard_h(Path(keyboard))
 
     if not layouts:
         # If we don't find any layouts from info.json or keyboard.h we widen our search. This is error prone which is why we want to encourage people to follow the standard above.
@@ -437,11 +449,15 @@ def _find_all_layouts(info_data, keyboard):
 
         for file in glob('keyboards/%s/*.h' % keyboard):
             if file.endswith('.h'):
-                these_layouts = find_layouts(file)
+                these_layouts, these_aliases = find_layouts(file)
+
                 if these_layouts:
                     layouts.update(these_layouts)
 
-    return layouts
+                if these_aliases:
+                    aliases.update(these_aliases)
+
+    return layouts, aliases
 
 
 def _log_error(info_data, message):
@@ -540,11 +556,19 @@ def merge_info_jsons(keyboard, info_data):
             cli.log.error('\t%s: %s', json_path, e.message)
             continue
 
-        # Mark the layouts as coming from json
-        for layout in new_info_data.get('layouts', {}).values():
-            layout['c_macro'] = False
+        # Merge layout data in
+        for layout_name, layout in new_info_data.get('layouts', {}).items():
+            if layout_name in info_data['layouts']:
+                for new_key, existing_key in zip(layout['layout'], info_data['layouts'][layout_name]['layout']):
+                    existing_key.update(new_key)
+            else:
+                layout['c_macro'] = False
+                info_data['layouts'][layout_name] = layout
 
         # Update info_data with the new data
+        if 'layouts' in new_info_data:
+            del (new_info_data['layouts'])
+
         deep_update(info_data, new_info_data)
 
     return info_data
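The merge rewrite above stops info.json layouts from blindly replacing layouts parsed from the C macros: when a layout already exists, each key dictionary from info.json is merged into the corresponding existing key instead. A minimal sketch of that key-by-key merge with invented key data:

# Hypothetical data: one key as parsed from the C macro, and the same key
# as described in info.json.
existing_layout = {'layout': [{'matrix': [0, 0], 'x': 0, 'y': 0}]}
json_layout = {'layout': [{'label': 'Esc', 'w': 1.5}]}

# Key-by-key merge, as in the new merge_info_jsons() loop: json data is
# layered on top of what was found in the keyboard's headers.
for new_key, existing_key in zip(json_layout['layout'], existing_layout['layout']):
    existing_key.update(new_key)

print(existing_layout['layout'][0])
# {'matrix': [0, 0], 'x': 0, 'y': 0, 'label': 'Esc', 'w': 1.5}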
@@ -42,7 +42,7 @@ def template_json(keyboard):
     template_file = Path('keyboards/%s/templates/keymap.json' % keyboard)
     template = {'keyboard': keyboard}
     if template_file.exists():
-        template.update(json.loads(template_file.read_text()))
+        template.update(json.load(template_file.open(encoding='utf-8')))
 
     return template
 
@@ -58,7 +58,7 @@ def template_c(keyboard):
     """
     template_file = Path('keyboards/%s/templates/keymap.c' % keyboard)
    if template_file.exists():
-        template = template_file.read_text()
+        template = template_file.read_text(encoding='utf-8')
     else:
         template = DEFAULT_KEYMAP_C
 
@@ -469,7 +469,7 @@ def parse_keymap_c(keymap_file, use_cpp=True):
     if use_cpp:
         keymap_file = _c_preprocess(keymap_file)
     else:
-        keymap_file = keymap_file.read_text()
+        keymap_file = keymap_file.read_text(encoding='utf-8')
 
     keymap = dict()
     keymap['layers'] = _get_layers(keymap_file)
@@ -95,7 +95,7 @@ def check_udev_rules():
 
     # Collect all rules from the config files
     for rule_file in udev_rules:
-        for line in rule_file.read_text().split('\n'):
+        for line in rule_file.read_text(encoding='utf-8').split('\n'):
            line = line.strip()
            if not line.startswith("#") and len(line):
                current_rules.add(line)
@@ -16,7 +16,7 @@ def check_subcommand(command, *args):
 def check_subcommand_stdin(file_to_read, command, *args):
     """Pipe content of a file to a command and return output.
     """
-    with open(file_to_read) as my_file:
+    with open(file_to_read, encoding='utf-8') as my_file:
         cmd = ['bin/qmk', command, *args]
         result = run(cmd, stdin=my_file, stdout=PIPE, stderr=STDOUT, universal_newlines=True)
     return result