meson: Move all code generation scripts to utils/codegen/
We have multiple code generation scripts in utils/, mixed with other
miscellaneous utilities, as well as a larger mojom-based code base in
utils/ipc/. To make code sharing easier between the generator scripts,
without creating a mess in the utils/ directory, move all the code
generation code to utils/codegen/.

Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
Reviewed-by: Daniel Scally <dan.scally@ideasonboard.com>
Reviewed-by: Paul Elder <paul.elder@ideasonboard.com>
This commit is contained in:
parent
d3bf27180e
commit
50c92cc7e2
91 changed files with 15 additions and 15 deletions
389
utils/codegen/gen-controls.py
Executable file
389
utils/codegen/gen-controls.py
Executable file
|
@ -0,0 +1,389 @@
|
|||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright (C) 2019, Google Inc.
|
||||
#
|
||||
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
|
||||
#
|
||||
# Generate control definitions from YAML
|
||||
|
||||
import argparse
|
||||
from functools import reduce
|
||||
import operator
|
||||
import string
|
||||
import sys
|
||||
import yaml
|
||||
import os
|
||||
|
||||
|
||||
class ControlEnum(object):
    """Read-only view of a single enum entry from a control's YAML data."""

    def __init__(self, data):
        self.__data = data

    @property
    def name(self):
        """The enum name"""
        return self.__data.get('name')

    @property
    def value(self):
        """The enum value"""
        return self.__data.get('value')

    @property
    def description(self):
        """The enum description"""
        return self.__data.get('description')
|
||||
|
||||
|
||||
class Control(object):
    """A control or property definition parsed from YAML.

    Wraps the YAML mapping of a single control, pre-computing the enum
    values (if any) and the total array size (if the control is an array).

    name: the CamelCase control name
    data: the YAML mapping describing the control
    vendor: the vendor string the control belongs to
    """

    def __init__(self, name, data, vendor):
        self.__name = name
        self.__data = data
        self.__enum_values = None
        self.__size = None
        self.__vendor = vendor

        enum_values = data.get('enum')
        if enum_values is not None:
            self.__enum_values = [ControlEnum(enum) for enum in enum_values]

        size = self.__data.get('size')
        if size is not None:
            if len(size) == 0:
                raise RuntimeError(f'Control `{self.__name}` size must have at least one dimension')

            # Compute the total number of elements in the array. If any of the
            # array dimension is a string, the array is variable-sized and the
            # total size is recorded as 0.
            num_elems = 1
            for dim in size:
                if isinstance(dim, str):
                    num_elems = 0
                    break

                dim = int(dim)
                if dim <= 0:
                    raise RuntimeError(f'Control `{self.__name}` size must have positive values only')

                num_elems *= dim

            self.__size = num_elems

    @property
    def description(self):
        """The control description"""
        return self.__data.get('description')

    @property
    def enum_values(self):
        """The enum values, if the control is an enumeration"""
        if self.__enum_values is None:
            return
        yield from self.__enum_values

    @property
    def is_enum(self):
        """Is the control an enumeration"""
        return self.__enum_values is not None

    @property
    def vendor(self):
        """The vendor string, or None"""
        return self.__vendor

    @property
    def name(self):
        """The control name (CamelCase)"""
        return self.__name

    @property
    def type(self):
        """The C++ type used to store the control value."""
        typ = self.__data.get('type')

        if typ == 'string':
            return 'std::string'

        # Scalar control: the bare element type.
        if self.__size is None:
            return typ

        # Fixed-size arrays use a Span with a static extent, variable-sized
        # arrays (size 0) a dynamic-extent Span.
        if self.__size:
            return f"Span<const {typ}, {self.__size}>"
        else:
            return f"Span<const {typ}>"
|
||||
|
||||
|
||||
def snake_case(s):
    """Insert an underscore before each uppercase letter of a CamelCase
    name, preserving case ('AeEnable' -> 'Ae_Enable'); callers uppercase
    the result themselves when needed."""
    # Replaces the legacy `cond and a or b` hack with a conditional
    # expression.
    return ''.join(f'_{c}' if c.isupper() else c for c in s).strip('_')
|
||||
|
||||
|
||||
def format_description(description):
    """Format a YAML description into doxygen comment body lines.

    The first line receives a '\\brief' tag; blank lines become ' *' and
    non-blank lines are prefixed with ' * '.
    """
    lines = description.strip('\n').split('\n')
    lines[0] = '\\brief ' + lines[0]
    # Conditional expression instead of the legacy `cond and a or b` hack.
    return '\n'.join((' * ' + line if line else ' *') for line in lines)
|
||||
|
||||
|
||||
def generate_cpp(controls):
    """Generate the doxygen documentation and C++ definition snippets for
    the controls .cpp template.

    controls: iterable of Control objects
    Returns a dict of substitution strings ('controls_doc', 'controls_def',
    'controls_map', 'vendor_controls_doc', 'vendor_controls_def') consumed
    by the .cpp.in template.
    """
    enum_doc_start_template = string.Template('''/**
 * \\enum ${name}Enum
 * \\brief Supported ${name} values''')
    enum_doc_value_template = string.Template(''' * \\var ${value}
${description}''')
    doc_template = string.Template('''/**
 * \\var ${name}
${description}
 */''')
    def_template = string.Template('extern const Control<${type}> ${name}(${id_name}, "${name}");')
    enum_values_doc = string.Template('''/**
 * \\var ${name}Values
 * \\brief List of all $name supported values
 */''')
    enum_values_start = string.Template('''extern const std::array<const ControlValue, ${size}> ${name}Values = {''')
    enum_values_values = string.Template('''\tstatic_cast<int32_t>(${name}),''')
    name_value_map_doc = string.Template('''/**
 * \\var ${name}NameValueMap
 * \\brief Map of all $name supported value names (in std::string format) to value
 */''')
    name_value_map_start = string.Template('''extern const std::map<std::string, ${type}> ${name}NameValueMap = {''')
    name_value_values = string.Template('''\t{ "${name}", ${name} },''')

    # Documentation and definitions are accumulated per vendor; the
    # id-to-control map is global.
    ctrls_doc = {}
    ctrls_def = {}
    ctrls_map = []

    for ctrl in controls:
        id_name = snake_case(ctrl.name).upper()

        vendor = ctrl.vendor
        if vendor not in ctrls_doc:
            ctrls_doc[vendor] = []
            ctrls_def[vendor] = []

        info = {
            'name': ctrl.name,
            'type': ctrl.type,
            'description': format_description(ctrl.description),
            'id_name': id_name,
        }

        target_doc = ctrls_doc[vendor]
        target_def = ctrls_def[vendor]

        if ctrl.is_enum:
            # Enum controls additionally get per-value documentation, a
            # Values array and a name-to-value map.
            enum_doc = []
            enum_doc.append(enum_doc_start_template.substitute(info))

            num_entries = 0
            for enum in ctrl.enum_values:
                value_info = {
                    'name': ctrl.name,
                    'value': enum.name,
                    'description': format_description(enum.description),
                }
                enum_doc.append(enum_doc_value_template.substitute(value_info))
                num_entries += 1

            enum_doc = '\n *\n'.join(enum_doc)
            enum_doc += '\n */'
            target_doc.append(enum_doc)

            values_info = {
                'name': info['name'],
                'type': ctrl.type,
                'size': num_entries,
            }
            target_doc.append(enum_values_doc.substitute(values_info))
            target_def.append(enum_values_start.substitute(values_info))
            for enum in ctrl.enum_values:
                value_info = {
                    'name': enum.name
                }
                target_def.append(enum_values_values.substitute(value_info))
            target_def.append("};")

            target_doc.append(name_value_map_doc.substitute(values_info))
            target_def.append(name_value_map_start.substitute(values_info))
            for enum in ctrl.enum_values:
                value_info = {
                    'name': enum.name
                }
                target_def.append(name_value_values.substitute(value_info))
            target_def.append("};")

        target_doc.append(doc_template.substitute(info))
        target_def.append(def_template.substitute(info))

        # Core libcamera controls are referenced without a namespace prefix
        # in the id-to-control map; vendor controls get a 'vendor::' prefix.
        vendor_ns = vendor + '::' if vendor != "libcamera" else ''
        ctrls_map.append('\t{ ' + vendor_ns + id_name + ', &' + vendor_ns + ctrl.name + ' },')

    vendor_ctrl_doc_sub = []
    vendor_ctrl_template = string.Template('''
/**
 * \\brief Namespace for ${vendor} controls
 */
namespace ${vendor} {

${vendor_controls_str}

} /* namespace ${vendor} */''')

    # Wrap each vendor's documentation and definitions in the vendor's
    # namespace; 'libcamera' entries are returned unwrapped.
    for vendor in [v for v in ctrls_doc.keys() if v not in ['libcamera']]:
        vendor_ctrl_doc_sub.append(vendor_ctrl_template.substitute({'vendor': vendor, 'vendor_controls_str': '\n\n'.join(ctrls_doc[vendor])}))

    vendor_ctrl_def_sub = []
    for vendor in [v for v in ctrls_def.keys() if v not in ['libcamera']]:
        vendor_ctrl_def_sub.append(vendor_ctrl_template.substitute({'vendor': vendor, 'vendor_controls_str': '\n'.join(ctrls_def[vendor])}))

    return {
        'controls_doc': '\n\n'.join(ctrls_doc['libcamera']),
        'controls_def': '\n'.join(ctrls_def['libcamera']),
        'controls_map': '\n'.join(ctrls_map),
        'vendor_controls_doc': '\n'.join(vendor_ctrl_doc_sub),
        'vendor_controls_def': '\n'.join(vendor_ctrl_def_sub),
    }
|
||||
|
||||
|
||||
def generate_h(controls, mode, ranges):
    """Generate the control id enum and declarations for the .h template.

    controls: iterable of Control objects
    mode: 'controls' or 'properties', used for the vendor feature macro
    ranges: mapping of vendor name to the base numerical control id
    Returns a dict with 'ids', 'controls' and 'vendor_controls'
    substitution strings.
    Raises RuntimeError if a control's vendor has no reserved id range.
    """
    enum_template_start = string.Template('''enum ${name}Enum {''')
    enum_value_template = string.Template('''\t${name} = ${value},''')
    enum_values_template = string.Template('''extern const std::array<const ControlValue, ${size}> ${name}Values;''')
    name_value_map_template = string.Template('''extern const std::map<std::string, ${type}> ${name}NameValueMap;''')
    template = string.Template('''extern const Control<${type}> ${name};''')

    # Declarations, id enumerators and the next free id, all per vendor.
    ctrls = {}
    ids = {}
    id_value = {}

    for ctrl in controls:
        id_name = snake_case(ctrl.name).upper()

        vendor = ctrl.vendor
        if vendor not in ctrls:
            if vendor not in ranges.keys():
                raise RuntimeError(f'Control id range is not defined for vendor {vendor}')
            # Ids are assigned sequentially, starting one past the vendor's
            # reserved range base.
            id_value[vendor] = ranges[vendor] + 1
            ids[vendor] = []
            ctrls[vendor] = []

        target_ids = ids[vendor]
        target_ids.append('\t' + id_name + ' = ' + str(id_value[vendor]) + ',')

        info = {
            'name': ctrl.name,
            'type': ctrl.type,
        }

        target_ctrls = ctrls[vendor]

        if ctrl.is_enum:
            # Enum controls also declare the enum type, the Values array
            # and the name-to-value map.
            target_ctrls.append(enum_template_start.substitute(info))

            num_entries = 0
            for enum in ctrl.enum_values:
                value_info = {
                    'name': enum.name,
                    'value': enum.value,
                }
                target_ctrls.append(enum_value_template.substitute(value_info))
                num_entries += 1
            target_ctrls.append("};")

            values_info = {
                'name': info['name'],
                'type': ctrl.type,
                'size': num_entries,
            }
            target_ctrls.append(enum_values_template.substitute(values_info))
            target_ctrls.append(name_value_map_template.substitute(values_info))

        target_ctrls.append(template.substitute(info))
        id_value[vendor] += 1

    vendor_template = string.Template('''
namespace ${vendor} {

#define LIBCAMERA_HAS_${vendor_def}_VENDOR_${mode}

enum {
${vendor_enums}
};

${vendor_controls}

} /* namespace ${vendor} */
''')

    # Vendor controls are wrapped in their namespace together with a
    # feature-test macro; 'libcamera' entries are returned unwrapped.
    vendor_sub = []
    for vendor in [v for v in ctrls.keys() if v != 'libcamera']:
        vendor_sub.append(vendor_template.substitute({'mode': mode.upper(),
                                                      'vendor': vendor,
                                                      'vendor_def': vendor.upper(),
                                                      'vendor_enums': '\n'.join(ids[vendor]),
                                                      'vendor_controls': '\n'.join(ctrls[vendor])}))

    return {
        'ids': '\n'.join(ids['libcamera']),
        'controls': '\n'.join(ctrls['libcamera']),
        'vendor_controls': '\n'.join(vendor_sub)
    }
|
||||
|
||||
|
||||
def fill_template(template, data):
    """Read the template file and substitute its ${} placeholders.

    template: path to the UTF-8 encoded template file
    data: mapping of placeholder names to replacement strings
    """
    # Fix: close the template file deterministically instead of leaking
    # the handle until garbage collection.
    with open(template, 'rb') as f:
        text = f.read().decode('utf-8')
    return string.Template(text).substitute(data)
|
||||
|
||||
|
||||
def main(argv):
    """Entry point: parse arguments, load the YAML control definitions and
    render the selected template to the output file or stdout."""

    # Parse command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--mode', '-m', type=str, required=True, choices=['controls', 'properties'],
                        help='Mode of operation')
    parser.add_argument('--output', '-o', metavar='file', type=str,
                        help='Output file name. Defaults to standard output if not specified.')
    parser.add_argument('--ranges', '-r', type=str, required=True,
                        help='Control id range reservation file.')
    parser.add_argument('--template', '-t', dest='template', type=str, required=True,
                        help='Template file name.')
    parser.add_argument('input', type=str, nargs='+',
                        help='Input file name.')

    args = parser.parse_args(argv[1:])

    # Load the vendor control id ranges. Fix: read through the context
    # manager instead of opening the file a second time and leaking the
    # extra handle.
    with open(args.ranges, 'rb') as f:
        ranges = yaml.safe_load(f)['ranges']

    controls = []
    for filename in args.input:
        with open(filename, 'rb') as f:
            # Fix: parse the YAML document once instead of twice.
            doc = yaml.safe_load(f)
        vendor = doc['vendor']
        ctrls = doc['controls']
        controls += [Control(*ctrl.popitem(), vendor) for ctrl in ctrls]

    # Select the generator from the template file extension.
    if args.template.endswith('.cpp.in'):
        data = generate_cpp(controls)
    elif args.template.endswith('.h.in'):
        data = generate_h(controls, args.mode, ranges)
    else:
        raise RuntimeError('Unknown template type')

    data = fill_template(args.template, data)

    if args.output:
        # Fix: close the output file deterministically.
        with open(args.output, 'wb') as output:
            output.write(data.encode('utf-8'))
    else:
        sys.stdout.write(data)

    return 0
|
121
utils/codegen/gen-formats.py
Executable file
121
utils/codegen/gen-formats.py
Executable file
|
@ -0,0 +1,121 @@
|
|||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
#
|
||||
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
|
||||
#
|
||||
# Generate formats definitions from YAML
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import string
|
||||
import sys
|
||||
import yaml
|
||||
|
||||
|
||||
class DRMFourCC(object):
    """Parse drm_fourcc.h and expose format fourccs and modifiers.

    After construction:
    - formats maps DRM_FORMAT_* names to their fourcc_code() argument string
    - vendors maps modifier vendor names to their numeric id
    - mods maps modifier names to (vendor name, value) tuples
    """

    format_regex = re.compile(r"#define (DRM_FORMAT_[A-Z0-9_]+)[ \t]+fourcc_code\(('.', '.', '.', '.')\)")
    mod_vendor_regex = re.compile(r"#define DRM_FORMAT_MOD_VENDOR_([A-Z0-9_]+)[ \t]+([0-9a-fA-Fx]+)")
    mod_regex = re.compile(r"#define ([A-Za-z0-9_]+)[ \t]+fourcc_mod_code\(([A-Z0-9_]+), ([0-9a-fA-Fx]+)\)")

    def __init__(self, filename):
        self.formats = {}
        self.vendors = {}
        self.mods = {}

        # Fix: close the header file when done instead of leaking the
        # handle until garbage collection.
        with open(filename, 'rb') as f:
            for line in f:
                line = line.decode('utf-8')

                match = DRMFourCC.format_regex.match(line)
                if match:
                    fmt, fourcc = match.groups()
                    self.formats[fmt] = fourcc
                    continue

                match = DRMFourCC.mod_vendor_regex.match(line)
                if match:
                    vendor, value = match.groups()
                    # int(value, 0) accepts both decimal and 0x-prefixed hex.
                    self.vendors[vendor] = int(value, 0)
                    continue

                match = DRMFourCC.mod_regex.match(line)
                if match:
                    mod, vendor, value = match.groups()
                    self.mods[mod] = (vendor, int(value, 0))
                    continue

    def fourcc(self, name):
        """Return the fourcc_code() argument string for the given format."""
        return self.formats[name]

    def mod(self, name):
        """Return (vendor id, modifier value) for the given modifier name."""
        vendor, value = self.mods[name]
        return self.vendors[vendor], value
|
||||
|
||||
|
||||
def generate_h(formats, drm_fourcc):
    """Build the 'formats' substitution string for the formats header.

    formats: list of single-entry dicts mapping a format name to its YAML
    description (each dict is consumed via popitem())
    drm_fourcc: DRMFourCC-like object providing fourcc() and mod()
    """
    entry = string.Template('constexpr PixelFormat ${name}{ __fourcc(${fourcc}), __mod(${mod}) };')

    lines = []
    for fmt in formats:
        name, desc = fmt.popitem()

        fourcc = drm_fourcc.fourcc(desc['fourcc'])
        if desc.get('big-endian'):
            fourcc += '| DRM_FORMAT_BIG_ENDIAN'

        # Formats without a modifier use the default (0, 0) pair.
        modifier = desc.get('mod')
        mod_str = '%u, %u' % drm_fourcc.mod(modifier) if modifier else '0, 0'

        lines.append(entry.substitute({'name': name,
                                       'fourcc': fourcc,
                                       'mod': mod_str}))

    return {'formats': '\n'.join(lines)}
|
||||
|
||||
|
||||
def fill_template(template, data):
    """Read the template file and substitute its ${} placeholders.

    template: path to the UTF-8 encoded template file
    data: mapping of placeholder names to replacement strings
    """
    # Fix: close the template file deterministically instead of leaking
    # the handle until garbage collection.
    with open(template, 'rb') as f:
        text = f.read().decode('utf-8')
    return string.Template(text).substitute(data)
|
||||
|
||||
|
||||
def main(argv):
    """Entry point: generate format definitions from YAML and drm_fourcc.h
    and render them through the template to the output file or stdout."""

    # Parse command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', dest='output', metavar='file', type=str,
                        help='Output file name. Defaults to standard output if not specified.')
    parser.add_argument('input', type=str,
                        help='Input file name.')
    parser.add_argument('template', type=str,
                        help='Template file name.')
    parser.add_argument('drm_fourcc', type=str,
                        help='Path to drm_fourcc.h.')
    args = parser.parse_args(argv[1:])

    # Fix: close the input file deterministically.
    with open(args.input, 'rb') as f:
        formats = yaml.safe_load(f)['formats']
    drm_fourcc = DRMFourCC(args.drm_fourcc)

    data = generate_h(formats, drm_fourcc)
    data = fill_template(args.template, data)

    if args.output:
        # Fix: close the output file deterministically.
        with open(args.output, 'wb') as f:
            f.write(data.encode('utf-8'))
    else:
        sys.stdout.write(data)

    return 0
|
27
utils/codegen/gen-header.sh
Executable file
27
utils/codegen/gen-header.sh
Executable file
|
@ -0,0 +1,27 @@
|
|||
#!/bin/sh

# Generate an umbrella header that includes every public libcamera header
# found in the source directory.
#
# Usage: gen-header.sh <src_dir> <dst_file>

src_dir="$1"
dst_file="$2"

# Write the fixed preamble of the generated header.
cat <<EOF > "$dst_file"
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/* This file is auto-generated, do not edit! */
/*
 * Copyright (C) 2018-2019, Google Inc.
 *
 * libcamera public API
 */

#pragma once

EOF

# Collect all .h and .h.in headers, dropping the .in suffix of generated
# headers, and sort them for a stable include order.
headers=$(for header in "$src_dir"/*.h "$src_dir"/*.h.in ; do
	header=$(basename "$header")
	header="${header%.in}"
	echo "$header"
done | sort)

for header in $headers ; do
	echo "#include <libcamera/$header>" >> "$dst_file"
done
|
48
utils/codegen/gen-ipa-pub-key.py
Executable file
48
utils/codegen/gen-ipa-pub-key.py
Executable file
|
@ -0,0 +1,48 @@
|
|||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
#
|
||||
# Author: Laurent Pinchart <laurent.pinchart@ideasonboard.com>
|
||||
#
|
||||
# Generate the IPA module signing public key
|
||||
|
||||
import string
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def main(argv):
    """Generate a C source fragment containing the IPA module signing
    public key, extracted from the private key with openssl.

    argv: [prog, priv-key, template, output]
    Returns 0 on success, 1 on usage error or openssl failure.
    """
    if len(argv) != 4:
        print('Usage: %s priv-key template output' % argv[0])
        return 1

    priv_key = argv[1]
    template = argv[2]
    output = argv[3]

    try:
        ret = subprocess.run(['openssl', 'rsa', '-pubout', '-in', priv_key,
                              '-outform', 'DER'],
                             stdout=subprocess.PIPE)
    except FileNotFoundError:
        print('Please install openssl to sign IPA modules')
        return 1

    # Robustness fix: don't silently emit an empty or truncated key when
    # openssl fails (e.g. unreadable or invalid private key).
    if ret.returncode != 0:
        print('Failed to extract public key from %s' % priv_key)
        return 1

    # Format the DER bytes as a C array initializer, eight bytes per line.
    ipa_key = ['0x%02x' % c for c in ret.stdout]
    ipa_key = [', '.join(ipa_key[bound:bound + 8]) for bound in range(0, len(ipa_key), 8)]
    ipa_key = ',\n\t'.join(ipa_key)
    data = {'ipa_key': ipa_key}

    # Fix: close both files deterministically with context managers.
    with open(template, 'rb') as f:
        tmpl = string.Template(f.read().decode('utf-8'))

    with open(output, 'wb') as f:
        f.write(tmpl.substitute(data).encode('utf-8'))

    return 0
|
39
utils/codegen/gen-tp-header.py
Executable file
39
utils/codegen/gen-tp-header.py
Executable file
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
#
|
||||
# Author: Paul Elder <paul.elder@ideasonboard.com>
|
||||
#
|
||||
# Generate header file to contain lttng tracepoints
|
||||
|
||||
import datetime
|
||||
import jinja2
|
||||
import pathlib
|
||||
import os
|
||||
import sys
|
||||
|
||||
def main(argv):
    """Generate the lttng tracepoints header from a jinja2 template and a
    set of .tp tracepoint definition files.

    argv: [prog, include_build_dir, output, template, tp_files...]
    Returns 0 on success, 1 on usage error.
    """
    if len(argv) < 4:
        print(f'Usage: {argv[0]} include_build_dir output template tp_files...')
        return 1

    output = argv[2]
    template = argv[3]

    year = datetime.datetime.now().year
    # Path of the generated header relative to the include build dir,
    # passed to the template (presumably for the header path comment /
    # TRACEPOINT_INCLUDE — confirm against the template).
    path = pathlib.Path(output).absolute().relative_to(argv[1])

    # Concatenate all tracepoint definition files.
    source = ''
    for fname in argv[4:]:
        with open(fname, 'r', encoding='utf-8') as f:
            source += f.read() + '\n\n'

    with open(template, 'r', encoding='utf-8') as f:
        tmpl = jinja2.Template(f.read())
    # Renamed from `string` to avoid shadowing the stdlib module name.
    rendered = tmpl.render(year=year, path=path, source=source)

    # Fix: the original leaked the output file handle
    # (`open(...).write(...)`); write through a context manager so the
    # data is flushed and the handle closed.
    with open(output, 'w', encoding='utf-8') as f:
        f.write(rendered)

    return 0
|
79
utils/codegen/ipc/extract-docs.py
Executable file
79
utils/codegen/ipc/extract-docs.py
Executable file
|
@ -0,0 +1,79 @@
|
|||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright (C) 2021, Google Inc.
|
||||
#
|
||||
# Author: Paul Elder <paul.elder@ideasonboard.com>
|
||||
#
|
||||
# Extract doxygen documentation from mojom files
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import sys
|
||||
|
||||
# Doxygen comment blocks are delimited by lines containing exactly '/**'
# and ' */'; the SPDX header is matched on the first line only.
regex_block_start = re.compile(r'^/\*\*$')
regex_block_end = re.compile(r'^ \*/$')
regex_spdx = re.compile(r'^/\* SPDX-License-Identifier: .* \*/$')


def main(argv):
    """Extract /** ... */ doxygen blocks from a mojom file into a C++
    docs file wrapped in the libcamera namespace.

    Returns 0 on success; raises on a missing SPDX header or an unclosed
    comment block.
    """

    # Parse command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('-o', dest='output', metavar='file',
                        type=argparse.FileType('w', encoding='utf-8'),
                        default=sys.stdout,
                        help='Output file name (default: standard output)')
    parser.add_argument('input', type=str,
                        help='Input file name.')
    args = parser.parse_args(argv[1:])

    # Fix: read through a context manager instead of leaking the handle.
    with open(args.input, 'r') as f:
        lines = f.readlines()
    pipeline = args.input.split('/')[-1].replace('.mojom', '')

    if not regex_spdx.match(lines[0]):
        raise Exception(f'Missing SPDX license header in {args.input}')

    # Start the output with the input's SPDX line and a fixed preamble.
    data = lines[0]
    data += f'''\
/*
 * Copyright (C) 2021, Google Inc.
 *
 * Docs file for generated {pipeline}.mojom
 *
 * This file is auto-generated. Do not edit.
 */

namespace libcamera {{

'''

    # Copy every complete /** ... */ block verbatim into the output.
    in_block = False
    comment = ''
    for lineno, line in enumerate(lines, start=1):
        if regex_block_start.match(line):
            if in_block:
                raise SyntaxError('Expected end of comment',
                                  (args.input, lineno, 1, line))
            in_block = True
            comment = line
            continue

        if regex_block_end.match(line):
            if in_block:
                comment += line
                data += comment + '\n'
            in_block = False
            continue

        if in_block:
            comment += line

    data += '} /* namespace libcamera */\n'

    args.output.write(data)

    return 0
|
39
utils/codegen/ipc/generate.py
Executable file
39
utils/codegen/ipc/generate.py
Executable file
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env python3
# SPDX-License-Identifier: BSD-3-Clause
# Copyright (C) 2020, Google Inc.
#
# Author: Paul Elder <paul.elder@ideasonboard.com>
#
# Run mojo code generator for generating libcamera IPC files

import os
import sys

# TODO set sys.pycache_prefix for >= python3.8
sys.dont_write_bytecode = True

# Make the vendored mojo bindings package importable relative to this
# script's location.
sys.path.insert(0, f'{os.path.dirname(__file__)}/mojo/public/tools/bindings')

import mojo.public.tools.bindings.mojom_bindings_generator as generator


def _GetModulePath(path, output_dir):
    """Return the output path of a mojom module, without the '-module'
    suffix appended by the upstream implementation (see the override
    comment below)."""
    return os.path.join(output_dir, path.relative_path())


# Disable the attribute checker to support our custom attributes. Ideally we
# should add the attributes to the list of allowed attributes in
# utils/ipc/mojo/public/tools/bindings/checks/mojom_attributes_check.py, but
# we're trying hard to use the upstream mojom as-is.
if hasattr(generator, '_BUILTIN_CHECKS'):
    del generator._BUILTIN_CHECKS['attributes']

# Override the mojo code generator's generator list to only contain our
# libcamera generator
generator._BUILTIN_GENERATORS = {'libcamera': 'mojom_libcamera_generator'}

# Override the mojo code generator's _GetModulePath method to not add
# the '-module' suffix when searching for mojo modules, so that we can
# pass the path to the mojom module without having to trim the '-module' suffix
generator._GetModulePath = _GetModulePath

generator.main()
|
0
utils/codegen/ipc/generators/__init__.py
Normal file
0
utils/codegen/ipc/generators/__init__.py
Normal file
|
@ -0,0 +1,37 @@
|
|||
{#-
# SPDX-License-Identifier: LGPL-2.1-or-later
# Copyright (C) 2020, Google Inc.
-#}
{#- NOTE(review): the '-' in the Jinja comment delimiters strips the
 # surrounding whitespace, so these comments leave no trace in the
 # rendered header. -#}
{%- import "definition_functions.tmpl" as funcs -%}
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Google Inc.
 *
 * libcamera core definitions for Image Processing Algorithms
 *
 * This file is auto-generated. Do not edit.
 */

#pragma once

{% if has_map %}#include <map>{% endif %}
{% if has_array %}#include <vector>{% endif %}

#include <libcamera/ipa/ipa_interface.h>

namespace libcamera {

{# \todo Use const char * instead of std::string for strings #}
{% for const in consts %}
static const {{const.kind|name}} {{const.mojom_name}} = {{const.value}};
{% endfor %}

{% for enum in enums_gen_header %}
{{funcs.define_enum(enum)}}
{% endfor %}

{%- for struct in structs_gen_header %}
{{funcs.define_struct(struct)}}
{% endfor %}

} /* namespace libcamera */
|
|
@ -0,0 +1,44 @@
|
|||
{#-
# SPDX-License-Identifier: LGPL-2.1-or-later
# Copyright (C) 2020, Google Inc.
-#}
{#- NOTE(review): generates an IPADataSerializer specialization per struct;
 # the fd / no-fd variants are selected by the has_fd filter. The '-'
 # delimiters strip whitespace so this note does not affect output. -#}
{%- import "serializer.tmpl" as serializer -%}

/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Google Inc.
 *
 * Data serializer for core libcamera definitions for IPA
 *
 * This file is auto-generated. Do not edit.
 */

#pragma once

#include <tuple>
#include <vector>

#include <libcamera/ipa/core_ipa_interface.h>

#include "libcamera/internal/control_serializer.h"
#include "libcamera/internal/ipa_data_serializer.h"

namespace libcamera {

LOG_DECLARE_CATEGORY(IPADataSerializer)
{% for struct in structs_gen_serializer %}
template<>
class IPADataSerializer<{{struct|name}}>
{
public:
{{- serializer.serializer(struct, "")}}
{%- if struct|has_fd %}
{{serializer.deserializer_fd(struct, "")}}
{%- else %}
{{serializer.deserializer_no_fd(struct, "")}}
{{serializer.deserializer_fd_simple(struct, "")}}
{%- endif %}
};
{% endfor %}

} /* namespace libcamera */
|
|
@ -0,0 +1,56 @@
|
|||
{#-
# SPDX-License-Identifier: LGPL-2.1-or-later
# Copyright (C) 2020, Google Inc.
-#}

{#
# \brief Generate enum definition
#
# \param enum Enum object whose definition is to be generated
#}
{%- macro define_enum(enum) -%}
enum{{" class" if enum|is_scoped}} {{enum.mojom_name}} {
{%- for field in enum.fields %}
	{{field.mojom_name}} = {{field.numeric_value}},
{%- endfor %}
};
{%- endmacro -%}

{#
# \brief Generate struct definition
#
# \param struct Struct object whose definition is to be generated
#
# Emits a default constructor (initializing fields that have default
# values) and an all-fields constructor, both hidden from doxygen.
#}
{%- macro define_struct(struct) -%}
struct {{struct.mojom_name}}
{
public:
#ifndef __DOXYGEN__
	{{struct.mojom_name}}() {%- if struct|has_default_fields %}
		:{% endif %}
{%- for field in struct.fields|with_default_values -%}
{{" " if loop.first}}{{field.mojom_name}}({{field|default_value}}){{", " if not loop.last}}
{%- endfor %}
	{
	}

	{{struct.mojom_name}}(
{%- for field in struct.fields -%}
{{"const " if not field|is_pod}}{{field|name}} {{"&" if not field|is_pod}}_{{field.mojom_name}}{{", " if not loop.last}}
{%- endfor -%}
)
		:
{%- for field in struct.fields -%}
{{" " if loop.first}}{{field.mojom_name}}(_{{field.mojom_name}}){{", " if not loop.last}}
{%- endfor %}
	{
	}
#endif

{% for field in struct.fields %}
	{{field|name}} {{field.mojom_name}};
{%- endfor %}
};
{%- endmacro -%}
14
utils/codegen/ipc/generators/libcamera_templates/meson.build
Normal file
14
utils/codegen/ipc/generators/libcamera_templates/meson.build
Normal file
|
@ -0,0 +1,14 @@
|
|||
# SPDX-License-Identifier: CC0-1.0

# Jinja2 templates consumed by the libcamera mojom generator to produce
# the IPA interface, proxy and serializer sources.
mojom_template_files = files([
    'core_ipa_interface.h.tmpl',
    'core_ipa_serializer.h.tmpl',
    'definition_functions.tmpl',
    'module_ipa_interface.h.tmpl',
    'module_ipa_proxy.cpp.tmpl',
    'module_ipa_proxy.h.tmpl',
    'module_ipa_proxy_worker.cpp.tmpl',
    'module_ipa_serializer.h.tmpl',
    'proxy_functions.tmpl',
    'serializer.tmpl',
])
|
|
@ -0,0 +1,84 @@
|
|||
{#-
# SPDX-License-Identifier: LGPL-2.1-or-later
# Copyright (C) 2020, Google Inc.
-#}
{#- NOTE(review): renders the per-module IPA interface header: command
 # enums for main and event methods, enums/structs, and the abstract
 # interface class. '-' delimiters keep this note out of the output. -#}
{%- import "definition_functions.tmpl" as funcs -%}
/* SPDX-License-Identifier: LGPL-2.1-or-later */
/*
 * Copyright (C) 2020, Google Inc.
 *
 * Image Processing Algorithm interface for {{module_name}}
 *
 * This file is auto-generated. Do not edit.
 */

#pragma once

#include <libcamera/ipa/core_ipa_interface.h>
#include <libcamera/ipa/ipa_interface.h>

{% if has_map %}#include <map>{% endif %}
{% if has_array %}#include <vector>{% endif %}

namespace libcamera {
{%- if has_namespace %}
{% for ns in namespace %}
namespace {{ns}} {
{% endfor %}
{%- endif %}

{% for const in consts %}
const {{const.kind|name}} {{const.mojom_name}} = {{const.value}};
{% endfor %}

enum class {{cmd_enum_name}} {
	Exit = 0,
{%- for method in interface_main.methods %}
	{{method.mojom_name|cap}} = {{loop.index}},
{%- endfor %}
};

enum class {{cmd_event_enum_name}} {
{%- for method in interface_event.methods %}
	{{method.mojom_name|cap}} = {{loop.index}},
{%- endfor %}
};

{% for enum in enums %}
{{funcs.define_enum(enum)}}
{% endfor %}

{%- for struct in structs_nonempty %}
{{funcs.define_struct(struct)}}
{% endfor %}

{#-
Any consts or #defines should be moved to the mojom file.
#}
class {{interface_name}} : public IPAInterface
{
public:
{% for method in interface_main.methods %}
	virtual {{method|method_return_value}} {{method.mojom_name}}(
{%- for param in method|method_parameters %}
		{{param}}{{- "," if not loop.last}}
{%- endfor -%}
	) = 0;
{% endfor %}

{%- for method in interface_event.methods %}
	Signal<
{%- for param in method.parameters -%}
		{{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod and not param|is_enum}}
		{{- ", " if not loop.last}}
{%- endfor -%}
	> {{method.mojom_name}};
{% endfor -%}
};

{%- if has_namespace %}
{% for ns in namespace|reverse %}
} /* namespace {{ns}} */
{% endfor %}
{%- endif %}
} /* namespace libcamera */
|
|
@ -0,0 +1,255 @@
|
|||
{#-
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
-#}
|
||||
{%- import "proxy_functions.tmpl" as proxy_funcs -%}
|
||||
|
||||
/* SPDX-License-Identifier: LGPL-2.1-or-later */
|
||||
/*
|
||||
* Copyright (C) 2020, Google Inc.
|
||||
*
|
||||
* Image Processing Algorithm proxy for {{module_name}}
|
||||
*
|
||||
* This file is auto-generated. Do not edit.
|
||||
*/
|
||||
|
||||
#include <libcamera/ipa/{{module_name}}_ipa_proxy.h>
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include <libcamera/ipa/ipa_module_info.h>
|
||||
#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
|
||||
#include <libcamera/ipa/{{module_name}}_ipa_serializer.h>
|
||||
|
||||
#include <libcamera/base/log.h>
|
||||
#include <libcamera/base/thread.h>
|
||||
|
||||
#include "libcamera/internal/control_serializer.h"
|
||||
#include "libcamera/internal/ipa_data_serializer.h"
|
||||
#include "libcamera/internal/ipa_module.h"
|
||||
#include "libcamera/internal/ipa_proxy.h"
|
||||
#include "libcamera/internal/ipc_pipe.h"
|
||||
#include "libcamera/internal/ipc_pipe_unixsocket.h"
|
||||
#include "libcamera/internal/ipc_unixsocket.h"
|
||||
#include "libcamera/internal/process.h"
|
||||
|
||||
namespace libcamera {
|
||||
|
||||
LOG_DECLARE_CATEGORY(IPAProxy)
|
||||
|
||||
{%- if has_namespace %}
|
||||
{% for ns in namespace %}
|
||||
namespace {{ns}} {
|
||||
{% endfor %}
|
||||
{%- endif %}
|
||||
|
||||
{{proxy_name}}::{{proxy_name}}(IPAModule *ipam, bool isolate)
|
||||
: IPAProxy(ipam), isolate_(isolate),
|
||||
controlSerializer_(ControlSerializer::Role::Proxy), seq_(0)
|
||||
{
|
||||
LOG(IPAProxy, Debug)
|
||||
<< "initializing {{module_name}} proxy: loading IPA from "
|
||||
<< ipam->path();
|
||||
|
||||
if (isolate_) {
|
||||
const std::string proxyWorkerPath = resolvePath("{{module_name}}_ipa_proxy");
|
||||
if (proxyWorkerPath.empty()) {
|
||||
LOG(IPAProxy, Error)
|
||||
<< "Failed to get proxy worker path";
|
||||
return;
|
||||
}
|
||||
|
||||
ipc_ = std::make_unique<IPCPipeUnixSocket>(ipam->path().c_str(),
|
||||
proxyWorkerPath.c_str());
|
||||
if (!ipc_->isConnected()) {
|
||||
LOG(IPAProxy, Error) << "Failed to create IPCPipe";
|
||||
return;
|
||||
}
|
||||
|
||||
ipc_->recv.connect(this, &{{proxy_name}}::recvMessage);
|
||||
|
||||
valid_ = true;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!ipam->load())
|
||||
return;
|
||||
|
||||
IPAInterface *ipai = ipam->createInterface();
|
||||
if (!ipai) {
|
||||
LOG(IPAProxy, Error)
|
||||
<< "Failed to create IPA context for " << ipam->path();
|
||||
return;
|
||||
}
|
||||
|
||||
ipa_ = std::unique_ptr<{{interface_name}}>(static_cast<{{interface_name}} *>(ipai));
|
||||
proxy_.setIPA(ipa_.get());
|
||||
|
||||
{% for method in interface_event.methods %}
|
||||
ipa_->{{method.mojom_name}}.connect(this, &{{proxy_name}}::{{method.mojom_name}}Thread);
|
||||
{%- endfor %}
|
||||
|
||||
valid_ = true;
|
||||
}
|
||||
|
||||
{{proxy_name}}::~{{proxy_name}}()
|
||||
{
|
||||
if (isolate_) {
|
||||
IPCMessage::Header header =
|
||||
{ static_cast<uint32_t>({{cmd_enum_name}}::Exit), seq_++ };
|
||||
IPCMessage msg(header);
|
||||
ipc_->sendAsync(msg);
|
||||
}
|
||||
}
|
||||
|
||||
{% if interface_event.methods|length > 0 %}
|
||||
void {{proxy_name}}::recvMessage(const IPCMessage &data)
|
||||
{
|
||||
size_t dataSize = data.data().size();
|
||||
{{cmd_event_enum_name}} _cmd = static_cast<{{cmd_event_enum_name}}>(data.header().cmd);
|
||||
|
||||
switch (_cmd) {
|
||||
{%- for method in interface_event.methods %}
|
||||
case {{cmd_event_enum_name}}::{{method.mojom_name|cap}}: {
|
||||
{{method.mojom_name}}IPC(data.data().cbegin(), dataSize, data.fds());
|
||||
break;
|
||||
}
|
||||
{%- endfor %}
|
||||
default:
|
||||
LOG(IPAProxy, Error) << "Unknown command " << static_cast<uint32_t>(_cmd);
|
||||
}
|
||||
}
|
||||
{%- endif %}
|
||||
|
||||
{% for method in interface_main.methods %}
|
||||
{{proxy_funcs.func_sig(proxy_name, method)}}
|
||||
{
|
||||
if (isolate_)
|
||||
{{"return " if method|method_return_value != "void"}}{{method.mojom_name}}IPC(
|
||||
{%- for param in method|method_param_names -%}
|
||||
{{param}}{{- ", " if not loop.last}}
|
||||
{%- endfor -%}
|
||||
);
|
||||
else
|
||||
{{"return " if method|method_return_value != "void"}}{{method.mojom_name}}Thread(
|
||||
{%- for param in method|method_param_names -%}
|
||||
{{param}}{{- ", " if not loop.last}}
|
||||
{%- endfor -%}
|
||||
);
|
||||
}
|
||||
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "Thread")}}
|
||||
{
|
||||
{%- if method.mojom_name == "stop" %}
|
||||
{{proxy_funcs.stop_thread_body()}}
|
||||
{%- elif method.mojom_name == "init" %}
|
||||
{{ method|method_return_value + " _ret = " if method|method_return_value != "void" -}}
|
||||
ipa_->{{method.mojom_name}}(
|
||||
{%- for param in method|method_param_names -%}
|
||||
{{param}}{{- ", " if not loop.last}}
|
||||
{%- endfor -%}
|
||||
);
|
||||
|
||||
proxy_.moveToThread(&thread_);
|
||||
|
||||
return {{ "_ret" if method|method_return_value != "void" }};
|
||||
{%- elif method.mojom_name == "start" %}
|
||||
state_ = ProxyRunning;
|
||||
thread_.start();
|
||||
|
||||
{{ "return " if method|method_return_value != "void" -}}
|
||||
proxy_.invokeMethod(&ThreadProxy::start, ConnectionTypeBlocking
|
||||
{{- ", " if method|method_param_names}}
|
||||
{%- for param in method|method_param_names -%}
|
||||
{{param}}{{- ", " if not loop.last}}
|
||||
{%- endfor -%}
|
||||
);
|
||||
{%- elif not method|is_async %}
|
||||
{{ "return " if method|method_return_value != "void" -}}
|
||||
ipa_->{{method.mojom_name}}(
|
||||
{%- for param in method|method_param_names -%}
|
||||
{{param}}{{- ", " if not loop.last}}
|
||||
{%- endfor -%}
|
||||
);
|
||||
{% elif method|is_async %}
|
||||
ASSERT(state_ == ProxyRunning);
|
||||
proxy_.invokeMethod(&ThreadProxy::{{method.mojom_name}}, ConnectionTypeQueued
|
||||
{%- for param in method|method_param_names -%}
|
||||
, {{param}}
|
||||
{%- endfor -%}
|
||||
);
|
||||
{%- endif %}
|
||||
}
|
||||
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "IPC")}}
|
||||
{
|
||||
{%- if method.mojom_name == "configure" %}
|
||||
controlSerializer_.reset();
|
||||
{%- endif %}
|
||||
{%- set has_output = true if method|method_param_outputs|length > 0 or method|method_return_value != "void" %}
|
||||
{%- set cmd = cmd_enum_name + "::" + method.mojom_name|cap %}
|
||||
IPCMessage::Header _header = { static_cast<uint32_t>({{cmd}}), seq_++ };
|
||||
IPCMessage _ipcInputBuf(_header);
|
||||
{%- if has_output %}
|
||||
IPCMessage _ipcOutputBuf;
|
||||
{%- endif %}
|
||||
|
||||
{{proxy_funcs.serialize_call(method|method_param_inputs, '_ipcInputBuf.data()', '_ipcInputBuf.fds()')}}
|
||||
|
||||
{% if method|is_async %}
|
||||
int _ret = ipc_->sendAsync(_ipcInputBuf);
|
||||
{%- else %}
|
||||
int _ret = ipc_->sendSync(_ipcInputBuf
|
||||
{{- ", &_ipcOutputBuf" if has_output -}}
|
||||
);
|
||||
{%- endif %}
|
||||
if (_ret < 0) {
|
||||
LOG(IPAProxy, Error) << "Failed to call {{method.mojom_name}}";
|
||||
{%- if method|method_return_value != "void" %}
|
||||
return static_cast<{{method|method_return_value}}>(_ret);
|
||||
{%- else %}
|
||||
return;
|
||||
{%- endif %}
|
||||
}
|
||||
{% if method|method_return_value != "void" %}
|
||||
{{method|method_return_value}} _retValue = IPADataSerializer<{{method|method_return_value}}>::deserialize(_ipcOutputBuf.data(), 0);
|
||||
|
||||
{{proxy_funcs.deserialize_call(method|method_param_outputs, '_ipcOutputBuf.data()', '_ipcOutputBuf.fds()', init_offset = method|method_return_value|byte_width|int)}}
|
||||
|
||||
return _retValue;
|
||||
|
||||
{% elif method|method_param_outputs|length > 0 %}
|
||||
{{proxy_funcs.deserialize_call(method|method_param_outputs, '_ipcOutputBuf.data()', '_ipcOutputBuf.fds()')}}
|
||||
{% endif -%}
|
||||
}
|
||||
|
||||
{% endfor %}
|
||||
|
||||
{% for method in interface_event.methods %}
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "Thread")}}
|
||||
{
|
||||
ASSERT(state_ != ProxyStopped);
|
||||
{{method.mojom_name}}.emit({{method.parameters|params_comma_sep}});
|
||||
}
|
||||
|
||||
void {{proxy_name}}::{{method.mojom_name}}IPC(
|
||||
[[maybe_unused]] std::vector<uint8_t>::const_iterator data,
|
||||
[[maybe_unused]] size_t dataSize,
|
||||
[[maybe_unused]] const std::vector<SharedFD> &fds)
|
||||
{
|
||||
{%- for param in method.parameters %}
|
||||
{{param|name}} {{param.mojom_name}};
|
||||
{%- endfor %}
|
||||
{{proxy_funcs.deserialize_call(method.parameters, 'data', 'fds', false, false, true, 'dataSize')}}
|
||||
{{method.mojom_name}}.emit({{method.parameters|params_comma_sep}});
|
||||
}
|
||||
{% endfor %}
|
||||
|
||||
{%- if has_namespace %}
|
||||
{% for ns in namespace|reverse %}
|
||||
} /* namespace {{ns}} */
|
||||
{% endfor %}
|
||||
{%- endif %}
|
||||
} /* namespace libcamera */
|
|
@ -0,0 +1,132 @@
|
|||
{#-
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
-#}
|
||||
{%- import "proxy_functions.tmpl" as proxy_funcs -%}
|
||||
|
||||
/* SPDX-License-Identifier: LGPL-2.1-or-later */
|
||||
/*
|
||||
* Copyright (C) 2020, Google Inc.
|
||||
*
|
||||
* Image Processing Algorithm proxy for {{module_name}}
|
||||
*
|
||||
* This file is auto-generated. Do not edit.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <libcamera/ipa/ipa_interface.h>
|
||||
#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
|
||||
|
||||
#include <libcamera/base/object.h>
|
||||
#include <libcamera/base/thread.h>
|
||||
|
||||
#include "libcamera/internal/control_serializer.h"
|
||||
#include "libcamera/internal/ipa_proxy.h"
|
||||
#include "libcamera/internal/ipc_pipe.h"
|
||||
#include "libcamera/internal/ipc_pipe_unixsocket.h"
|
||||
#include "libcamera/internal/ipc_unixsocket.h"
|
||||
|
||||
namespace libcamera {
|
||||
{%- if has_namespace %}
|
||||
{% for ns in namespace %}
|
||||
namespace {{ns}} {
|
||||
{% endfor %}
|
||||
{%- endif %}
|
||||
|
||||
class {{proxy_name}} : public IPAProxy, public {{interface_name}}, public Object
|
||||
{
|
||||
public:
|
||||
{{proxy_name}}(IPAModule *ipam, bool isolate);
|
||||
~{{proxy_name}}();
|
||||
|
||||
{% for method in interface_main.methods %}
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "", false, true)|indent(8, true)}};
|
||||
{% endfor %}
|
||||
|
||||
{%- for method in interface_event.methods %}
|
||||
Signal<
|
||||
{%- for param in method.parameters -%}
|
||||
{{"const " if not param|is_pod}}{{param|name}}{{" &" if not param|is_pod and not param|is_enum}}
|
||||
{{- ", " if not loop.last}}
|
||||
{%- endfor -%}
|
||||
> {{method.mojom_name}};
|
||||
{% endfor %}
|
||||
|
||||
private:
|
||||
void recvMessage(const IPCMessage &data);
|
||||
|
||||
{% for method in interface_main.methods %}
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "Thread", false)|indent(8, true)}};
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "IPC", false)|indent(8, true)}};
|
||||
{% endfor %}
|
||||
{% for method in interface_event.methods %}
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "Thread", false)|indent(8, true)}};
|
||||
void {{method.mojom_name}}IPC(
|
||||
std::vector<uint8_t>::const_iterator data,
|
||||
size_t dataSize,
|
||||
const std::vector<SharedFD> &fds);
|
||||
{% endfor %}
|
||||
|
||||
/* Helper class to invoke async functions in another thread. */
|
||||
class ThreadProxy : public Object
|
||||
{
|
||||
public:
|
||||
ThreadProxy()
|
||||
: ipa_(nullptr)
|
||||
{
|
||||
}
|
||||
|
||||
void setIPA({{interface_name}} *ipa)
|
||||
{
|
||||
ipa_ = ipa;
|
||||
}
|
||||
|
||||
void stop()
|
||||
{
|
||||
ipa_->stop();
|
||||
}
|
||||
{% for method in interface_main.methods %}
|
||||
{%- if method|is_async %}
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "", false)|indent(16)}}
|
||||
{
|
||||
ipa_->{{method.mojom_name}}({{method.parameters|params_comma_sep}});
|
||||
}
|
||||
{%- elif method.mojom_name == "start" %}
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "", false)|indent(16)}}
|
||||
{
|
||||
{%- if method|method_return_value != "void" %}
|
||||
return ipa_->{{method.mojom_name}}({{method.parameters|params_comma_sep}});
|
||||
{%- else %}
|
||||
ipa_->{{method.mojom_name}}({{method.parameters|params_comma_sep}}
|
||||
{{- ", " if method|method_param_outputs|params_comma_sep -}}
|
||||
{{- method|method_param_outputs|params_comma_sep}});
|
||||
{%- endif %}
|
||||
}
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
|
||||
private:
|
||||
{{interface_name}} *ipa_;
|
||||
};
|
||||
|
||||
Thread thread_;
|
||||
ThreadProxy proxy_;
|
||||
std::unique_ptr<{{interface_name}}> ipa_;
|
||||
|
||||
const bool isolate_;
|
||||
|
||||
std::unique_ptr<IPCPipeUnixSocket> ipc_;
|
||||
|
||||
ControlSerializer controlSerializer_;
|
||||
|
||||
{# \todo Move this to IPCPipe #}
|
||||
uint32_t seq_;
|
||||
};
|
||||
|
||||
{%- if has_namespace %}
|
||||
{% for ns in namespace|reverse %}
|
||||
} /* namespace {{ns}} */
|
||||
{% endfor %}
|
||||
{%- endif %}
|
||||
} /* namespace libcamera */
|
|
@ -0,0 +1,246 @@
|
|||
{#-
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
-#}
|
||||
{%- import "proxy_functions.tmpl" as proxy_funcs -%}
|
||||
|
||||
/* SPDX-License-Identifier: LGPL-2.1-or-later */
|
||||
/*
|
||||
* Copyright (C) 2020, Google Inc.
|
||||
*
|
||||
* Image Processing Algorithm proxy worker for {{module_name}}
|
||||
*
|
||||
* This file is auto-generated. Do not edit.
|
||||
*/
|
||||
|
||||
{#- \todo Split proxy worker into IPC worker and proxy worker. #}
|
||||
|
||||
#include <algorithm>
|
||||
#include <iostream>
|
||||
#include <sys/types.h>
|
||||
#include <tuple>
|
||||
#include <unistd.h>
|
||||
|
||||
#include <libcamera/ipa/ipa_interface.h>
|
||||
#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
|
||||
#include <libcamera/ipa/{{module_name}}_ipa_serializer.h>
|
||||
#include <libcamera/logging.h>
|
||||
|
||||
#include <libcamera/base/event_dispatcher.h>
|
||||
#include <libcamera/base/log.h>
|
||||
#include <libcamera/base/thread.h>
|
||||
#include <libcamera/base/unique_fd.h>
|
||||
|
||||
#include "libcamera/internal/camera_sensor.h"
|
||||
#include "libcamera/internal/control_serializer.h"
|
||||
#include "libcamera/internal/ipa_data_serializer.h"
|
||||
#include "libcamera/internal/ipa_module.h"
|
||||
#include "libcamera/internal/ipa_proxy.h"
|
||||
#include "libcamera/internal/ipc_pipe.h"
|
||||
#include "libcamera/internal/ipc_pipe_unixsocket.h"
|
||||
#include "libcamera/internal/ipc_unixsocket.h"
|
||||
|
||||
using namespace libcamera;
|
||||
|
||||
LOG_DEFINE_CATEGORY({{proxy_worker_name}})
|
||||
|
||||
{%- if has_namespace %}
|
||||
{% for ns in namespace -%}
|
||||
using namespace {{ns}};
|
||||
{% endfor %}
|
||||
{%- endif %}
|
||||
|
||||
class {{proxy_worker_name}}
|
||||
{
|
||||
public:
|
||||
{{proxy_worker_name}}()
|
||||
: ipa_(nullptr),
|
||||
controlSerializer_(ControlSerializer::Role::Worker),
|
||||
exit_(false) {}
|
||||
|
||||
~{{proxy_worker_name}}() {}
|
||||
|
||||
void readyRead()
|
||||
{
|
||||
IPCUnixSocket::Payload _message;
|
||||
int _retRecv = socket_.receive(&_message);
|
||||
if (_retRecv) {
|
||||
LOG({{proxy_worker_name}}, Error)
|
||||
<< "Receive message failed: " << _retRecv;
|
||||
return;
|
||||
}
|
||||
|
||||
IPCMessage _ipcMessage(_message);
|
||||
|
||||
{{cmd_enum_name}} _cmd = static_cast<{{cmd_enum_name}}>(_ipcMessage.header().cmd);
|
||||
|
||||
switch (_cmd) {
|
||||
case {{cmd_enum_name}}::Exit: {
|
||||
exit_ = true;
|
||||
break;
|
||||
}
|
||||
|
||||
{% for method in interface_main.methods %}
|
||||
case {{cmd_enum_name}}::{{method.mojom_name|cap}}: {
|
||||
{%- if method.mojom_name == "configure" %}
|
||||
controlSerializer_.reset();
|
||||
{%- endif %}
|
||||
{{proxy_funcs.deserialize_call(method|method_param_inputs, '_ipcMessage.data()', '_ipcMessage.fds()', false, true)|indent(16, true)}}
|
||||
{% for param in method|method_param_outputs %}
|
||||
{{param|name}} {{param.mojom_name}};
|
||||
{% endfor %}
|
||||
{%- if method|method_return_value != "void" %}
|
||||
{{method|method_return_value}} _callRet =
|
||||
{%- endif -%}
|
||||
ipa_->{{method.mojom_name}}({{method.parameters|params_comma_sep}}
|
||||
{{- ", " if method|method_param_outputs|params_comma_sep -}}
|
||||
{%- for param in method|method_param_outputs -%}
|
||||
&{{param.mojom_name}}{{", " if not loop.last}}
|
||||
{%- endfor -%}
|
||||
);
|
||||
{% if not method|is_async %}
|
||||
IPCMessage::Header header = { _ipcMessage.header().cmd, _ipcMessage.header().cookie };
|
||||
IPCMessage _response(header);
|
||||
{%- if method|method_return_value != "void" %}
|
||||
std::vector<uint8_t> _callRetBuf;
|
||||
std::tie(_callRetBuf, std::ignore) =
|
||||
IPADataSerializer<{{method|method_return_value}}>::serialize(_callRet);
|
||||
_response.data().insert(_response.data().end(), _callRetBuf.cbegin(), _callRetBuf.cend());
|
||||
{%- endif %}
|
||||
{{proxy_funcs.serialize_call(method|method_param_outputs, "_response.data()", "_response.fds()")|indent(16, true)}}
|
||||
int _ret = socket_.send(_response.payload());
|
||||
if (_ret < 0) {
|
||||
LOG({{proxy_worker_name}}, Error)
|
||||
<< "Reply to {{method.mojom_name}}() failed: " << _ret;
|
||||
}
|
||||
LOG({{proxy_worker_name}}, Debug) << "Done replying to {{method.mojom_name}}()";
|
||||
{%- endif %}
|
||||
break;
|
||||
}
|
||||
{% endfor %}
|
||||
default:
|
||||
LOG({{proxy_worker_name}}, Error) << "Unknown command " << _ipcMessage.header().cmd;
|
||||
}
|
||||
}
|
||||
|
||||
int init(std::unique_ptr<IPAModule> &ipam, UniqueFD socketfd)
|
||||
{
|
||||
if (socket_.bind(std::move(socketfd)) < 0) {
|
||||
LOG({{proxy_worker_name}}, Error)
|
||||
<< "IPC socket binding failed";
|
||||
return EXIT_FAILURE;
|
||||
}
|
||||
socket_.readyRead.connect(this, &{{proxy_worker_name}}::readyRead);
|
||||
|
||||
ipa_ = dynamic_cast<{{interface_name}} *>(ipam->createInterface());
|
||||
if (!ipa_) {
|
||||
LOG({{proxy_worker_name}}, Error)
|
||||
<< "Failed to create IPA interface instance";
|
||||
return EXIT_FAILURE;
|
||||
}
|
||||
{% for method in interface_event.methods %}
|
||||
ipa_->{{method.mojom_name}}.connect(this, &{{proxy_worker_name}}::{{method.mojom_name}});
|
||||
{%- endfor %}
|
||||
return 0;
|
||||
}
|
||||
|
||||
void run()
|
||||
{
|
||||
EventDispatcher *dispatcher = Thread::current()->eventDispatcher();
|
||||
while (!exit_)
|
||||
dispatcher->processEvents();
|
||||
}
|
||||
|
||||
void cleanup()
|
||||
{
|
||||
delete ipa_;
|
||||
socket_.close();
|
||||
}
|
||||
|
||||
private:
|
||||
|
||||
{% for method in interface_event.methods %}
|
||||
{{proxy_funcs.func_sig(proxy_name, method, "", false)|indent(8, true)}}
|
||||
{
|
||||
IPCMessage::Header header = {
|
||||
static_cast<uint32_t>({{cmd_event_enum_name}}::{{method.mojom_name|cap}}),
|
||||
0
|
||||
};
|
||||
IPCMessage _message(header);
|
||||
|
||||
{{proxy_funcs.serialize_call(method|method_param_inputs, "_message.data()", "_message.fds()")}}
|
||||
|
||||
int _ret = socket_.send(_message.payload());
|
||||
if (_ret < 0)
|
||||
LOG({{proxy_worker_name}}, Error)
|
||||
<< "Sending event {{method.mojom_name}}() failed: " << _ret;
|
||||
|
||||
LOG({{proxy_worker_name}}, Debug) << "{{method.mojom_name}} done";
|
||||
}
|
||||
{% endfor %}
|
||||
|
||||
{{interface_name}} *ipa_;
|
||||
IPCUnixSocket socket_;
|
||||
|
||||
ControlSerializer controlSerializer_;
|
||||
|
||||
bool exit_;
|
||||
};
|
||||
|
||||
int main(int argc, char **argv)
|
||||
{
|
||||
{#- \todo Handle enabling debugging more dynamically. #}
|
||||
/* Uncomment this for debugging. */
|
||||
#if 0
|
||||
std::string logPath = "/tmp/libcamera.worker." +
|
||||
std::to_string(getpid()) + ".log";
|
||||
logSetFile(logPath.c_str());
|
||||
#endif
|
||||
|
||||
if (argc < 3) {
|
||||
LOG({{proxy_worker_name}}, Error)
|
||||
<< "Tried to start worker with no args: "
|
||||
<< "expected <path to IPA so> <fd to bind unix socket>";
|
||||
return EXIT_FAILURE;
|
||||
}
|
||||
|
||||
UniqueFD fd(std::stoi(argv[2]));
|
||||
LOG({{proxy_worker_name}}, Info)
|
||||
<< "Starting worker for IPA module " << argv[1]
|
||||
<< " with IPC fd = " << fd.get();
|
||||
|
||||
std::unique_ptr<IPAModule> ipam = std::make_unique<IPAModule>(argv[1]);
|
||||
if (!ipam->isValid() || !ipam->load()) {
|
||||
LOG({{proxy_worker_name}}, Error)
|
||||
<< "IPAModule " << argv[1] << " isn't valid";
|
||||
return EXIT_FAILURE;
|
||||
}
|
||||
|
||||
/*
|
||||
* Shutdown of proxy worker can be pre-empted by events like
|
||||
* SIGINT/SIGTERM, even before the pipeline handler can request
|
||||
* shutdown. Hence, assign a new gid to prevent signals on the
|
||||
* application being delivered to the proxy.
|
||||
*/
|
||||
if (setpgid(0, 0) < 0) {
|
||||
int err = errno;
|
||||
LOG({{proxy_worker_name}}, Warning)
|
||||
<< "Failed to set new gid: " << strerror(err);
|
||||
}
|
||||
|
||||
{{proxy_worker_name}} proxyWorker;
|
||||
int ret = proxyWorker.init(ipam, std::move(fd));
|
||||
if (ret < 0) {
|
||||
LOG({{proxy_worker_name}}, Error)
|
||||
<< "Failed to initialize proxy worker";
|
||||
return ret;
|
||||
}
|
||||
|
||||
LOG({{proxy_worker_name}}, Debug) << "Proxy worker successfully initialized";
|
||||
|
||||
proxyWorker.run();
|
||||
|
||||
proxyWorker.cleanup();
|
||||
|
||||
return 0;
|
||||
}
|
|
@ -0,0 +1,45 @@
|
|||
{#-
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
-#}
|
||||
{%- import "serializer.tmpl" as serializer -%}
|
||||
|
||||
/* SPDX-License-Identifier: LGPL-2.1-or-later */
|
||||
/*
|
||||
* Copyright (C) 2020, Google Inc.
|
||||
*
|
||||
* Image Processing Algorithm data serializer for {{module_name}}
|
||||
*
|
||||
* This file is auto-generated. Do not edit.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <tuple>
|
||||
#include <vector>
|
||||
|
||||
#include <libcamera/ipa/{{module_name}}_ipa_interface.h>
|
||||
#include <libcamera/ipa/core_ipa_serializer.h>
|
||||
|
||||
#include "libcamera/internal/control_serializer.h"
|
||||
#include "libcamera/internal/ipa_data_serializer.h"
|
||||
|
||||
namespace libcamera {
|
||||
|
||||
LOG_DECLARE_CATEGORY(IPADataSerializer)
|
||||
{% for struct in structs_nonempty %}
|
||||
template<>
|
||||
class IPADataSerializer<{{struct|name_full}}>
|
||||
{
|
||||
public:
|
||||
{{- serializer.serializer(struct, namespace_str)}}
|
||||
{%- if struct|has_fd %}
|
||||
{{serializer.deserializer_fd(struct, namespace_str)}}
|
||||
{%- else %}
|
||||
{{serializer.deserializer_no_fd(struct, namespace_str)}}
|
||||
{{serializer.deserializer_fd_simple(struct, namespace_str)}}
|
||||
{%- endif %}
|
||||
};
|
||||
{% endfor %}
|
||||
|
||||
} /* namespace libcamera */
|
|
@ -0,0 +1,202 @@
|
|||
{#-
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
-#}
|
||||
{#
|
||||
# \brief Generate function prototype
|
||||
#
|
||||
# \param class Class name
|
||||
# \param method mojom Method object
|
||||
# \param suffix Suffix to append to \a method function name
|
||||
# \param need_class_name If true, generate class name with function
|
||||
# \param override If true, generate override tag after the function prototype
|
||||
#}
|
||||
{%- macro func_sig(class, method, suffix = "", need_class_name = true, override = false) -%}
|
||||
{{method|method_return_value}} {{class + "::" if need_class_name}}{{method.mojom_name}}{{suffix}}(
|
||||
{%- for param in method|method_parameters %}
|
||||
{{param}}{{- "," if not loop.last}}
|
||||
{%- endfor -%}
|
||||
){{" override" if override}}
|
||||
{%- endmacro -%}
|
||||
|
||||
{#
|
||||
# \brief Generate function body for IPA stop() function for thread
|
||||
#}
|
||||
{%- macro stop_thread_body() -%}
|
||||
ASSERT(state_ != ProxyStopping);
|
||||
if (state_ != ProxyRunning)
|
||||
return;
|
||||
|
||||
state_ = ProxyStopping;
|
||||
|
||||
proxy_.invokeMethod(&ThreadProxy::stop, ConnectionTypeBlocking);
|
||||
|
||||
thread_.exit();
|
||||
thread_.wait();
|
||||
|
||||
Thread::current()->dispatchMessages(Message::Type::InvokeMessage);
|
||||
|
||||
state_ = ProxyStopped;
|
||||
{%- endmacro -%}
|
||||
|
||||
|
||||
{#
|
||||
# \brief Serialize multiple objects into data buffer and fd vector
|
||||
#
|
||||
# Generate code to serialize multiple objects, as specified in \a params
|
||||
# (which are the parameters to some function), into \a buf data buffer and
|
||||
# \a fds fd vector.
|
||||
# This code is meant to be used by the proxy, for serializing prior to IPC calls.
|
||||
#
|
||||
# \todo Avoid intermediate vectors
|
||||
#}
|
||||
{%- macro serialize_call(params, buf, fds) %}
|
||||
{%- for param in params %}
|
||||
{%- if param|is_enum %}
|
||||
static_assert(sizeof({{param|name_full}}) <= 4);
|
||||
{%- endif %}
|
||||
std::vector<uint8_t> {{param.mojom_name}}Buf;
|
||||
{%- if param|has_fd %}
|
||||
std::vector<SharedFD> {{param.mojom_name}}Fds;
|
||||
std::tie({{param.mojom_name}}Buf, {{param.mojom_name}}Fds) =
|
||||
{%- else %}
|
||||
std::tie({{param.mojom_name}}Buf, std::ignore) =
|
||||
{%- endif %}
|
||||
{%- if param|is_flags %}
|
||||
IPADataSerializer<{{param|name_full}}>::serialize({{param.mojom_name}}
|
||||
{%- elif param|is_enum %}
|
||||
IPADataSerializer<uint32_t>::serialize(static_cast<uint32_t>({{param.mojom_name}})
|
||||
{%- else %}
|
||||
IPADataSerializer<{{param|name}}>::serialize({{param.mojom_name}}
|
||||
{% endif -%}
|
||||
{{- ", &controlSerializer_" if param|needs_control_serializer -}}
|
||||
);
|
||||
{%- endfor %}
|
||||
|
||||
{%- if params|length > 1 %}
|
||||
{%- for param in params %}
|
||||
appendPOD<uint32_t>({{buf}}, {{param.mojom_name}}Buf.size());
|
||||
{%- if param|has_fd %}
|
||||
appendPOD<uint32_t>({{buf}}, {{param.mojom_name}}Fds.size());
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
|
||||
{%- for param in params %}
|
||||
{{buf}}.insert({{buf}}.end(), {{param.mojom_name}}Buf.begin(), {{param.mojom_name}}Buf.end());
|
||||
{%- endfor %}
|
||||
|
||||
{%- for param in params %}
|
||||
{%- if param|has_fd %}
|
||||
{{fds}}.insert({{fds}}.end(), {{param.mojom_name}}Fds.begin(), {{param.mojom_name}}Fds.end());
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
{%- endmacro -%}
|
||||
|
||||
|
||||
{#
|
||||
# \brief Deserialize a single object from data buffer and fd vector
|
||||
#
|
||||
# \param pointer If true, deserialize the object into a dereferenced pointer
|
||||
# \param iter If true, treat \a buf as an iterator instead of a vector
|
||||
# \param data_size Variable that holds the size of the vector referenced by \a buf
|
||||
#
|
||||
# Generate code to deserialize a single object, as specified in \a param,
|
||||
# from \a buf data buffer and \a fds fd vector.
|
||||
# This code is meant to be used by macro deserialize_call.
|
||||
#}
|
||||
{%- macro deserialize_param(param, pointer, loop, buf, fds, iter, data_size) -%}
|
||||
{{"*" if pointer}}{{param.mojom_name}} =
|
||||
{%- if param|is_flags %}
|
||||
IPADataSerializer<{{param|name_full}}>::deserialize(
|
||||
{%- elif param|is_enum %}
|
||||
static_cast<{{param|name_full}}>(IPADataSerializer<uint32_t>::deserialize(
|
||||
{%- else %}
|
||||
IPADataSerializer<{{param|name}}>::deserialize(
|
||||
{%- endif %}
|
||||
{{buf}}{{- ".cbegin()" if not iter}} + {{param.mojom_name}}Start,
|
||||
{%- if loop.last and not iter %}
|
||||
{{buf}}.cend()
|
||||
{%- elif not iter %}
|
||||
{{buf}}.cbegin() + {{param.mojom_name}}Start + {{param.mojom_name}}BufSize
|
||||
{%- elif iter and loop.length == 1 %}
|
||||
{{buf}} + {{data_size}}
|
||||
{%- else %}
|
||||
{{buf}} + {{param.mojom_name}}Start + {{param.mojom_name}}BufSize
|
||||
{%- endif -%}
|
||||
{{- "," if param|has_fd}}
|
||||
{%- if param|has_fd %}
|
||||
{{fds}}.cbegin() + {{param.mojom_name}}FdStart,
|
||||
{%- if loop.last %}
|
||||
{{fds}}.cend()
|
||||
{%- else %}
|
||||
{{fds}}.cbegin() + {{param.mojom_name}}FdStart + {{param.mojom_name}}FdsSize
|
||||
{%- endif -%}
|
||||
{%- endif -%}
|
||||
{{- "," if param|needs_control_serializer}}
|
||||
{%- if param|needs_control_serializer %}
|
||||
&controlSerializer_
|
||||
{%- endif -%}
|
||||
){{")" if param|is_enum and not param|is_flags}};
|
||||
{%- endmacro -%}
|
||||
|
||||
|
||||
{#
|
||||
# \brief Deserialize multiple objects from data buffer and fd vector
|
||||
#
|
||||
# \param pointer If true, deserialize objects into pointers, and adds a null check.
|
||||
# \param declare If true, declare the objects in addition to deserialization.
|
||||
# \param iter if true, treat \a buf as an iterator instead of a vector
|
||||
# \param data_size Variable that holds the size of the vector referenced by \a buf
|
||||
#
|
||||
# Generate code to deserialize multiple objects, as specified in \a params
|
||||
# (which are the parameters to some function), from \a buf data buffer and
|
||||
# \a fds fd vector.
|
||||
# This code is meant to be used by the proxy, for deserializing after IPC calls.
|
||||
#
|
||||
# \todo Avoid intermediate vectors
|
||||
#}
|
||||
{%- macro deserialize_call(params, buf, fds, pointer = true, declare = false, iter = false, data_size = '', init_offset = 0) -%}
|
||||
{% set ns = namespace(size_offset = init_offset) %}
|
||||
{%- if params|length > 1 %}
|
||||
{%- for param in params %}
|
||||
[[maybe_unused]] const size_t {{param.mojom_name}}BufSize = readPOD<uint32_t>({{buf}}, {{ns.size_offset}}
|
||||
{%- if iter -%}
|
||||
, {{buf}} + {{data_size}}
|
||||
{%- endif -%}
|
||||
);
|
||||
{%- set ns.size_offset = ns.size_offset + 4 %}
|
||||
{%- if param|has_fd %}
|
||||
[[maybe_unused]] const size_t {{param.mojom_name}}FdsSize = readPOD<uint32_t>({{buf}}, {{ns.size_offset}}
|
||||
{%- if iter -%}
|
||||
, {{buf}} + {{data_size}}
|
||||
{%- endif -%}
|
||||
);
|
||||
{%- set ns.size_offset = ns.size_offset + 4 %}
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
{% for param in params %}
|
||||
{%- if loop.first %}
|
||||
const size_t {{param.mojom_name}}Start = {{ns.size_offset}};
|
||||
{%- else %}
|
||||
const size_t {{param.mojom_name}}Start = {{loop.previtem.mojom_name}}Start + {{loop.previtem.mojom_name}}BufSize;
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
{% for param in params|with_fds %}
|
||||
{%- if loop.first %}
|
||||
const size_t {{param.mojom_name}}FdStart = 0;
|
||||
{%- else %}
|
||||
const size_t {{param.mojom_name}}FdStart = {{loop.previtem.mojom_name}}FdStart + {{loop.previtem.mojom_name}}FdsSize;
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
{% for param in params %}
|
||||
{%- if pointer %}
|
||||
if ({{param.mojom_name}}) {
|
||||
{{deserialize_param(param, pointer, loop, buf, fds, iter, data_size)|indent(16, True)}}
|
||||
}
|
||||
{%- else %}
|
||||
{{param|name + " " if declare}}{{deserialize_param(param, pointer, loop, buf, fds, iter, data_size)|indent(8)}}
|
||||
{%- endif %}
|
||||
{% endfor %}
|
||||
{%- endmacro -%}
|
319
utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl
Normal file
319
utils/codegen/ipc/generators/libcamera_templates/serializer.tmpl
Normal file
|
@ -0,0 +1,319 @@
|
|||
{#-
|
||||
# SPDX-License-Identifier: LGPL-2.1-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
-#}
|
||||
{#
|
||||
# \brief Verify that there is enough bytes to deserialize
|
||||
#
|
||||
# Generate code that verifies that \a size is not greater than \a dataSize.
|
||||
# Otherwise log an error with \a name and \a typename.
|
||||
#}
|
||||
{%- macro check_data_size(size, dataSize, name, typename) %}
|
||||
if ({{dataSize}} < {{size}}) {
|
||||
LOG(IPADataSerializer, Error)
|
||||
<< "Failed to deserialize " << "{{name}}"
|
||||
<< ": not enough {{typename}}, expected "
|
||||
<< ({{size}}) << ", got " << ({{dataSize}});
|
||||
return ret;
|
||||
}
|
||||
{%- endmacro %}
|
||||
|
||||
|
||||
{#
|
||||
# \brief Serialize a field into return vector
|
||||
#
|
||||
# Generate code to serialize \a field into retData, including size of the
|
||||
# field and fds (where appropriate).
|
||||
# This code is meant to be used by the IPADataSerializer specialization.
|
||||
#
|
||||
# \todo Avoid intermediate vectors
|
||||
#}
|
||||
{%- macro serializer_field(field, namespace, loop) %}
|
||||
{%- if field|is_pod or field|is_enum %}
|
||||
std::vector<uint8_t> {{field.mojom_name}};
|
||||
std::tie({{field.mojom_name}}, std::ignore) =
|
||||
{%- if field|is_pod %}
|
||||
IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}});
|
||||
{%- elif field|is_flags %}
|
||||
IPADataSerializer<{{field|name_full}}>::serialize(data.{{field.mojom_name}});
|
||||
{%- elif field|is_enum_scoped %}
|
||||
IPADataSerializer<uint{{field|bit_width}}_t>::serialize(static_cast<uint{{field|bit_width}}_t>(data.{{field.mojom_name}}));
|
||||
{%- elif field|is_enum %}
|
||||
IPADataSerializer<uint{{field|bit_width}}_t>::serialize(data.{{field.mojom_name}});
|
||||
{%- endif %}
|
||||
retData.insert(retData.end(), {{field.mojom_name}}.begin(), {{field.mojom_name}}.end());
|
||||
{%- elif field|is_fd %}
|
||||
std::vector<uint8_t> {{field.mojom_name}};
|
||||
std::vector<SharedFD> {{field.mojom_name}}Fds;
|
||||
std::tie({{field.mojom_name}}, {{field.mojom_name}}Fds) =
|
||||
IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}});
|
||||
retData.insert(retData.end(), {{field.mojom_name}}.begin(), {{field.mojom_name}}.end());
|
||||
retFds.insert(retFds.end(), {{field.mojom_name}}Fds.begin(), {{field.mojom_name}}Fds.end());
|
||||
{%- elif field|is_controls %}
|
||||
if (data.{{field.mojom_name}}.size() > 0) {
|
||||
std::vector<uint8_t> {{field.mojom_name}};
|
||||
std::tie({{field.mojom_name}}, std::ignore) =
|
||||
IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}}, cs);
|
||||
appendPOD<uint32_t>(retData, {{field.mojom_name}}.size());
|
||||
retData.insert(retData.end(), {{field.mojom_name}}.begin(), {{field.mojom_name}}.end());
|
||||
} else {
|
||||
appendPOD<uint32_t>(retData, 0);
|
||||
}
|
||||
{%- elif field|is_plain_struct or field|is_array or field|is_map or field|is_str %}
|
||||
std::vector<uint8_t> {{field.mojom_name}};
|
||||
{%- if field|has_fd %}
|
||||
std::vector<SharedFD> {{field.mojom_name}}Fds;
|
||||
std::tie({{field.mojom_name}}, {{field.mojom_name}}Fds) =
|
||||
{%- else %}
|
||||
std::tie({{field.mojom_name}}, std::ignore) =
|
||||
{%- endif %}
|
||||
{%- if field|is_array or field|is_map %}
|
||||
IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}}, cs);
|
||||
{%- elif field|is_str %}
|
||||
IPADataSerializer<{{field|name}}>::serialize(data.{{field.mojom_name}});
|
||||
{%- else %}
|
||||
IPADataSerializer<{{field|name_full}}>::serialize(data.{{field.mojom_name}}, cs);
|
||||
{%- endif %}
|
||||
appendPOD<uint32_t>(retData, {{field.mojom_name}}.size());
|
||||
{%- if field|has_fd %}
|
||||
appendPOD<uint32_t>(retData, {{field.mojom_name}}Fds.size());
|
||||
{%- endif %}
|
||||
retData.insert(retData.end(), {{field.mojom_name}}.begin(), {{field.mojom_name}}.end());
|
||||
{%- if field|has_fd %}
|
||||
retFds.insert(retFds.end(), {{field.mojom_name}}Fds.begin(), {{field.mojom_name}}Fds.end());
|
||||
{%- endif %}
|
||||
{%- else %}
|
||||
/* Unknown serialization for {{field.mojom_name}}. */
|
||||
{%- endif %}
|
||||
{%- endmacro %}
|
||||
|
||||
|
||||
{#
|
||||
# \brief Deserialize a field into return struct
|
||||
#
|
||||
# Generate code to deserialize \a field into object ret.
|
||||
# This code is meant to be used by the IPADataSerializer specialization.
|
||||
#}
|
||||
{%- macro deserializer_field(field, namespace, loop) %}
|
||||
{% if field|is_pod or field|is_enum %}
|
||||
{%- set field_size = (field|bit_width|int / 8)|int %}
|
||||
{{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
|
||||
{%- if field|is_pod %}
|
||||
ret.{{field.mojom_name}} = IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field_size}});
|
||||
{%- elif field|is_flags %}
|
||||
ret.{{field.mojom_name}} = IPADataSerializer<{{field|name_full}}>::deserialize(m, m + {{field_size}});
|
||||
{%- else %}
|
||||
ret.{{field.mojom_name}} = static_cast<{{field|name_full}}>(IPADataSerializer<uint{{field|bit_width}}_t>::deserialize(m, m + {{field_size}}));
|
||||
{%- endif %}
|
||||
{%- if not loop.last %}
|
||||
m += {{field_size}};
|
||||
dataSize -= {{field_size}};
|
||||
{%- endif %}
|
||||
{% elif field|is_fd %}
|
||||
{%- set field_size = 4 %}
|
||||
{{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
|
||||
ret.{{field.mojom_name}} = IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field_size}}, n, n + 1, cs);
|
||||
{%- if not loop.last %}
|
||||
m += {{field_size}};
|
||||
dataSize -= {{field_size}};
|
||||
n += ret.{{field.mojom_name}}.isValid() ? 1 : 0;
|
||||
fdsSize -= ret.{{field.mojom_name}}.isValid() ? 1 : 0;
|
||||
{%- endif %}
|
||||
{% elif field|is_controls %}
|
||||
{%- set field_size = 4 %}
|
||||
{{- check_data_size(field_size, 'dataSize', field.mojom_name + 'Size', 'data')}}
|
||||
const size_t {{field.mojom_name}}Size = readPOD<uint32_t>(m, 0, dataEnd);
|
||||
m += {{field_size}};
|
||||
dataSize -= {{field_size}};
|
||||
{%- set field_size = field.mojom_name + 'Size' -%}
|
||||
{{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
|
||||
if ({{field.mojom_name}}Size > 0)
|
||||
ret.{{field.mojom_name}} =
|
||||
IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field.mojom_name}}Size, cs);
|
||||
{%- if not loop.last %}
|
||||
m += {{field_size}};
|
||||
dataSize -= {{field_size}};
|
||||
{%- endif %}
|
||||
{% elif field|is_plain_struct or field|is_array or field|is_map or field|is_str %}
|
||||
{%- set field_size = 4 %}
|
||||
{{- check_data_size(field_size, 'dataSize', field.mojom_name + 'Size', 'data')}}
|
||||
const size_t {{field.mojom_name}}Size = readPOD<uint32_t>(m, 0, dataEnd);
|
||||
m += {{field_size}};
|
||||
dataSize -= {{field_size}};
|
||||
{%- if field|has_fd %}
|
||||
{%- set field_size = 4 %}
|
||||
{{- check_data_size(field_size, 'dataSize', field.mojom_name + 'FdsSize', 'data')}}
|
||||
const size_t {{field.mojom_name}}FdsSize = readPOD<uint32_t>(m, 0, dataEnd);
|
||||
m += {{field_size}};
|
||||
dataSize -= {{field_size}};
|
||||
{{- check_data_size(field.mojom_name + 'FdsSize', 'fdsSize', field.mojom_name, 'fds')}}
|
||||
{%- endif %}
|
||||
{%- set field_size = field.mojom_name + 'Size' -%}
|
||||
{{- check_data_size(field_size, 'dataSize', field.mojom_name, 'data')}}
|
||||
ret.{{field.mojom_name}} =
|
||||
{%- if field|is_str %}
|
||||
IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field.mojom_name}}Size);
|
||||
{%- elif field|has_fd and (field|is_array or field|is_map) %}
|
||||
IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field.mojom_name}}Size, n, n + {{field.mojom_name}}FdsSize, cs);
|
||||
{%- elif field|has_fd and (not (field|is_array or field|is_map)) %}
|
||||
IPADataSerializer<{{field|name_full}}>::deserialize(m, m + {{field.mojom_name}}Size, n, n + {{field.mojom_name}}FdsSize, cs);
|
||||
{%- elif (not field|has_fd) and (field|is_array or field|is_map) %}
|
||||
IPADataSerializer<{{field|name}}>::deserialize(m, m + {{field.mojom_name}}Size, cs);
|
||||
{%- else %}
|
||||
IPADataSerializer<{{field|name_full}}>::deserialize(m, m + {{field.mojom_name}}Size, cs);
|
||||
{%- endif %}
|
||||
{%- if not loop.last %}
|
||||
m += {{field_size}};
|
||||
dataSize -= {{field_size}};
|
||||
{%- if field|has_fd %}
|
||||
n += {{field.mojom_name}}FdsSize;
|
||||
fdsSize -= {{field.mojom_name}}FdsSize;
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
{% else %}
|
||||
/* Unknown deserialization for {{field.mojom_name}}. */
|
||||
{%- endif %}
|
||||
{%- endmacro %}
|
||||
|
||||
|
||||
{#
|
||||
# \brief Serialize a struct
|
||||
#
|
||||
# Generate code for IPADataSerializer specialization, for serializing
|
||||
# \a struct.
|
||||
#}
|
||||
{%- macro serializer(struct, namespace) %}
|
||||
static std::tuple<std::vector<uint8_t>, std::vector<SharedFD>>
|
||||
serialize(const {{struct|name_full}} &data,
|
||||
{%- if struct|needs_control_serializer %}
|
||||
ControlSerializer *cs)
|
||||
{%- else %}
|
||||
[[maybe_unused]] ControlSerializer *cs = nullptr)
|
||||
{%- endif %}
|
||||
{
|
||||
std::vector<uint8_t> retData;
|
||||
{%- if struct|has_fd %}
|
||||
std::vector<SharedFD> retFds;
|
||||
{%- endif %}
|
||||
{%- for field in struct.fields %}
|
||||
{{serializer_field(field, namespace, loop)}}
|
||||
{%- endfor %}
|
||||
{% if struct|has_fd %}
|
||||
return {retData, retFds};
|
||||
{%- else %}
|
||||
return {retData, {}};
|
||||
{%- endif %}
|
||||
}
|
||||
{%- endmacro %}
|
||||
|
||||
|
||||
{#
|
||||
# \brief Deserialize a struct that has fds
|
||||
#
|
||||
# Generate code for IPADataSerializer specialization, for deserializing
|
||||
# \a struct, in the case that \a struct has file descriptors.
|
||||
#}
|
||||
{%- macro deserializer_fd(struct, namespace) %}
|
||||
static {{struct|name_full}}
|
||||
deserialize(std::vector<uint8_t> &data,
|
||||
std::vector<SharedFD> &fds,
|
||||
{%- if struct|needs_control_serializer %}
|
||||
ControlSerializer *cs)
|
||||
{%- else %}
|
||||
ControlSerializer *cs = nullptr)
|
||||
{%- endif %}
|
||||
{
|
||||
return IPADataSerializer<{{struct|name_full}}>::deserialize(data.cbegin(), data.cend(), fds.cbegin(), fds.cend(), cs);
|
||||
}
|
||||
|
||||
{# \todo Don't inline this function #}
|
||||
static {{struct|name_full}}
|
||||
deserialize(std::vector<uint8_t>::const_iterator dataBegin,
|
||||
std::vector<uint8_t>::const_iterator dataEnd,
|
||||
std::vector<SharedFD>::const_iterator fdsBegin,
|
||||
std::vector<SharedFD>::const_iterator fdsEnd,
|
||||
{%- if struct|needs_control_serializer %}
|
||||
ControlSerializer *cs)
|
||||
{%- else %}
|
||||
[[maybe_unused]] ControlSerializer *cs = nullptr)
|
||||
{%- endif %}
|
||||
{
|
||||
{{struct|name_full}} ret;
|
||||
std::vector<uint8_t>::const_iterator m = dataBegin;
|
||||
std::vector<SharedFD>::const_iterator n = fdsBegin;
|
||||
|
||||
size_t dataSize = std::distance(dataBegin, dataEnd);
|
||||
[[maybe_unused]] size_t fdsSize = std::distance(fdsBegin, fdsEnd);
|
||||
{%- for field in struct.fields -%}
|
||||
{{deserializer_field(field, namespace, loop)}}
|
||||
{%- endfor %}
|
||||
return ret;
|
||||
}
|
||||
{%- endmacro %}
|
||||
|
||||
{#
|
||||
# \brief Deserialize a struct that has fds, using non-fd
|
||||
#
|
||||
# Generate code for IPADataSerializer specialization, for deserializing
|
||||
# \a struct, in the case that \a struct has no file descriptors but requires
|
||||
# deserializers with file descriptors.
|
||||
#}
|
||||
{%- macro deserializer_fd_simple(struct, namespace) %}
|
||||
static {{struct|name_full}}
|
||||
deserialize(std::vector<uint8_t> &data,
|
||||
[[maybe_unused]] std::vector<SharedFD> &fds,
|
||||
ControlSerializer *cs = nullptr)
|
||||
{
|
||||
return IPADataSerializer<{{struct|name_full}}>::deserialize(data.cbegin(), data.cend(), cs);
|
||||
}
|
||||
|
||||
static {{struct|name_full}}
|
||||
deserialize(std::vector<uint8_t>::const_iterator dataBegin,
|
||||
std::vector<uint8_t>::const_iterator dataEnd,
|
||||
[[maybe_unused]] std::vector<SharedFD>::const_iterator fdsBegin,
|
||||
[[maybe_unused]] std::vector<SharedFD>::const_iterator fdsEnd,
|
||||
ControlSerializer *cs = nullptr)
|
||||
{
|
||||
return IPADataSerializer<{{struct|name_full}}>::deserialize(dataBegin, dataEnd, cs);
|
||||
}
|
||||
{%- endmacro %}
|
||||
|
||||
|
||||
{#
|
||||
# \brief Deserialize a struct that has no fds
|
||||
#
|
||||
# Generate code for IPADataSerializer specialization, for deserializing
|
||||
# \a struct, in the case that \a struct does not have file descriptors.
|
||||
#}
|
||||
{%- macro deserializer_no_fd(struct, namespace) %}
|
||||
static {{struct|name_full}}
|
||||
deserialize(std::vector<uint8_t> &data,
|
||||
{%- if struct|needs_control_serializer %}
|
||||
ControlSerializer *cs)
|
||||
{%- else %}
|
||||
ControlSerializer *cs = nullptr)
|
||||
{%- endif %}
|
||||
{
|
||||
return IPADataSerializer<{{struct|name_full}}>::deserialize(data.cbegin(), data.cend(), cs);
|
||||
}
|
||||
|
||||
{# \todo Don't inline this function #}
|
||||
static {{struct|name_full}}
|
||||
deserialize(std::vector<uint8_t>::const_iterator dataBegin,
|
||||
std::vector<uint8_t>::const_iterator dataEnd,
|
||||
{%- if struct|needs_control_serializer %}
|
||||
ControlSerializer *cs)
|
||||
{%- else %}
|
||||
[[maybe_unused]] ControlSerializer *cs = nullptr)
|
||||
{%- endif %}
|
||||
{
|
||||
{{struct|name_full}} ret;
|
||||
std::vector<uint8_t>::const_iterator m = dataBegin;
|
||||
|
||||
size_t dataSize = std::distance(dataBegin, dataEnd);
|
||||
{%- for field in struct.fields -%}
|
||||
{{deserializer_field(field, namespace, loop)}}
|
||||
{%- endfor %}
|
||||
return ret;
|
||||
}
|
||||
{%- endmacro %}
|
3
utils/codegen/ipc/generators/meson.build
Normal file
3
utils/codegen/ipc/generators/meson.build
Normal file
|
@ -0,0 +1,3 @@
|
|||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
subdir('libcamera_templates')
|
553
utils/codegen/ipc/generators/mojom_libcamera_generator.py
Normal file
553
utils/codegen/ipc/generators/mojom_libcamera_generator.py
Normal file
|
@ -0,0 +1,553 @@
|
|||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: GPL-2.0-or-later
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
#
|
||||
# Author: Paul Elder <paul.elder@ideasonboard.com>
|
||||
#
|
||||
# Generates libcamera files from a mojom.Module.
|
||||
|
||||
import argparse
|
||||
import datetime
|
||||
import os
|
||||
import re
|
||||
|
||||
import mojom.fileutil as fileutil
|
||||
import mojom.generate.generator as generator
|
||||
import mojom.generate.module as mojom
|
||||
from mojom.generate.template_expander import UseJinja
|
||||
|
||||
|
||||
GENERATOR_PREFIX = 'libcamera'
|
||||
|
||||
_kind_to_cpp_type = {
|
||||
mojom.BOOL: 'bool',
|
||||
mojom.INT8: 'int8_t',
|
||||
mojom.UINT8: 'uint8_t',
|
||||
mojom.INT16: 'int16_t',
|
||||
mojom.UINT16: 'uint16_t',
|
||||
mojom.INT32: 'int32_t',
|
||||
mojom.UINT32: 'uint32_t',
|
||||
mojom.FLOAT: 'float',
|
||||
mojom.INT64: 'int64_t',
|
||||
mojom.UINT64: 'uint64_t',
|
||||
mojom.DOUBLE: 'double',
|
||||
}
|
||||
|
||||
_bit_widths = {
|
||||
mojom.BOOL: '8',
|
||||
mojom.INT8: '8',
|
||||
mojom.UINT8: '8',
|
||||
mojom.INT16: '16',
|
||||
mojom.UINT16: '16',
|
||||
mojom.INT32: '32',
|
||||
mojom.UINT32: '32',
|
||||
mojom.FLOAT: '32',
|
||||
mojom.INT64: '64',
|
||||
mojom.UINT64: '64',
|
||||
mojom.DOUBLE: '64',
|
||||
}
|
||||
|
||||
def ModuleName(path):
|
||||
return path.split('/')[-1].split('.')[0]
|
||||
|
||||
def ModuleClassName(module):
|
||||
return re.sub(r'^IPA(.*)Interface$', lambda match: match.group(1),
|
||||
module.interfaces[0].mojom_name)
|
||||
|
||||
def Capitalize(name):
|
||||
return name[0].upper() + name[1:]
|
||||
|
||||
def ConstantStyle(name):
|
||||
return generator.ToUpperSnakeCase(name)
|
||||
|
||||
def Choose(cond, t, f):
|
||||
return t if cond else f
|
||||
|
||||
def CommaSep(l):
|
||||
return ', '.join([m for m in l])
|
||||
|
||||
def ParamsCommaSep(l):
|
||||
return ', '.join([m.mojom_name for m in l])
|
||||
|
||||
def GetDefaultValue(element):
|
||||
if element.default is not None:
|
||||
return element.default
|
||||
if type(element.kind) == mojom.ValueKind:
|
||||
return '0'
|
||||
if IsFlags(element):
|
||||
return ''
|
||||
if mojom.IsEnumKind(element.kind):
|
||||
return f'static_cast<{element.kind.mojom_name}>(0)'
|
||||
if isinstance(element.kind, mojom.Struct) and \
|
||||
element.kind.mojom_name == 'SharedFD':
|
||||
return '-1'
|
||||
return ''
|
||||
|
||||
def HasDefaultValue(element):
|
||||
return GetDefaultValue(element) != ''
|
||||
|
||||
def HasDefaultFields(element):
|
||||
return True in [HasDefaultValue(x) for x in element.fields]
|
||||
|
||||
def GetAllTypes(element):
|
||||
if mojom.IsArrayKind(element):
|
||||
return GetAllTypes(element.kind)
|
||||
if mojom.IsMapKind(element):
|
||||
return GetAllTypes(element.key_kind) + GetAllTypes(element.value_kind)
|
||||
if isinstance(element, mojom.Parameter):
|
||||
return GetAllTypes(element.kind)
|
||||
if mojom.IsEnumKind(element):
|
||||
return [element.mojom_name]
|
||||
if not mojom.IsStructKind(element):
|
||||
return [element.spec]
|
||||
if len(element.fields) == 0:
|
||||
return [element.mojom_name]
|
||||
ret = [GetAllTypes(x.kind) for x in element.fields]
|
||||
ret = [x for sublist in ret for x in sublist]
|
||||
return list(set(ret))
|
||||
|
||||
def GetAllAttrs(element):
|
||||
if mojom.IsArrayKind(element):
|
||||
return GetAllAttrs(element.kind)
|
||||
if mojom.IsMapKind(element):
|
||||
return {**GetAllAttrs(element.key_kind), **GetAllAttrs(element.value_kind)}
|
||||
if isinstance(element, mojom.Parameter):
|
||||
return GetAllAttrs(element.kind)
|
||||
if mojom.IsEnumKind(element):
|
||||
return element.attributes if element.attributes is not None else {}
|
||||
if mojom.IsStructKind(element) and len(element.fields) == 0:
|
||||
return element.attributes if element.attributes is not None else {}
|
||||
if not mojom.IsStructKind(element):
|
||||
if hasattr(element, 'attributes'):
|
||||
return element.attributes or {}
|
||||
return {}
|
||||
attrs = [(x.attributes) for x in element.fields]
|
||||
ret = {}
|
||||
for d in attrs:
|
||||
ret.update(d or {})
|
||||
if hasattr(element, 'attributes'):
|
||||
ret.update(element.attributes or {})
|
||||
return ret
|
||||
|
||||
def NeedsControlSerializer(element):
|
||||
types = GetAllTypes(element)
|
||||
for type in ['ControlList', 'ControlInfoMap']:
|
||||
if f'x:{type}' in types:
|
||||
raise Exception(f'Unknown type "{type}" in {element.mojom_name}, did you mean "libcamera.{type}"?')
|
||||
return "ControlList" in types or "ControlInfoMap" in types
|
||||
|
||||
def HasFd(element):
|
||||
attrs = GetAllAttrs(element)
|
||||
if isinstance(element, mojom.Kind):
|
||||
types = GetAllTypes(element)
|
||||
else:
|
||||
types = GetAllTypes(element.kind)
|
||||
return "SharedFD" in types or (attrs is not None and "hasFd" in attrs)
|
||||
|
||||
def WithDefaultValues(element):
|
||||
return [x for x in element if HasDefaultValue(x)]
|
||||
|
||||
def WithFds(element):
|
||||
return [x for x in element if HasFd(x)]
|
||||
|
||||
def MethodParamInputs(method):
|
||||
return method.parameters
|
||||
|
||||
def MethodParamOutputs(method):
|
||||
if method.response_parameters is None:
|
||||
return []
|
||||
|
||||
if MethodReturnValue(method) == 'void':
|
||||
return method.response_parameters
|
||||
|
||||
if len(method.response_parameters) <= 1:
|
||||
return []
|
||||
|
||||
return method.response_parameters[1:]
|
||||
|
||||
def MethodParamsHaveFd(parameters):
|
||||
return len([x for x in parameters if HasFd(x)]) > 0
|
||||
|
||||
def MethodInputHasFd(method):
|
||||
return MethodParamsHaveFd(method.parameters)
|
||||
|
||||
def MethodOutputHasFd(method):
|
||||
return MethodParamsHaveFd(MethodParamOutputs(method))
|
||||
|
||||
def MethodParamNames(method):
|
||||
params = []
|
||||
for param in method.parameters:
|
||||
params.append(param.mojom_name)
|
||||
for param in MethodParamOutputs(method):
|
||||
params.append(param.mojom_name)
|
||||
return params
|
||||
|
||||
def MethodParameters(method):
|
||||
params = []
|
||||
for param in method.parameters:
|
||||
params.append('const %s %s%s' % (GetNameForElement(param),
|
||||
'' if IsPod(param) or IsEnum(param) else '&',
|
||||
param.mojom_name))
|
||||
for param in MethodParamOutputs(method):
|
||||
params.append(f'{GetNameForElement(param)} *{param.mojom_name}')
|
||||
return params
|
||||
|
||||
def MethodReturnValue(method):
|
||||
if method.response_parameters is None or len(method.response_parameters) == 0:
|
||||
return 'void'
|
||||
first_output = method.response_parameters[0]
|
||||
if ((len(method.response_parameters) == 1 and IsPod(first_output)) or
|
||||
first_output.kind == mojom.INT32):
|
||||
return GetNameForElement(first_output)
|
||||
return 'void'
|
||||
|
||||
def IsAsync(method):
|
||||
# Events are always async
|
||||
if re.match("^IPA.*EventInterface$", method.interface.mojom_name):
|
||||
return True
|
||||
elif re.match("^IPA.*Interface$", method.interface.mojom_name):
|
||||
if method.attributes is None:
|
||||
return False
|
||||
elif 'async' in method.attributes and method.attributes['async']:
|
||||
return True
|
||||
return False
|
||||
|
||||
def IsArray(element):
|
||||
return mojom.IsArrayKind(element.kind)
|
||||
|
||||
def IsControls(element):
|
||||
return mojom.IsStructKind(element.kind) and (element.kind.mojom_name == "ControlList" or
|
||||
element.kind.mojom_name == "ControlInfoMap")
|
||||
|
||||
def IsEnum(element):
|
||||
return mojom.IsEnumKind(element.kind)
|
||||
|
||||
|
||||
# Only works the enum definition, not types
|
||||
def IsScoped(element):
|
||||
attributes = getattr(element, 'attributes', None)
|
||||
if not attributes:
|
||||
return False
|
||||
return 'scopedEnum' in attributes
|
||||
|
||||
|
||||
def IsEnumScoped(element):
|
||||
if not IsEnum(element):
|
||||
return False
|
||||
return IsScoped(element.kind)
|
||||
|
||||
def IsFd(element):
|
||||
return mojom.IsStructKind(element.kind) and element.kind.mojom_name == "SharedFD"
|
||||
|
||||
|
||||
def IsFlags(element):
|
||||
attributes = getattr(element, 'attributes', None)
|
||||
if not attributes:
|
||||
return False
|
||||
return 'flags' in attributes
|
||||
|
||||
def IsMap(element):
|
||||
return mojom.IsMapKind(element.kind)
|
||||
|
||||
def IsPlainStruct(element):
|
||||
return mojom.IsStructKind(element.kind) and not IsControls(element) and not IsFd(element)
|
||||
|
||||
def IsPod(element):
|
||||
return element.kind in _kind_to_cpp_type
|
||||
|
||||
def IsStr(element):
|
||||
return element.kind.spec == 's'
|
||||
|
||||
def BitWidth(element):
|
||||
if element.kind in _bit_widths:
|
||||
return _bit_widths[element.kind]
|
||||
if mojom.IsEnumKind(element.kind):
|
||||
return '32'
|
||||
return ''
|
||||
|
||||
def ByteWidthFromCppType(t):
|
||||
key = None
|
||||
for mojo_type, cpp_type in _kind_to_cpp_type.items():
|
||||
if t == cpp_type:
|
||||
key = mojo_type
|
||||
if key is None:
|
||||
raise Exception('invalid type')
|
||||
return str(int(_bit_widths[key]) // 8)
|
||||
|
||||
# Get the type name for a given element
|
||||
def GetNameForElement(element):
|
||||
# Flags
|
||||
if IsFlags(element):
|
||||
return f'Flags<{GetFullNameForElement(element.kind)}>'
|
||||
# structs
|
||||
if (mojom.IsEnumKind(element) or
|
||||
mojom.IsInterfaceKind(element) or
|
||||
mojom.IsStructKind(element)):
|
||||
return element.mojom_name
|
||||
# vectors
|
||||
if (mojom.IsArrayKind(element)):
|
||||
elem_name = GetFullNameForElement(element.kind)
|
||||
return f'std::vector<{elem_name}>'
|
||||
# maps
|
||||
if (mojom.IsMapKind(element)):
|
||||
key_name = GetFullNameForElement(element.key_kind)
|
||||
value_name = GetFullNameForElement(element.value_kind)
|
||||
return f'std::map<{key_name}, {value_name}>'
|
||||
# struct fields and function parameters
|
||||
if isinstance(element, (mojom.Field, mojom.Method, mojom.Parameter)):
|
||||
# maps and vectors
|
||||
if (mojom.IsArrayKind(element.kind) or mojom.IsMapKind(element.kind)):
|
||||
return GetNameForElement(element.kind)
|
||||
# strings
|
||||
if (mojom.IsReferenceKind(element.kind) and element.kind.spec == 's'):
|
||||
return 'std::string'
|
||||
# PODs
|
||||
if element.kind in _kind_to_cpp_type:
|
||||
return _kind_to_cpp_type[element.kind]
|
||||
# structs and enums
|
||||
return element.kind.mojom_name
|
||||
# PODs that are members of vectors/maps
|
||||
if (hasattr(element, '__hash__') and element in _kind_to_cpp_type):
|
||||
return _kind_to_cpp_type[element]
|
||||
if (hasattr(element, 'spec')):
|
||||
# strings that are members of vectors/maps
|
||||
if (element.spec == 's'):
|
||||
return 'std::string'
|
||||
# structs that aren't defined in mojom that are members of vectors/maps
|
||||
if (element.spec[0] == 'x'):
|
||||
return element.spec.replace('x:', '').replace('.', '::')
|
||||
if (mojom.IsInterfaceRequestKind(element) or
|
||||
mojom.IsAssociatedKind(element) or
|
||||
mojom.IsPendingRemoteKind(element) or
|
||||
mojom.IsPendingReceiverKind(element) or
|
||||
mojom.IsUnionKind(element)):
|
||||
raise Exception('Unsupported element: %s' % element)
|
||||
raise Exception('Unexpected element: %s' % element)
|
||||
|
||||
def GetFullNameForElement(element):
|
||||
name = GetNameForElement(element)
|
||||
namespace_str = ''
|
||||
if (mojom.IsStructKind(element) or mojom.IsEnumKind(element)):
|
||||
namespace_str = element.module.mojom_namespace.replace('.', '::')
|
||||
elif (hasattr(element, 'kind') and
|
||||
(mojom.IsStructKind(element.kind) or mojom.IsEnumKind(element.kind))):
|
||||
namespace_str = element.kind.module.mojom_namespace.replace('.', '::')
|
||||
|
||||
if namespace_str == '':
|
||||
return name
|
||||
|
||||
if IsFlags(element):
|
||||
return GetNameForElement(element)
|
||||
|
||||
return f'{namespace_str}::{name}'
|
||||
|
||||
def ValidateZeroLength(l, s, cap=True):
|
||||
if l is None:
|
||||
return
|
||||
if len(l) > 0:
|
||||
raise Exception(f'{s.capitalize() if cap else s} should be empty')
|
||||
|
||||
def ValidateSingleLength(l, s, cap=True):
|
||||
if len(l) > 1:
|
||||
raise Exception(f'Only one {s} allowed')
|
||||
if len(l) < 1:
|
||||
raise Exception(f'{s.capitalize() if cap else s} is required')
|
||||
|
||||
def GetMainInterface(interfaces):
|
||||
intf = [x for x in interfaces
|
||||
if re.match("^IPA.*Interface", x.mojom_name) and
|
||||
not re.match("^IPA.*EventInterface", x.mojom_name)]
|
||||
ValidateSingleLength(intf, 'main interface')
|
||||
return None if len(intf) == 0 else intf[0]
|
||||
|
||||
def GetEventInterface(interfaces):
|
||||
event = [x for x in interfaces if re.match("^IPA.*EventInterface", x.mojom_name)]
|
||||
ValidateSingleLength(event, 'event interface')
|
||||
return None if len(event) == 0 else event[0]
|
||||
|
||||
def ValidateNamespace(namespace):
|
||||
if namespace == '':
|
||||
raise Exception('Must have a namespace')
|
||||
|
||||
if not re.match(r'^ipa\.[0-9A-Za-z_]+', namespace):
|
||||
raise Exception('Namespace must be of the form "ipa.{pipeline_name}"')
|
||||
|
||||
def ValidateInterfaces(interfaces):
|
||||
# Validate presence of main interface
|
||||
intf = GetMainInterface(interfaces)
|
||||
if intf is None:
|
||||
raise Exception('Must have main IPA interface')
|
||||
|
||||
# Validate presence of event interface
|
||||
event = GetEventInterface(interfaces)
|
||||
if intf is None:
|
||||
raise Exception('Must have event IPA interface')
|
||||
|
||||
# Validate required main interface functions
|
||||
f_init = [x for x in intf.methods if x.mojom_name == 'init']
|
||||
f_start = [x for x in intf.methods if x.mojom_name == 'start']
|
||||
f_stop = [x for x in intf.methods if x.mojom_name == 'stop']
|
||||
|
||||
ValidateSingleLength(f_init, 'init()', False)
|
||||
ValidateSingleLength(f_start, 'start()', False)
|
||||
ValidateSingleLength(f_stop, 'stop()', False)
|
||||
|
||||
f_stop = f_stop[0]
|
||||
|
||||
# No need to validate init() and start() as they are customizable
|
||||
|
||||
# Validate parameters to stop()
|
||||
ValidateZeroLength(f_stop.parameters, 'input parameter to stop()')
|
||||
ValidateZeroLength(f_stop.parameters, 'output parameter from stop()')
|
||||
|
||||
# Validate that event interface has at least one event
|
||||
if len(event.methods) < 1:
|
||||
raise Exception('Event interface must have at least one event')
|
||||
|
||||
# Validate that all async methods don't have return values
|
||||
intf_methods_async = [x for x in intf.methods if IsAsync(x)]
|
||||
for method in intf_methods_async:
|
||||
ValidateZeroLength(method.response_parameters,
|
||||
f'{method.mojom_name} response parameters', False)
|
||||
|
||||
event_methods_async = [x for x in event.methods if IsAsync(x)]
|
||||
for method in event_methods_async:
|
||||
ValidateZeroLength(method.response_parameters,
|
||||
f'{method.mojom_name} response parameters', False)
|
||||
|
||||
class Generator(generator.Generator):
|
||||
@staticmethod
|
||||
def GetTemplatePrefix():
|
||||
return 'libcamera_templates'
|
||||
|
||||
def GetFilters(self):
|
||||
libcamera_filters = {
|
||||
'all_types': GetAllTypes,
|
||||
'bit_width': BitWidth,
|
||||
'byte_width' : ByteWidthFromCppType,
|
||||
'cap': Capitalize,
|
||||
'choose': Choose,
|
||||
'comma_sep': CommaSep,
|
||||
'default_value': GetDefaultValue,
|
||||
'has_default_fields': HasDefaultFields,
|
||||
'has_fd': HasFd,
|
||||
'is_async': IsAsync,
|
||||
'is_array': IsArray,
|
||||
'is_controls': IsControls,
|
||||
'is_enum': IsEnum,
|
||||
'is_enum_scoped': IsEnumScoped,
|
||||
'is_fd': IsFd,
|
||||
'is_flags': IsFlags,
|
||||
'is_map': IsMap,
|
||||
'is_plain_struct': IsPlainStruct,
|
||||
'is_pod': IsPod,
|
||||
'is_scoped': IsScoped,
|
||||
'is_str': IsStr,
|
||||
'method_input_has_fd': MethodInputHasFd,
|
||||
'method_output_has_fd': MethodOutputHasFd,
|
||||
'method_param_names': MethodParamNames,
|
||||
'method_param_inputs': MethodParamInputs,
|
||||
'method_param_outputs': MethodParamOutputs,
|
||||
'method_parameters': MethodParameters,
|
||||
'method_return_value': MethodReturnValue,
|
||||
'name': GetNameForElement,
|
||||
'name_full': GetFullNameForElement,
|
||||
'needs_control_serializer': NeedsControlSerializer,
|
||||
'params_comma_sep': ParamsCommaSep,
|
||||
'with_default_values': WithDefaultValues,
|
||||
'with_fds': WithFds,
|
||||
}
|
||||
return libcamera_filters
|
||||
|
||||
def _GetJinjaExports(self):
|
||||
return {
|
||||
'cmd_enum_name': '_%sCmd' % self.module_name,
|
||||
'cmd_event_enum_name': '_%sEventCmd' % self.module_name,
|
||||
'consts': self.module.constants,
|
||||
'enums': self.module.enums,
|
||||
'has_array': len([x for x in self.module.kinds.keys() if x[0] == 'a']) > 0,
|
||||
'has_map': len([x for x in self.module.kinds.keys() if x[0] == 'm']) > 0,
|
||||
'has_namespace': self.module.mojom_namespace != '',
|
||||
'interface_event': GetEventInterface(self.module.interfaces),
|
||||
'interface_main': GetMainInterface(self.module.interfaces),
|
||||
'interface_name': 'IPA%sInterface' % self.module_name,
|
||||
'module_name': ModuleName(self.module.path),
|
||||
'namespace': self.module.mojom_namespace.split('.'),
|
||||
'namespace_str': self.module.mojom_namespace.replace('.', '::') if
|
||||
self.module.mojom_namespace is not None else '',
|
||||
'proxy_name': 'IPAProxy%s' % self.module_name,
|
||||
'proxy_worker_name': 'IPAProxy%sWorker' % self.module_name,
|
||||
'structs_nonempty': [x for x in self.module.structs if len(x.fields) > 0],
|
||||
}
|
||||
|
||||
def _GetJinjaExportsForCore(self):
|
||||
return {
|
||||
'consts': self.module.constants,
|
||||
'enums_gen_header': [x for x in self.module.enums if x.attributes is None or 'skipHeader' not in x.attributes],
|
||||
'has_array': len([x for x in self.module.kinds.keys() if x[0] == 'a']) > 0,
|
||||
'has_map': len([x for x in self.module.kinds.keys() if x[0] == 'm']) > 0,
|
||||
'structs_gen_header': [x for x in self.module.structs if x.attributes is None or 'skipHeader' not in x.attributes],
|
||||
'structs_gen_serializer': [x for x in self.module.structs if x.attributes is None or 'skipSerdes' not in x.attributes],
|
||||
}
|
||||
|
||||
@UseJinja('core_ipa_interface.h.tmpl')
|
||||
def _GenerateCoreHeader(self):
|
||||
return self._GetJinjaExportsForCore()
|
||||
|
||||
@UseJinja('core_ipa_serializer.h.tmpl')
|
||||
def _GenerateCoreSerializer(self):
|
||||
return self._GetJinjaExportsForCore()
|
||||
|
||||
@UseJinja('module_ipa_interface.h.tmpl')
|
||||
def _GenerateDataHeader(self):
|
||||
return self._GetJinjaExports()
|
||||
|
||||
@UseJinja('module_ipa_serializer.h.tmpl')
|
||||
def _GenerateSerializer(self):
|
||||
return self._GetJinjaExports()
|
||||
|
||||
@UseJinja('module_ipa_proxy.cpp.tmpl')
|
||||
def _GenerateProxyCpp(self):
|
||||
return self._GetJinjaExports()
|
||||
|
||||
@UseJinja('module_ipa_proxy.h.tmpl')
|
||||
def _GenerateProxyHeader(self):
|
||||
return self._GetJinjaExports()
|
||||
|
||||
@UseJinja('module_ipa_proxy_worker.cpp.tmpl')
|
||||
def _GenerateProxyWorker(self):
|
||||
return self._GetJinjaExports()
|
||||
|
||||
def GenerateFiles(self, unparsed_args):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--libcamera_generate_core_header', action='store_true')
|
||||
parser.add_argument('--libcamera_generate_core_serializer', action='store_true')
|
||||
parser.add_argument('--libcamera_generate_header', action='store_true')
|
||||
parser.add_argument('--libcamera_generate_serializer', action='store_true')
|
||||
parser.add_argument('--libcamera_generate_proxy_cpp', action='store_true')
|
||||
parser.add_argument('--libcamera_generate_proxy_h', action='store_true')
|
||||
parser.add_argument('--libcamera_generate_proxy_worker', action='store_true')
|
||||
parser.add_argument('--libcamera_output_path')
|
||||
args = parser.parse_args(unparsed_args)
|
||||
|
||||
if not args.libcamera_generate_core_header and \
|
||||
not args.libcamera_generate_core_serializer:
|
||||
ValidateNamespace(self.module.mojom_namespace)
|
||||
ValidateInterfaces(self.module.interfaces)
|
||||
self.module_name = ModuleClassName(self.module)
|
||||
|
||||
fileutil.EnsureDirectoryExists(os.path.dirname(args.libcamera_output_path))
|
||||
|
||||
gen_funcs = [
|
||||
[args.libcamera_generate_core_header, self._GenerateCoreHeader],
|
||||
[args.libcamera_generate_core_serializer, self._GenerateCoreSerializer],
|
||||
[args.libcamera_generate_header, self._GenerateDataHeader],
|
||||
[args.libcamera_generate_serializer, self._GenerateSerializer],
|
||||
[args.libcamera_generate_proxy_cpp, self._GenerateProxyCpp],
|
||||
[args.libcamera_generate_proxy_h, self._GenerateProxyHeader],
|
||||
[args.libcamera_generate_proxy_worker, self._GenerateProxyWorker],
|
||||
]
|
||||
|
||||
for pair in gen_funcs:
|
||||
if pair[0]:
|
||||
self.Write(pair[1](), args.libcamera_output_path)
|
18
utils/codegen/ipc/meson.build
Normal file
18
utils/codegen/ipc/meson.build
Normal file
|
@ -0,0 +1,18 @@
|
|||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
subdir('generators')
|
||||
|
||||
py_modules += ['jinja2', 'ply']
|
||||
|
||||
mojom_parser = find_program('./parser.py')
|
||||
|
||||
mojom_generator = find_program('./generate.py')
|
||||
|
||||
mojom_docs_extractor = find_program('./extract-docs.py')
|
||||
|
||||
mojom_templates = custom_target('mojom_templates',
|
||||
input : mojom_template_files,
|
||||
output : 'libcamera_templates.zip',
|
||||
command : [mojom_generator, '-o', '@OUTDIR@', 'precompile'])
|
||||
|
||||
mojom_templates_dir = meson.current_build_dir()
|
4
utils/codegen/ipc/mojo/README
Normal file
4
utils/codegen/ipc/mojo/README
Normal file
|
@ -0,0 +1,4 @@
|
|||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
Files in this directory are imported from 9be4263648d7 of Chromium. Do not
|
||||
modify them manually.
|
27
utils/codegen/ipc/mojo/public/LICENSE
Normal file
27
utils/codegen/ipc/mojo/public/LICENSE
Normal file
|
@ -0,0 +1,27 @@
|
|||
// Copyright 2014 The Chromium Authors
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
6
utils/codegen/ipc/mojo/public/tools/.style.yapf
Normal file
6
utils/codegen/ipc/mojo/public/tools/.style.yapf
Normal file
|
@ -0,0 +1,6 @@
|
|||
[style]
|
||||
based_on_style = pep8
|
||||
|
||||
# New directories should use a .style.yapf that does not include the following:
|
||||
column_limit = 80
|
||||
indent_width = 2
|
22
utils/codegen/ipc/mojo/public/tools/BUILD.gn
Normal file
22
utils/codegen/ipc/mojo/public/tools/BUILD.gn
Normal file
|
@ -0,0 +1,22 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# The main target used to aggregate all unit tests for Python-based Mojo tools.
|
||||
# This is used to generate a complete isolate which can be pushed to bots to run
|
||||
# the tests.
|
||||
group("mojo_python_unittests") {
|
||||
data = [
|
||||
"run_all_python_unittests.py",
|
||||
"//testing/scripts/run_isolated_script_test.py",
|
||||
]
|
||||
deps = [
|
||||
"//mojo/public/tools/bindings:tests",
|
||||
"//mojo/public/tools/mojom:tests",
|
||||
"//mojo/public/tools/mojom/mojom:tests",
|
||||
]
|
||||
data_deps = [
|
||||
"//testing:test_scripts_shared",
|
||||
"//third_party/catapult/third_party/typ/",
|
||||
]
|
||||
}
|
131
utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn
Normal file
131
utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn
Normal file
|
@ -0,0 +1,131 @@
|
|||
# Copyright 2016 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import("//mojo/public/tools/bindings/mojom.gni")
|
||||
import("//third_party/jinja2/jinja2.gni")
|
||||
|
||||
action("precompile_templates") {
|
||||
sources = mojom_generator_sources
|
||||
sources += [
|
||||
"$mojom_generator_root/generators/cpp_templates/cpp_macros.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/enum_macros.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/enum_serialization_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/feature_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/feature_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/interface_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/interface_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/interface_feature_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/interface_macros.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/interface_proxy_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/interface_request_validator_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/interface_response_validator_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/interface_stub_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-features.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-forward.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-import-headers.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-params-data.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-shared-internal.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-shared-message-ids.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-shared.cc.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-shared.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module-test-utils.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module.cc.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/module.h.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_data_view_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_data_view_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_macros.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_serialization_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_traits_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_traits_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/struct_unserialized_message_context.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/union_data_view_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/union_data_view_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/union_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/union_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/union_serialization_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/union_traits_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/union_traits_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/validation_macros.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/wrapper_class_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/wrapper_class_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/wrapper_class_template_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/wrapper_union_class_declaration.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/wrapper_union_class_definition.tmpl",
|
||||
"$mojom_generator_root/generators/cpp_templates/wrapper_union_class_template_definition.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/constant_definition.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/constants.java.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/data_types_definition.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/enum.java.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/enum_definition.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/header.java.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/interface.java.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/interface_definition.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/interface_internal.java.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/struct.java.tmpl",
|
||||
"$mojom_generator_root/generators/java_templates/union.java.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/enum_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/fuzzing.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/interface_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/enum_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/enum_definition_for_module.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/interface_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/interface_definition_for_module.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/module_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/mojom-lite.js.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/mojom.m.js.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/struct_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/struct_definition_for_module.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/union_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/lite/union_definition_for_module.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/module.amd.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/module_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/struct_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/union_definition.tmpl",
|
||||
"$mojom_generator_root/generators/js_templates/validation_macros.tmpl",
|
||||
"$mojom_generator_root/generators/mojolpm_templates/mojolpm.cc.tmpl",
|
||||
"$mojom_generator_root/generators/mojolpm_templates/mojolpm.h.tmpl",
|
||||
"$mojom_generator_root/generators/mojolpm_templates/mojolpm.proto.tmpl",
|
||||
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_from_proto_macros.tmpl",
|
||||
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_macros.tmpl",
|
||||
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_to_proto_macros.tmpl",
|
||||
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_traits_specialization_macros.tmpl",
|
||||
"$mojom_generator_root/generators/ts_templates/enum_definition.tmpl",
|
||||
"$mojom_generator_root/generators/ts_templates/interface_definition.tmpl",
|
||||
"$mojom_generator_root/generators/ts_templates/module_definition.tmpl",
|
||||
"$mojom_generator_root/generators/ts_templates/struct_definition.tmpl",
|
||||
"$mojom_generator_root/generators/ts_templates/union_definition.tmpl",
|
||||
]
|
||||
script = mojom_generator_script
|
||||
|
||||
inputs = jinja2_sources
|
||||
outputs = [
|
||||
"$target_gen_dir/cpp_templates.zip",
|
||||
"$target_gen_dir/java_templates.zip",
|
||||
"$target_gen_dir/js_templates.zip",
|
||||
"$target_gen_dir/mojolpm_templates.zip",
|
||||
"$target_gen_dir/ts_templates.zip",
|
||||
]
|
||||
args = [
|
||||
"-o",
|
||||
rebase_path(target_gen_dir, root_build_dir),
|
||||
"--use_bundled_pylibs",
|
||||
"precompile",
|
||||
]
|
||||
}
|
||||
|
||||
group("tests") {
|
||||
data = [
|
||||
mojom_generator_script,
|
||||
"checks/mojom_attributes_check_unittest.py",
|
||||
"checks/mojom_interface_feature_check_unittest.py",
|
||||
"checks/mojom_restrictions_checks_unittest.py",
|
||||
"mojom_bindings_generator_unittest.py",
|
||||
"//tools/diagnosis/crbug_1001171.py",
|
||||
"//third_party/markupsafe/",
|
||||
]
|
||||
data += mojom_generator_sources
|
||||
data += jinja2_sources
|
||||
}
|
1014
utils/codegen/ipc/mojo/public/tools/bindings/README.md
Normal file
1014
utils/codegen/ipc/mojo/public/tools/bindings/README.md
Normal file
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,170 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Validate mojo attributes are allowed in Chrome before generation."""
|
||||
|
||||
import mojom.generate.check as check
|
||||
import mojom.generate.module as module
|
||||
|
||||
_COMMON_ATTRIBUTES = {
|
||||
'EnableIf',
|
||||
'EnableIfNot',
|
||||
}
|
||||
|
||||
# For struct, union & parameter lists.
|
||||
_COMMON_FIELD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
|
||||
'MinVersion',
|
||||
'RenamedFrom',
|
||||
}
|
||||
|
||||
# Note: `Default`` goes on the default _value_, not on the enum.
|
||||
# Note: [Stable] without [Extensible] is not allowed.
|
||||
_ENUM_ATTRIBUTES = _COMMON_ATTRIBUTES | {
|
||||
'Extensible',
|
||||
'Native',
|
||||
'Stable',
|
||||
'RenamedFrom',
|
||||
'Uuid',
|
||||
}
|
||||
|
||||
# TODO(crbug.com/1234883) MinVersion is not needed for EnumVal.
|
||||
_ENUMVAL_ATTRIBUTES = _COMMON_ATTRIBUTES | {
|
||||
'Default',
|
||||
'MinVersion',
|
||||
}
|
||||
|
||||
_INTERFACE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
|
||||
'RenamedFrom',
|
||||
'RequireContext',
|
||||
'RuntimeFeature',
|
||||
'ServiceSandbox',
|
||||
'Stable',
|
||||
'Uuid',
|
||||
}
|
||||
|
||||
_METHOD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
|
||||
'AllowedContext',
|
||||
'MinVersion',
|
||||
'NoInterrupt',
|
||||
'RuntimeFeature',
|
||||
'SupportsUrgent',
|
||||
'Sync',
|
||||
'UnlimitedSize',
|
||||
}
|
||||
|
||||
_MODULE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
|
||||
'JavaConstantsClassName',
|
||||
'JavaPackage',
|
||||
}
|
||||
|
||||
_PARAMETER_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES
|
||||
|
||||
_STRUCT_ATTRIBUTES = _COMMON_ATTRIBUTES | {
|
||||
'CustomSerializer',
|
||||
'JavaClassName',
|
||||
'Native',
|
||||
'Stable',
|
||||
'RenamedFrom',
|
||||
'Uuid',
|
||||
}
|
||||
|
||||
_STRUCT_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES
|
||||
|
||||
_UNION_ATTRIBUTES = _COMMON_ATTRIBUTES | {
|
||||
'Extensible',
|
||||
'Stable',
|
||||
'RenamedFrom',
|
||||
'Uuid',
|
||||
}
|
||||
|
||||
_UNION_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES | {
|
||||
'Default',
|
||||
}
|
||||
|
||||
# TODO(https://crbug.com/1193875) empty this set and remove the allowlist.
|
||||
_STABLE_ONLY_ALLOWLISTED_ENUMS = {
|
||||
'crosapi.mojom.OptionalBool',
|
||||
'crosapi.mojom.TriState',
|
||||
}
|
||||
|
||||
|
||||
class Check(check.Check):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Check, self).__init__(*args, **kwargs)
|
||||
|
||||
def _Respell(self, allowed, attribute):
|
||||
for a in allowed:
|
||||
if a.lower() == attribute.lower():
|
||||
return f" - Did you mean: {a}?"
|
||||
return ""
|
||||
|
||||
def _CheckAttributes(self, context, allowed, attributes):
|
||||
if not attributes:
|
||||
return
|
||||
for attribute in attributes:
|
||||
if not attribute in allowed:
|
||||
# Is there a close misspelling?
|
||||
hint = self._Respell(allowed, attribute)
|
||||
raise check.CheckException(
|
||||
self.module,
|
||||
f"attribute {attribute} not allowed on {context}{hint}")
|
||||
|
||||
def _CheckEnumAttributes(self, enum):
|
||||
if enum.attributes:
|
||||
self._CheckAttributes("enum", _ENUM_ATTRIBUTES, enum.attributes)
|
||||
if 'Stable' in enum.attributes and not 'Extensible' in enum.attributes:
|
||||
full_name = f"{self.module.mojom_namespace}.{enum.mojom_name}"
|
||||
if full_name not in _STABLE_ONLY_ALLOWLISTED_ENUMS:
|
||||
raise check.CheckException(
|
||||
self.module,
|
||||
f"[Extensible] required on [Stable] enum {full_name}")
|
||||
for enumval in enum.fields:
|
||||
self._CheckAttributes("enum value", _ENUMVAL_ATTRIBUTES,
|
||||
enumval.attributes)
|
||||
|
||||
def _CheckInterfaceAttributes(self, interface):
|
||||
self._CheckAttributes("interface", _INTERFACE_ATTRIBUTES,
|
||||
interface.attributes)
|
||||
for method in interface.methods:
|
||||
self._CheckAttributes("method", _METHOD_ATTRIBUTES, method.attributes)
|
||||
for param in method.parameters:
|
||||
self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
|
||||
param.attributes)
|
||||
if method.response_parameters:
|
||||
for param in method.response_parameters:
|
||||
self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
|
||||
param.attributes)
|
||||
for enum in interface.enums:
|
||||
self._CheckEnumAttributes(enum)
|
||||
|
||||
def _CheckModuleAttributes(self):
|
||||
self._CheckAttributes("module", _MODULE_ATTRIBUTES, self.module.attributes)
|
||||
|
||||
def _CheckStructAttributes(self, struct):
|
||||
self._CheckAttributes("struct", _STRUCT_ATTRIBUTES, struct.attributes)
|
||||
for field in struct.fields:
|
||||
self._CheckAttributes("struct field", _STRUCT_FIELD_ATTRIBUTES,
|
||||
field.attributes)
|
||||
for enum in struct.enums:
|
||||
self._CheckEnumAttributes(enum)
|
||||
|
||||
def _CheckUnionAttributes(self, union):
|
||||
self._CheckAttributes("union", _UNION_ATTRIBUTES, union.attributes)
|
||||
for field in union.fields:
|
||||
self._CheckAttributes("union field", _UNION_FIELD_ATTRIBUTES,
|
||||
field.attributes)
|
||||
|
||||
def CheckModule(self):
|
||||
"""Note that duplicate attributes are forbidden at the parse phase.
|
||||
We also do not need to look at the types of any parameters, as they will be
|
||||
checked where they are defined. Consts do not have attributes so can be
|
||||
skipped."""
|
||||
self._CheckModuleAttributes()
|
||||
for interface in self.module.interfaces:
|
||||
self._CheckInterfaceAttributes(interface)
|
||||
for enum in self.module.enums:
|
||||
self._CheckEnumAttributes(enum)
|
||||
for struct in self.module.structs:
|
||||
self._CheckStructAttributes(struct)
|
||||
for union in self.module.unions:
|
||||
self._CheckUnionAttributes(union)
|
|
@ -0,0 +1,194 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
import mojom.generate.check as check
|
||||
from mojom_bindings_generator import LoadChecks, _Generate
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class FakeArgs:
|
||||
"""Fakes args to _Generate - intention is to do just enough to run checks"""
|
||||
|
||||
def __init__(self, tester, files=None):
|
||||
""" `tester` is MojomParserTestCase for paths.
|
||||
`files` will have tester path added."""
|
||||
self.checks_string = 'attributes'
|
||||
self.depth = tester.GetPath('')
|
||||
self.filelist = None
|
||||
self.filename = [tester.GetPath(x) for x in files]
|
||||
self.gen_directories = tester.GetPath('gen')
|
||||
self.generators_string = ''
|
||||
self.import_directories = []
|
||||
self.output_dir = tester.GetPath('out')
|
||||
self.scrambled_message_id_salt_paths = None
|
||||
self.typemaps = []
|
||||
self.variant = 'none'
|
||||
|
||||
|
||||
class MojoBindingsCheckTest(MojomParserTestCase):
|
||||
def _ParseAndGenerate(self, mojoms):
|
||||
self.ParseMojoms(mojoms)
|
||||
args = FakeArgs(self, files=mojoms)
|
||||
_Generate(args, {})
|
||||
|
||||
def _testValid(self, filename, content):
|
||||
self.WriteFile(filename, content)
|
||||
self._ParseAndGenerate([filename])
|
||||
|
||||
def _testThrows(self, filename, content, regexp):
|
||||
mojoms = []
|
||||
self.WriteFile(filename, content)
|
||||
mojoms.append(filename)
|
||||
with self.assertRaisesRegexp(check.CheckException, regexp):
|
||||
self._ParseAndGenerate(mojoms)
|
||||
|
||||
def testLoads(self):
|
||||
"""Validate that the check is registered under the expected name."""
|
||||
check_modules = LoadChecks('attributes')
|
||||
self.assertTrue(check_modules['attributes'])
|
||||
|
||||
def testNoAnnotations(self):
|
||||
# Undecorated mojom should be fine.
|
||||
self._testValid(
|
||||
"a.mojom", """
|
||||
module a;
|
||||
struct Bar { int32 a; };
|
||||
enum Hello { kValue };
|
||||
union Thingy { Bar b; Hello hi; };
|
||||
interface Foo {
|
||||
Foo(int32 a, Hello hi, Thingy t) => (Bar b);
|
||||
};
|
||||
""")
|
||||
|
||||
def testValidAnnotations(self):
|
||||
# Obviously this is meaningless and won't generate, but it should pass
|
||||
# the attribute check's validation.
|
||||
self._testValid(
|
||||
"a.mojom", """
|
||||
[JavaConstantsClassName="FakeClass",JavaPackage="org.chromium.Fake"]
|
||||
module a;
|
||||
[Stable, Extensible]
|
||||
enum Hello { [Default] kValue, kValue2, [MinVersion=2] kValue3 };
|
||||
[Native]
|
||||
enum NativeEnum {};
|
||||
[Stable,Extensible]
|
||||
union Thingy { Bar b; [Default]int32 c; Hello hi; };
|
||||
|
||||
[Stable,RenamedFrom="module.other.Foo",
|
||||
Uuid="4C178401-4B07-4C2E-9255-5401A943D0C7"]
|
||||
struct Structure { Hello hi; };
|
||||
|
||||
[ServiceSandbox=Hello.kValue,RequireContext=Hello.kValue,Stable,
|
||||
Uuid="2F17D7DD-865A-4B1C-9394-9C94E035E82F"]
|
||||
interface Foo {
|
||||
[AllowedContext=Hello.kValue]
|
||||
Foo@0(int32 a) => (int32 b);
|
||||
[MinVersion=2,Sync,UnlimitedSize,NoInterrupt]
|
||||
Bar@1(int32 b, [MinVersion=2]Structure? s) => (bool c);
|
||||
};
|
||||
|
||||
[RuntimeFeature=test.mojom.FeatureName]
|
||||
interface FooFeatureControlled {};
|
||||
|
||||
interface FooMethodFeatureControlled {
|
||||
[RuntimeFeature=test.mojom.FeatureName]
|
||||
MethodWithFeature() => (bool c);
|
||||
};
|
||||
""")
|
||||
|
||||
def testWrongModuleStable(self):
|
||||
contents = """
|
||||
// err: module cannot be Stable
|
||||
[Stable]
|
||||
module a;
|
||||
enum Hello { kValue, kValue2, kValue3 };
|
||||
enum NativeEnum {};
|
||||
struct Structure { Hello hi; };
|
||||
|
||||
interface Foo {
|
||||
Foo(int32 a) => (int32 b);
|
||||
Bar(int32 b, Structure? s) => (bool c);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents,
|
||||
'attribute Stable not allowed on module')
|
||||
|
||||
def testWrongEnumDefault(self):
|
||||
contents = """
|
||||
module a;
|
||||
// err: default should go on EnumValue not Enum.
|
||||
[Default=kValue]
|
||||
enum Hello { kValue, kValue2, kValue3 };
|
||||
enum NativeEnum {};
|
||||
struct Structure { Hello hi; };
|
||||
|
||||
interface Foo {
|
||||
Foo(int32 a) => (int32 b);
|
||||
Bar(int32 b, Structure? s) => (bool c);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents,
|
||||
'attribute Default not allowed on enum')
|
||||
|
||||
def testWrongStructMinVersion(self):
|
||||
contents = """
|
||||
module a;
|
||||
enum Hello { kValue, kValue2, kValue3 };
|
||||
enum NativeEnum {};
|
||||
// err: struct cannot have MinVersion.
|
||||
[MinVersion=2]
|
||||
struct Structure { Hello hi; };
|
||||
|
||||
interface Foo {
|
||||
Foo(int32 a) => (int32 b);
|
||||
Bar(int32 b, Structure? s) => (bool c);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents,
|
||||
'attribute MinVersion not allowed on struct')
|
||||
|
||||
def testWrongMethodRequireContext(self):
|
||||
contents = """
|
||||
module a;
|
||||
enum Hello { kValue, kValue2, kValue3 };
|
||||
enum NativeEnum {};
|
||||
struct Structure { Hello hi; };
|
||||
|
||||
interface Foo {
|
||||
// err: RequireContext is for interfaces.
|
||||
[RequireContext=Hello.kValue]
|
||||
Foo(int32 a) => (int32 b);
|
||||
Bar(int32 b, Structure? s) => (bool c);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents,
|
||||
'RequireContext not allowed on method')
|
||||
|
||||
def testWrongMethodRequireContext(self):
|
||||
# crbug.com/1230122
|
||||
contents = """
|
||||
module a;
|
||||
interface Foo {
|
||||
// err: sync not Sync.
|
||||
[sync]
|
||||
Foo(int32 a) => (int32 b);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents,
|
||||
'attribute sync not allowed.*Did you mean: Sync')
|
||||
|
||||
def testStableExtensibleEnum(self):
|
||||
# crbug.com/1193875
|
||||
contents = """
|
||||
module a;
|
||||
[Stable]
|
||||
enum Foo {
|
||||
kDefaultVal,
|
||||
kOtherVal = 2,
|
||||
};
|
||||
"""
|
||||
self._testThrows('a.mojom', contents,
|
||||
'Extensible.*?required.*?Stable.*?enum')
|
|
@ -0,0 +1,34 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Ensure no duplicate type definitions before generation."""
|
||||
|
||||
import mojom.generate.check as check
|
||||
import mojom.generate.module as module
|
||||
|
||||
|
||||
class Check(check.Check):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Check, self).__init__(*args, **kwargs)
|
||||
|
||||
def CheckModule(self):
|
||||
kinds = dict()
|
||||
for module in self.module.imports:
|
||||
for kind in module.enums + module.structs + module.unions:
|
||||
kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
|
||||
if kind_name in kinds:
|
||||
previous_module = kinds[kind_name]
|
||||
if previous_module.path != module.path:
|
||||
raise check.CheckException(
|
||||
self.module, f"multiple-definition for type {kind_name}" +
|
||||
f"(defined in both {previous_module} and {module})")
|
||||
kinds[kind_name] = kind.module
|
||||
|
||||
for kind in self.module.enums + self.module.structs + self.module.unions:
|
||||
kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
|
||||
if kind_name in kinds:
|
||||
previous_module = kinds[kind_name]
|
||||
raise check.CheckException(
|
||||
self.module, f"multiple-definition for type {kind_name}" +
|
||||
f"(previous definition in {previous_module})")
|
||||
return True
|
|
@ -0,0 +1,62 @@
|
|||
# Copyright 2023 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Validate mojo runtime feature guarded interfaces are nullable."""
|
||||
|
||||
import mojom.generate.check as check
|
||||
import mojom.generate.module as module
|
||||
|
||||
|
||||
class Check(check.Check):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Check, self).__init__(*args, **kwargs)
|
||||
|
||||
# `param` is an Interface of some sort.
|
||||
def _CheckNonNullableFeatureGuardedInterface(self, kind):
|
||||
# Only need to validate interface if it has a RuntimeFeature
|
||||
if not kind.kind.runtime_feature:
|
||||
return
|
||||
# Nullable (optional) is ok as the interface expects they might not be sent.
|
||||
if kind.is_nullable:
|
||||
return
|
||||
interface = kind.kind.mojom_name
|
||||
raise check.CheckException(
|
||||
self.module,
|
||||
f"interface {interface} has a RuntimeFeature but is not nullable")
|
||||
|
||||
# `param` can be a lot of things so check if it is a remote/receiver.
|
||||
# Array/Map must be recursed into.
|
||||
def _CheckFieldOrParam(self, kind):
|
||||
if module.IsAnyInterfaceKind(kind):
|
||||
self._CheckNonNullableFeatureGuardedInterface(kind)
|
||||
if module.IsArrayKind(kind):
|
||||
self._CheckFieldOrParam(kind.kind)
|
||||
if module.IsMapKind(kind):
|
||||
self._CheckFieldOrParam(kind.key_kind)
|
||||
self._CheckFieldOrParam(kind.value_kind)
|
||||
|
||||
def _CheckInterfaceFeatures(self, interface):
|
||||
for method in interface.methods:
|
||||
for param in method.parameters:
|
||||
self._CheckFieldOrParam(param.kind)
|
||||
if method.response_parameters:
|
||||
for param in method.response_parameters:
|
||||
self._CheckFieldOrParam(param.kind)
|
||||
|
||||
def _CheckStructFeatures(self, struct):
|
||||
for field in struct.fields:
|
||||
self._CheckFieldOrParam(field.kind)
|
||||
|
||||
def _CheckUnionFeatures(self, union):
|
||||
for field in union.fields:
|
||||
self._CheckFieldOrParam(field.kind)
|
||||
|
||||
def CheckModule(self):
|
||||
"""Validate that any runtime feature guarded interfaces that might be passed
|
||||
over mojo are nullable."""
|
||||
for interface in self.module.interfaces:
|
||||
self._CheckInterfaceFeatures(interface)
|
||||
for struct in self.module.structs:
|
||||
self._CheckStructFeatures(struct)
|
||||
for union in self.module.unions:
|
||||
self._CheckUnionFeatures(union)
|
|
@ -0,0 +1,173 @@
|
|||
# Copyright 2023 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
import mojom.generate.check as check
|
||||
from mojom_bindings_generator import LoadChecks, _Generate
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class FakeArgs:
|
||||
"""Fakes args to _Generate - intention is to do just enough to run checks"""
|
||||
def __init__(self, tester, files=None):
|
||||
""" `tester` is MojomParserTestCase for paths.
|
||||
`files` will have tester path added."""
|
||||
self.checks_string = 'features'
|
||||
self.depth = tester.GetPath('')
|
||||
self.filelist = None
|
||||
self.filename = [tester.GetPath(x) for x in files]
|
||||
self.gen_directories = tester.GetPath('gen')
|
||||
self.generators_string = ''
|
||||
self.import_directories = []
|
||||
self.output_dir = tester.GetPath('out')
|
||||
self.scrambled_message_id_salt_paths = None
|
||||
self.typemaps = []
|
||||
self.variant = 'none'
|
||||
|
||||
|
||||
class MojoBindingsCheckTest(MojomParserTestCase):
|
||||
def _ParseAndGenerate(self, mojoms):
|
||||
self.ParseMojoms(mojoms)
|
||||
args = FakeArgs(self, files=mojoms)
|
||||
_Generate(args, {})
|
||||
|
||||
def assertValid(self, filename, content):
|
||||
self.WriteFile(filename, content)
|
||||
self._ParseAndGenerate([filename])
|
||||
|
||||
def assertThrows(self, filename, content, regexp):
|
||||
mojoms = []
|
||||
self.WriteFile(filename, content)
|
||||
mojoms.append(filename)
|
||||
with self.assertRaisesRegexp(check.CheckException, regexp):
|
||||
self._ParseAndGenerate(mojoms)
|
||||
|
||||
def testLoads(self):
|
||||
"""Validate that the check is registered under the expected name."""
|
||||
check_modules = LoadChecks('features')
|
||||
self.assertTrue(check_modules['features'])
|
||||
|
||||
def testNullableOk(self):
|
||||
self.assertValid(
|
||||
"a.mojom", """
|
||||
module a;
|
||||
// Scaffolding.
|
||||
feature kFeature {
|
||||
const string name = "Hello";
|
||||
const bool enabled_state = false;
|
||||
};
|
||||
[RuntimeFeature=kFeature]
|
||||
interface Guarded {
|
||||
};
|
||||
|
||||
// Unguarded interfaces should be ok everywhere.
|
||||
interface NotGuarded { };
|
||||
|
||||
// Optional (nullable) interfaces should be ok everywhere:
|
||||
struct Bar {
|
||||
pending_remote<Guarded>? remote;
|
||||
pending_receiver<Guarded>? receiver;
|
||||
};
|
||||
union Thingy {
|
||||
pending_remote<Guarded>? remote;
|
||||
pending_receiver<Guarded>? receiver;
|
||||
};
|
||||
interface Foo {
|
||||
Foo(
|
||||
pending_remote<Guarded>? remote,
|
||||
pending_receiver<Guarded>? receiver,
|
||||
pending_associated_remote<Guarded>? a_remote,
|
||||
pending_associated_receiver<Guarded>? a_receiver,
|
||||
// Unguarded interfaces do not have to be nullable.
|
||||
pending_remote<NotGuarded> remote,
|
||||
pending_receiver<NotGuarded> receiver,
|
||||
pending_associated_remote<NotGuarded> a_remote,
|
||||
pending_associated_receiver<NotGuarded> a_receiver
|
||||
) => (
|
||||
pending_remote<Guarded>? remote,
|
||||
pending_receiver<Guarded>? receiver
|
||||
);
|
||||
Bar(array<pending_remote<Guarded>?> remote)
|
||||
=> (map<string, pending_receiver<Guarded>?> a);
|
||||
};
|
||||
""")
|
||||
|
||||
def testMethodParamsMustBeNullable(self):
|
||||
prelude = """
|
||||
module a;
|
||||
// Scaffolding.
|
||||
feature kFeature {
|
||||
const string name = "Hello";
|
||||
const bool enabled_state = false;
|
||||
};
|
||||
[RuntimeFeature=kFeature]
|
||||
interface Guarded { };
|
||||
"""
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
interface Trial {
|
||||
Method(pending_remote<Guarded> a) => ();
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
interface Trial {
|
||||
Method(bool foo) => (pending_receiver<Guarded> a);
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
interface Trial {
|
||||
Method(pending_receiver<Guarded> a) => ();
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
interface Trial {
|
||||
Method(pending_associated_remote<Guarded> a) => ();
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
interface Trial {
|
||||
Method(pending_associated_receiver<Guarded> a) => ();
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
interface Trial {
|
||||
Method(array<pending_associated_receiver<Guarded>> a) => ();
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
interface Trial {
|
||||
Method(map<string, pending_associated_receiver<Guarded>> a) => ();
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
||||
|
||||
def testStructUnionMembersMustBeNullable(self):
|
||||
prelude = """
|
||||
module a;
|
||||
// Scaffolding.
|
||||
feature kFeature {
|
||||
const string name = "Hello";
|
||||
const bool enabled_state = false;
|
||||
};
|
||||
[RuntimeFeature=kFeature]
|
||||
interface Guarded { };
|
||||
"""
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
struct Trial {
|
||||
pending_remote<Guarded> a;
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
||||
self.assertThrows(
|
||||
'a.mojom', prelude + """
|
||||
union Trial {
|
||||
pending_remote<Guarded> a;
|
||||
};
|
||||
""", 'interface Guarded has a RuntimeFeature')
|
|
@ -0,0 +1,102 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Validate RequireContext and AllowedContext annotations before generation."""
|
||||
|
||||
import mojom.generate.check as check
|
||||
import mojom.generate.module as module
|
||||
|
||||
|
||||
class Check(check.Check):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.kind_to_interfaces = dict()
|
||||
super(Check, self).__init__(*args, **kwargs)
|
||||
|
||||
def _IsPassedInterface(self, candidate):
|
||||
if isinstance(
|
||||
candidate.kind,
|
||||
(module.PendingReceiver, module.PendingRemote,
|
||||
module.PendingAssociatedReceiver, module.PendingAssociatedRemote)):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _CheckInterface(self, method, param):
|
||||
# |param| is a pending_x<Interface> so need .kind.kind to get Interface.
|
||||
interface = param.kind.kind
|
||||
if interface.require_context:
|
||||
if method.allowed_context is None:
|
||||
raise check.CheckException(
|
||||
self.module, "method `{}` has parameter `{}` which passes interface"
|
||||
" `{}` that requires an AllowedContext annotation but none exists.".
|
||||
format(
|
||||
method.mojom_name,
|
||||
param.mojom_name,
|
||||
interface.mojom_name,
|
||||
))
|
||||
# If a string was provided, or if an enum was not imported, this will
|
||||
# be a string and we cannot validate that it is in range.
|
||||
if not isinstance(method.allowed_context, module.EnumValue):
|
||||
raise check.CheckException(
|
||||
self.module,
|
||||
"method `{}` has AllowedContext={} which is not a valid enum value."
|
||||
.format(method.mojom_name, method.allowed_context))
|
||||
# EnumValue must be from the same enum to be compared.
|
||||
if interface.require_context.enum != method.allowed_context.enum:
|
||||
raise check.CheckException(
|
||||
self.module, "method `{}` has parameter `{}` which passes interface"
|
||||
" `{}` that requires AllowedContext={} but one of kind `{}` was "
|
||||
"provided.".format(
|
||||
method.mojom_name,
|
||||
param.mojom_name,
|
||||
interface.mojom_name,
|
||||
interface.require_context.enum,
|
||||
method.allowed_context.enum,
|
||||
))
|
||||
# RestrictContext enums have most privileged field first (lowest value).
|
||||
interface_value = interface.require_context.field.numeric_value
|
||||
method_value = method.allowed_context.field.numeric_value
|
||||
if interface_value < method_value:
|
||||
raise check.CheckException(
|
||||
self.module, "RequireContext={} > AllowedContext={} for method "
|
||||
"`{}` which passes interface `{}`.".format(
|
||||
interface.require_context.GetSpec(),
|
||||
method.allowed_context.GetSpec(), method.mojom_name,
|
||||
interface.mojom_name))
|
||||
return True
|
||||
|
||||
def _GatherReferencedInterfaces(self, field):
|
||||
key = field.kind.spec
|
||||
# structs/unions can nest themselves so we need to bookkeep.
|
||||
if not key in self.kind_to_interfaces:
|
||||
# Might reference ourselves so have to create the list first.
|
||||
self.kind_to_interfaces[key] = set()
|
||||
for param in field.kind.fields:
|
||||
if self._IsPassedInterface(param):
|
||||
self.kind_to_interfaces[key].add(param)
|
||||
elif isinstance(param.kind, (module.Struct, module.Union)):
|
||||
for iface in self._GatherReferencedInterfaces(param):
|
||||
self.kind_to_interfaces[key].add(iface)
|
||||
return self.kind_to_interfaces[key]
|
||||
|
||||
def _CheckParams(self, method, params):
|
||||
# Note: we have to repeat _CheckParams for each method as each might have
|
||||
# different AllowedContext= attributes. We cannot memoize this function,
|
||||
# but can do so for gathering referenced interfaces as their RequireContext
|
||||
# attributes do not change.
|
||||
for param in params:
|
||||
if self._IsPassedInterface(param):
|
||||
self._CheckInterface(method, param)
|
||||
elif isinstance(param.kind, (module.Struct, module.Union)):
|
||||
for interface in self._GatherReferencedInterfaces(param):
|
||||
self._CheckInterface(method, interface)
|
||||
|
||||
def _CheckMethod(self, method):
|
||||
if method.parameters:
|
||||
self._CheckParams(method, method.parameters)
|
||||
if method.response_parameters:
|
||||
self._CheckParams(method, method.response_parameters)
|
||||
|
||||
def CheckModule(self):
|
||||
for interface in self.module.interfaces:
|
||||
for method in interface.methods:
|
||||
self._CheckMethod(method)
|
|
@ -0,0 +1,254 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
import mojom.generate.check as check
|
||||
from mojom_bindings_generator import LoadChecks, _Generate
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
# Mojoms that we will use in multiple tests.
|
||||
basic_mojoms = {
|
||||
'level.mojom':
|
||||
"""
|
||||
module level;
|
||||
enum Level {
|
||||
kHighest,
|
||||
kMiddle,
|
||||
kLowest,
|
||||
};
|
||||
""",
|
||||
'interfaces.mojom':
|
||||
"""
|
||||
module interfaces;
|
||||
import "level.mojom";
|
||||
struct Foo {int32 bar;};
|
||||
[RequireContext=level.Level.kHighest]
|
||||
interface High {
|
||||
DoFoo(Foo foo);
|
||||
};
|
||||
[RequireContext=level.Level.kMiddle]
|
||||
interface Mid {
|
||||
DoFoo(Foo foo);
|
||||
};
|
||||
[RequireContext=level.Level.kLowest]
|
||||
interface Low {
|
||||
DoFoo(Foo foo);
|
||||
};
|
||||
"""
|
||||
}
|
||||
|
||||
|
||||
class FakeArgs:
|
||||
"""Fakes args to _Generate - intention is to do just enough to run checks"""
|
||||
|
||||
def __init__(self, tester, files=None):
|
||||
""" `tester` is MojomParserTestCase for paths.
|
||||
`files` will have tester path added."""
|
||||
self.checks_string = 'restrictions'
|
||||
self.depth = tester.GetPath('')
|
||||
self.filelist = None
|
||||
self.filename = [tester.GetPath(x) for x in files]
|
||||
self.gen_directories = tester.GetPath('gen')
|
||||
self.generators_string = ''
|
||||
self.import_directories = []
|
||||
self.output_dir = tester.GetPath('out')
|
||||
self.scrambled_message_id_salt_paths = None
|
||||
self.typemaps = []
|
||||
self.variant = 'none'
|
||||
|
||||
|
||||
class MojoBindingsCheckTest(MojomParserTestCase):
|
||||
def _WriteBasicMojoms(self):
|
||||
for filename, contents in basic_mojoms.items():
|
||||
self.WriteFile(filename, contents)
|
||||
return list(basic_mojoms.keys())
|
||||
|
||||
def _ParseAndGenerate(self, mojoms):
|
||||
self.ParseMojoms(mojoms)
|
||||
args = FakeArgs(self, files=mojoms)
|
||||
_Generate(args, {})
|
||||
|
||||
def testLoads(self):
|
||||
"""Validate that the check is registered under the expected name."""
|
||||
check_modules = LoadChecks('restrictions')
|
||||
self.assertTrue(check_modules['restrictions'])
|
||||
|
||||
def testValidAnnotations(self):
|
||||
mojoms = self._WriteBasicMojoms()
|
||||
|
||||
a = 'a.mojom'
|
||||
self.WriteFile(
|
||||
a, """
|
||||
module a;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
|
||||
interface PassesHigh {
|
||||
[AllowedContext=level.Level.kHighest]
|
||||
DoHigh(pending_receiver<interfaces.High> hi);
|
||||
};
|
||||
interface PassesMedium {
|
||||
[AllowedContext=level.Level.kMiddle]
|
||||
DoMedium(pending_receiver<interfaces.Mid> hi);
|
||||
[AllowedContext=level.Level.kMiddle]
|
||||
DoMediumRem(pending_remote<interfaces.Mid> hi);
|
||||
[AllowedContext=level.Level.kMiddle]
|
||||
DoMediumAssoc(pending_associated_receiver<interfaces.Mid> hi);
|
||||
[AllowedContext=level.Level.kMiddle]
|
||||
DoMediumAssocRem(pending_associated_remote<interfaces.Mid> hi);
|
||||
};
|
||||
interface PassesLow {
|
||||
[AllowedContext=level.Level.kLowest]
|
||||
DoLow(pending_receiver<interfaces.Low> hi);
|
||||
};
|
||||
|
||||
struct One { pending_receiver<interfaces.High> hi; };
|
||||
struct Two { One one; };
|
||||
interface PassesNestedHigh {
|
||||
[AllowedContext=level.Level.kHighest]
|
||||
DoNestedHigh(Two two);
|
||||
};
|
||||
|
||||
// Allowed as PassesHigh is not itself restricted.
|
||||
interface PassesPassesHigh {
|
||||
DoPass(pending_receiver<PassesHigh> hiho);
|
||||
};
|
||||
""")
|
||||
mojoms.append(a)
|
||||
self._ParseAndGenerate(mojoms)
|
||||
|
||||
def _testThrows(self, filename, content, regexp):
|
||||
mojoms = self._WriteBasicMojoms()
|
||||
self.WriteFile(filename, content)
|
||||
mojoms.append(filename)
|
||||
with self.assertRaisesRegexp(check.CheckException, regexp):
|
||||
self._ParseAndGenerate(mojoms)
|
||||
|
||||
def testMissingAnnotation(self):
|
||||
contents = """
|
||||
module b;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
|
||||
interface PassesHigh {
|
||||
// err: missing annotation.
|
||||
DoHigh(pending_receiver<interfaces.High> hi);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
|
||||
|
||||
def testAllowTooLow(self):
|
||||
contents = """
|
||||
module b;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
|
||||
interface PassesHigh {
|
||||
// err: level is worse than required.
|
||||
[AllowedContext=level.Level.kMiddle]
|
||||
DoHigh(pending_receiver<interfaces.High> hi);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents,
|
||||
'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')
|
||||
|
||||
def testWrongEnumInAllow(self):
|
||||
contents = """
|
||||
module b;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
enum Blah {
|
||||
kZero,
|
||||
};
|
||||
interface PassesHigh {
|
||||
// err: different enums.
|
||||
[AllowedContext=Blah.kZero]
|
||||
DoHigh(pending_receiver<interfaces.High> hi);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents, 'but one of kind')
|
||||
|
||||
def testNotAnEnumInAllow(self):
|
||||
contents = """
|
||||
module b;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
interface PassesHigh {
|
||||
// err: not an enum.
|
||||
[AllowedContext=doopdedoo.mojom.kWhatever]
|
||||
DoHigh(pending_receiver<interfaces.High> hi);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents, 'not a valid enum value')
|
||||
|
||||
def testMissingAllowedForNestedStructs(self):
|
||||
contents = """
|
||||
module b;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
struct One { pending_receiver<interfaces.High> hi; };
|
||||
struct Two { One one; };
|
||||
interface PassesNestedHigh {
|
||||
// err: missing annotation.
|
||||
DoNestedHigh(Two two);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
|
||||
|
||||
def testMissingAllowedForNestedUnions(self):
|
||||
contents = """
|
||||
module b;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
struct One { pending_receiver<interfaces.High> hi; };
|
||||
struct Two { One one; };
|
||||
union Three {One one; Two two; };
|
||||
interface PassesNestedHigh {
|
||||
// err: missing annotation.
|
||||
DoNestedHigh(Three three);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents, 'require.*?AllowedContext')
|
||||
|
||||
def testMultipleInterfacesThrows(self):
|
||||
contents = """
|
||||
module b;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
struct One { pending_receiver<interfaces.High> hi; };
|
||||
interface PassesMultipleInterfaces {
|
||||
[AllowedContext=level.Level.kMiddle]
|
||||
DoMultiple(
|
||||
pending_remote<interfaces.Mid> mid,
|
||||
pending_receiver<interfaces.High> hi,
|
||||
One one
|
||||
);
|
||||
};
|
||||
"""
|
||||
self._testThrows('b.mojom', contents,
|
||||
'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')
|
||||
|
||||
def testMultipleInterfacesAllowed(self):
|
||||
"""Multiple interfaces can be passed, all satisfy the level."""
|
||||
mojoms = self._WriteBasicMojoms()
|
||||
|
||||
b = "b.mojom"
|
||||
self.WriteFile(
|
||||
b, """
|
||||
module b;
|
||||
import "level.mojom";
|
||||
import "interfaces.mojom";
|
||||
struct One { pending_receiver<interfaces.High> hi; };
|
||||
interface PassesMultipleInterfaces {
|
||||
[AllowedContext=level.Level.kHighest]
|
||||
DoMultiple(
|
||||
pending_receiver<interfaces.High> hi,
|
||||
pending_remote<interfaces.Mid> mid,
|
||||
One one
|
||||
);
|
||||
};
|
||||
""")
|
||||
mojoms.append(b)
|
||||
self._ParseAndGenerate(mojoms)
|
55
utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py
Executable file
55
utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py
Executable file
|
@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2019 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
#
|
||||
# This utility concatenates several files into one. On Unix-like systems
|
||||
# it is equivalent to:
|
||||
# cat file1 file2 file3 ...files... > target
|
||||
#
|
||||
# The reason for writing a separate utility is that 'cat' is not available
|
||||
# on all supported build platforms, but Python is, and hence this provides
|
||||
# us with an easy and uniform way of doing this on all platforms.
|
||||
|
||||
# for py2/py3 compatibility
|
||||
from __future__ import print_function
|
||||
|
||||
import optparse
|
||||
import sys
|
||||
|
||||
|
||||
def Concatenate(filenames):
|
||||
"""Concatenate files.
|
||||
|
||||
Args:
|
||||
files: Array of file names.
|
||||
The last name is the target; all earlier ones are sources.
|
||||
|
||||
Returns:
|
||||
True, if the operation was successful.
|
||||
"""
|
||||
if len(filenames) < 2:
|
||||
print("An error occurred generating %s:\nNothing to do." % filenames[-1])
|
||||
return False
|
||||
|
||||
try:
|
||||
with open(filenames[-1], "wb") as target:
|
||||
for filename in filenames[:-1]:
|
||||
with open(filename, "rb") as current:
|
||||
target.write(current.read())
|
||||
return True
|
||||
except IOError as e:
|
||||
print("An error occurred when writing %s:\n%s" % (filenames[-1], e))
|
||||
return False
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser()
|
||||
parser.set_usage("""Concatenate several files into one.
|
||||
Equivalent to: cat file1 ... > target.""")
|
||||
(_options, args) = parser.parse_args()
|
||||
sys.exit(0 if Concatenate(args) else 1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -0,0 +1,75 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2018 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Simple utility which concatenates a set of files into a single output file
|
||||
while also stripping any goog.provide or goog.require lines. This allows us to
|
||||
provide a very primitive sort of "compilation" without any extra toolchain
|
||||
support and without having to modify otherwise compilable sources in the tree
|
||||
which use these directives.
|
||||
|
||||
goog.provide lines are replaced with an equivalent invocation of
|
||||
mojo.internal.exportModule, which accomplishes essentially the same thing in an
|
||||
uncompiled context. A singular exception is made for the 'mojo.internal' export,
|
||||
which is instead replaced with an inlined assignment to initialize the
|
||||
namespace.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import optparse
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
_MOJO_INTERNAL_MODULE_NAME = "mojo.internal"
|
||||
_MOJO_EXPORT_MODULE_SYMBOL = "mojo.internal.exportModule"
|
||||
|
||||
|
||||
def FilterLine(filename, line, output):
|
||||
if line.startswith("goog.require"):
|
||||
return
|
||||
|
||||
if line.startswith("goog.provide"):
|
||||
match = re.match(r"goog.provide\('([^']+)'\);", line)
|
||||
if not match:
|
||||
print("Invalid goog.provide line in %s:\n%s" % (filename, line))
|
||||
sys.exit(1)
|
||||
|
||||
module_name = match.group(1)
|
||||
if module_name == _MOJO_INTERNAL_MODULE_NAME:
|
||||
output.write("self.mojo = { internal: {} };")
|
||||
else:
|
||||
output.write("%s('%s');\n" % (_MOJO_EXPORT_MODULE_SYMBOL, module_name))
|
||||
return
|
||||
|
||||
output.write(line)
|
||||
|
||||
def ConcatenateAndReplaceExports(filenames):
|
||||
if (len(filenames) < 2):
|
||||
print("At least two filenames (one input and the output) are required.")
|
||||
return False
|
||||
|
||||
try:
|
||||
with open(filenames[-1], "w") as target:
|
||||
for filename in filenames[:-1]:
|
||||
with open(filename, "r") as current:
|
||||
for line in current.readlines():
|
||||
FilterLine(filename, line, target)
|
||||
return True
|
||||
except IOError as e:
|
||||
print("Error generating %s\n: %s" % (filenames[-1], e))
|
||||
return False
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser()
|
||||
parser.set_usage("""file1 [file2...] outfile
|
||||
Concatenate several files into one, stripping Closure provide and
|
||||
require directives along the way.""")
|
||||
(_, args) = parser.parse_args()
|
||||
sys.exit(0 if ConcatenateAndReplaceExports(args) else 1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -0,0 +1,48 @@
|
|||
# Copyright 2017 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Generates a list of all files in a directory.
|
||||
|
||||
This script takes in a directory and an output file name as input.
|
||||
It then reads the directory and creates a list of all file names
|
||||
in that directory. The list is written to the output file.
|
||||
There is also an option to pass in '-p' or '--pattern'
|
||||
which will check each file name against a regular expression
|
||||
pattern that is passed in. Only files which match the regex
|
||||
will be written to the list.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
sys.path.insert(
|
||||
0,
|
||||
os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||
|
||||
from mojom.generate.generator import WriteFile
|
||||
|
||||
|
||||
def main():
|
||||
parser = OptionParser()
|
||||
parser.add_option('-d', '--directory', help='Read files from DIRECTORY')
|
||||
parser.add_option('-o', '--output', help='Write list to FILE')
|
||||
parser.add_option('-p',
|
||||
'--pattern',
|
||||
help='Only reads files that name matches PATTERN',
|
||||
default=".")
|
||||
(options, _) = parser.parse_args()
|
||||
pattern = re.compile(options.pattern)
|
||||
files = [f for f in os.listdir(options.directory) if pattern.match(f)]
|
||||
|
||||
contents = '\n'.join(f for f in files) + '\n'
|
||||
WriteFile(contents, options.output)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
135
utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py
Executable file
135
utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py
Executable file
|
@ -0,0 +1,135 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2016 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Generates a JSON typemap from its command-line arguments and dependencies.
|
||||
|
||||
Each typemap should be specified in an command-line argument of the form
|
||||
key=value, with an argument of "--start-typemap" preceding each typemap.
|
||||
|
||||
For example,
|
||||
generate_type_mappings.py --output=foo.typemap --start-typemap \\
|
||||
public_headers=foo.h traits_headers=foo_traits.h \\
|
||||
type_mappings=mojom.Foo=FooImpl
|
||||
|
||||
generates a foo.typemap containing
|
||||
{
|
||||
"c++": {
|
||||
"mojom.Foo": {
|
||||
"typename": "FooImpl",
|
||||
"traits_headers": [
|
||||
"foo_traits.h"
|
||||
],
|
||||
"public_headers": [
|
||||
"foo.h"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Then,
|
||||
generate_type_mappings.py --dependency foo.typemap --output=bar.typemap \\
|
||||
--start-typemap public_headers=bar.h traits_headers=bar_traits.h \\
|
||||
type_mappings=mojom.Bar=BarImpl
|
||||
|
||||
generates a bar.typemap containing
|
||||
{
|
||||
"c++": {
|
||||
"mojom.Bar": {
|
||||
"typename": "BarImpl",
|
||||
"traits_headers": [
|
||||
"bar_traits.h"
|
||||
],
|
||||
"public_headers": [
|
||||
"bar.h"
|
||||
]
|
||||
},
|
||||
"mojom.Foo": {
|
||||
"typename": "FooImpl",
|
||||
"traits_headers": [
|
||||
"foo_traits.h"
|
||||
],
|
||||
"public_headers": [
|
||||
"foo.h"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
sys.path.insert(
|
||||
0,
|
||||
os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||
|
||||
from mojom.generate.generator import WriteFile
|
||||
|
||||
def ReadTypemap(path):
|
||||
with open(path) as f:
|
||||
return json.load(f)['c++']
|
||||
|
||||
|
||||
def LoadCppTypemapConfig(path):
|
||||
configs = {}
|
||||
with open(path) as f:
|
||||
for config in json.load(f):
|
||||
for entry in config['types']:
|
||||
configs[entry['mojom']] = {
|
||||
'typename': entry['cpp'],
|
||||
'forward_declaration': entry.get('forward_declaration', None),
|
||||
'public_headers': config.get('traits_headers', []),
|
||||
'traits_headers': config.get('traits_private_headers', []),
|
||||
'copyable_pass_by_value': entry.get('copyable_pass_by_value',
|
||||
False),
|
||||
'default_constructible': entry.get('default_constructible', True),
|
||||
'force_serialize': entry.get('force_serialize', False),
|
||||
'hashable': entry.get('hashable', False),
|
||||
'move_only': entry.get('move_only', False),
|
||||
'nullable_is_same_type': entry.get('nullable_is_same_type', False),
|
||||
'non_copyable_non_movable': False,
|
||||
}
|
||||
return configs
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description=__doc__,
|
||||
formatter_class=argparse.RawDescriptionHelpFormatter)
|
||||
parser.add_argument(
|
||||
'--dependency',
|
||||
type=str,
|
||||
action='append',
|
||||
default=[],
|
||||
help=('A path to another JSON typemap to merge into the output. '
|
||||
'This may be repeated to merge multiple typemaps.'))
|
||||
parser.add_argument(
|
||||
'--cpp-typemap-config',
|
||||
type=str,
|
||||
action='store',
|
||||
dest='cpp_config_path',
|
||||
help=('A path to a single JSON-formatted typemap config as emitted by'
|
||||
'GN when processing a mojom_cpp_typemap build rule.'))
|
||||
parser.add_argument('--output',
|
||||
type=str,
|
||||
required=True,
|
||||
help='The path to which to write the generated JSON.')
|
||||
params, _ = parser.parse_known_args()
|
||||
typemaps = {}
|
||||
if params.cpp_config_path:
|
||||
typemaps = LoadCppTypemapConfig(params.cpp_config_path)
|
||||
missing = [path for path in params.dependency if not os.path.exists(path)]
|
||||
if missing:
|
||||
raise IOError('Missing dependencies: %s' % ', '.join(missing))
|
||||
for path in params.dependency:
|
||||
typemaps.update(ReadTypemap(path))
|
||||
|
||||
WriteFile(json.dumps({'c++': typemaps}, indent=2), params.output)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
47
utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py
Executable file
47
utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py
Executable file
|
@ -0,0 +1,47 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2023 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
#
|
||||
# This utility minifies JS files with terser.
|
||||
#
|
||||
# Instance of 'node' has no 'RunNode' member (no-member)
|
||||
# pylint: disable=no-member
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
_HERE_PATH = os.path.dirname(__file__)
|
||||
_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
|
||||
_CWD = os.getcwd()
|
||||
sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
|
||||
import node
|
||||
import node_modules
|
||||
|
||||
|
||||
def MinifyFile(input_file, output_file):
|
||||
node.RunNode([
|
||||
node_modules.PathToTerser(), input_file, '--mangle', '--compress',
|
||||
'--comments', 'false', '--output', output_file
|
||||
])
|
||||
|
||||
|
||||
def main(argv):
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('--input', required=True)
|
||||
parser.add_argument('--output', required=True)
|
||||
args = parser.parse_args(argv)
|
||||
|
||||
# Delete the output file if it already exists. It may be a sym link to the
|
||||
# input, because in non-optimized/pre-Terser builds the input file is copied
|
||||
# to the output location with gn copy().
|
||||
out_path = os.path.join(_CWD, args.output)
|
||||
if (os.path.exists(out_path)):
|
||||
os.remove(out_path)
|
||||
|
||||
MinifyFile(os.path.join(_CWD, args.input), out_path)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main(sys.argv[1:])
|
2118
utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni
Normal file
2118
utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni
Normal file
File diff suppressed because it is too large
Load diff
424
utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
Executable file
424
utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
Executable file
|
@ -0,0 +1,424 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""The frontend for the Mojo bindings system."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
|
||||
import hashlib
|
||||
import importlib
|
||||
import json
|
||||
import os
|
||||
import pprint
|
||||
import re
|
||||
import struct
|
||||
import sys
|
||||
|
||||
# Disable lint check for finding modules:
|
||||
# pylint: disable=F0401
|
||||
|
||||
def _GetDirAbove(dirname):
|
||||
"""Returns the directory "above" this file containing |dirname| (which must
|
||||
also be "above" this file)."""
|
||||
path = os.path.abspath(__file__)
|
||||
while True:
|
||||
path, tail = os.path.split(path)
|
||||
assert tail
|
||||
if tail == dirname:
|
||||
return path
|
||||
|
||||
|
||||
sys.path.insert(
|
||||
0,
|
||||
os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||
|
||||
from mojom.error import Error
|
||||
import mojom.fileutil as fileutil
|
||||
from mojom.generate.module import Module
|
||||
from mojom.generate import template_expander
|
||||
from mojom.generate import translate
|
||||
from mojom.generate.generator import WriteFile
|
||||
|
||||
sys.path.append(
|
||||
os.path.join(_GetDirAbove("mojo"), "tools", "diagnosis"))
|
||||
import crbug_1001171
|
||||
|
||||
|
||||
_BUILTIN_GENERATORS = {
|
||||
"c++": "mojom_cpp_generator",
|
||||
"javascript": "mojom_js_generator",
|
||||
"java": "mojom_java_generator",
|
||||
"mojolpm": "mojom_mojolpm_generator",
|
||||
"typescript": "mojom_ts_generator",
|
||||
}
|
||||
|
||||
_BUILTIN_CHECKS = {
|
||||
"attributes": "mojom_attributes_check",
|
||||
"definitions": "mojom_definitions_check",
|
||||
"features": "mojom_interface_feature_check",
|
||||
"restrictions": "mojom_restrictions_check",
|
||||
}
|
||||
|
||||
|
||||
def LoadGenerators(generators_string):
  """Import the generator module for each comma-separated name in
  |generators_string|.

  Returns a dict mapping each lower-cased language name to its imported
  generator module. Exits the process on an unknown generator name.
  """
  generators = {}
  if not generators_string:
    return generators  # No generators.

  for raw_name in generators_string.split(","):
    language = raw_name.strip().lower()
    if language not in _BUILTIN_GENERATORS:
      print("Unknown generator name %s" % raw_name.strip())
      sys.exit(1)
    generators[language] = importlib.import_module(
        "generators.%s" % _BUILTIN_GENERATORS[language])
  return generators
|
||||
|
||||
|
||||
def LoadChecks(checks_string):
  """Import the check module for each comma-separated name in |checks_string|.

  Returns a dict mapping each lower-cased check name to its imported module.
  Exits the process on an unknown check name.
  """
  checks = {}
  if not checks_string:
    return checks  # No checks.

  for raw_name in checks_string.split(","):
    check = raw_name.strip().lower()
    if check not in _BUILTIN_CHECKS:
      print("Unknown check name %s" % raw_name.strip())
      sys.exit(1)
    checks[check] = importlib.import_module(
        "checks.%s" % _BUILTIN_CHECKS[check])
  return checks
|
||||
|
||||
|
||||
def MakeImportStackMessage(imported_filename_stack):
  """Make a (human-readable) message listing a chain of imports. (Returned
  string begins with a newline (if nonempty) and does not end with one.)"""
  # Pair each imported file with its importer, then report the chain from the
  # innermost import outwards.
  pairs = zip(imported_filename_stack[1:], imported_filename_stack)
  messages = ["\n %s was imported by %s" % (child, parent)
              for child, parent in pairs]
  messages.reverse()
  return ''.join(messages)
|
||||
|
||||
|
||||
class RelativePath:
  """Represents a path relative to the source tree or generated output dir."""

  def __init__(self, path, source_root, output_dir):
    """Classify |path| under |source_root| or |output_dir| (checked in that
    order); raises for a path under neither root."""
    self.path = path
    for candidate_root in (source_root, output_dir):
      if path.startswith(candidate_root):
        self.root = candidate_root
        break
    else:
      raise Exception("Invalid input path %s" % path)

  def relative_path(self):
    """Return |path| relative to the root it was classified under."""
    return os.path.relpath(os.path.abspath(self.path),
                           os.path.abspath(self.root))
|
||||
|
||||
|
||||
def _GetModulePath(path, output_dir):
  # Map a mojom's RelativePath to the location of its serialized Module
  # ('-module' file) under |output_dir|.
  return os.path.join(output_dir, path.relative_path() + '-module')
|
||||
|
||||
|
||||
def ScrambleMethodOrdinals(interfaces, salt):
  """Assign pseudo-random ordinals, derived from |salt|, to every method of
  each interface that does not carry an explicit ordinal.

  Sets method.ordinal and method.ordinal_comment in place.
  """
  used_ordinals = set()
  for interface in interfaces:
    attempt = 0
    used_ordinals.clear()
    for method in interface.methods:
      if method.explicit_ordinal is not None:
        continue
      while True:
        attempt = attempt + 1
        if attempt == 1000000:
          raise Exception("Could not generate %d method ordinals for %s" %
                          (len(interface.methods), interface.mojom_name))
        # The scrambling algorithm doesn't have to be cryptographically
        # strong. It just needs to be non-trivial to guess without the secret
        # salt, in order to make it harder for a compromised process to send
        # fake Mojo messages.
        digest = hashlib.sha256(salt)
        digest.update(interface.mojom_name.encode('utf-8'))
        digest.update(str(attempt).encode('utf-8'))
        # First 4 bytes as a little-endian uint32, trimmed to 31 bits so the
        # value always fits into a Java (signed) int.
        ordinal = struct.unpack('<L', digest.digest()[:4])[0] & 0x7fffffff
        if ordinal in used_ordinals:
          continue
        used_ordinals.add(ordinal)
        method.ordinal = ordinal
        method.ordinal_comment = (
            'The %s value is based on sha256(salt + "%s%d").' %
            (ordinal, interface.mojom_name, attempt))
        break
|
||||
|
||||
|
||||
def ReadFileContents(filename):
  # Return the entire contents of |filename| as bytes.
  with open(filename, 'rb') as f:
    return f.read()
|
||||
|
||||
|
||||
class MojomProcessor:
  """Takes parsed mojom modules and generates language bindings from them.

  Attributes:
    _processed_files: {Dict[str, mojom.generate.module.Module]} Mapping from
        relative mojom filename paths to the module AST for that mojom file.
  """
  def __init__(self, should_generate):
    # |should_generate| is a predicate taking a relative mojom path; only
    # paths for which it returns True have bindings generated (imported
    # dependencies are loaded but skipped).
    self._should_generate = should_generate
    self._processed_files = {}
    self._typemap = {}

  def LoadTypemaps(self, typemaps):
    # Merge each typemap JSON file into self._typemap, keyed by language.
    # Support some very simple single-line comments in typemap JSON.
    comment_expr = r"^\s*//.*$"
    def no_comments(line):
      return not re.match(comment_expr, line)
    for filename in typemaps:
      with open(filename) as f:
        # NOTE(review): this rebinds the name |typemaps| (the parameter) to
        # the parsed JSON. Iteration is unaffected (the iterator was already
        # taken), but the shadowing is a readability hazard.
        typemaps = json.loads("".join(filter(no_comments, f.readlines())))
        for language, typemap in typemaps.items():
          language_map = self._typemap.get(language, {})
          language_map.update(typemap)
          self._typemap[language] = language_map
    # mojolpm reuses the C++ typemap wholesale.
    if 'c++' in self._typemap:
      self._typemap['mojolpm'] = self._typemap['c++']

  def _GenerateModule(self, args, remaining_args, check_modules,
                      generator_modules, rel_filename, imported_filename_stack):
    # Return the already-generated module.
    if rel_filename.path in self._processed_files:
      return self._processed_files[rel_filename.path]

    # A file appearing in its own import chain means a circular dependency.
    if rel_filename.path in imported_filename_stack:
      print("%s: Error: Circular dependency" % rel_filename.path + \
          MakeImportStackMessage(imported_filename_stack + [rel_filename.path]))
      sys.exit(1)

    # Load the serialized module produced earlier by the parser.
    module_path = _GetModulePath(rel_filename, args.output_dir)
    with open(module_path, 'rb') as f:
      module = Module.Load(f)

    if args.scrambled_message_id_salt_paths:
      # Concatenate all salt files to form the scrambling salt.
      salt = b''.join(
          map(ReadFileContents, args.scrambled_message_id_salt_paths))
      ScrambleMethodOrdinals(module.interfaces, salt)

    if self._should_generate(rel_filename.path):
      # Run checks on module first.
      for check_module in check_modules.values():
        checker = check_module.Check(module)
        checker.CheckModule()
      # Then run generation.
      for language, generator_module in generator_modules.items():
        generator = generator_module.Generator(
            module, args.output_dir, typemap=self._typemap.get(language, {}),
            variant=args.variant, bytecode_path=args.bytecode_path,
            for_blink=args.for_blink,
            js_generate_struct_deserializers=\
                args.js_generate_struct_deserializers,
            export_attribute=args.export_attribute,
            export_header=args.export_header,
            generate_non_variant_code=args.generate_non_variant_code,
            support_lazy_serialization=args.support_lazy_serialization,
            disallow_native_types=args.disallow_native_types,
            disallow_interfaces=args.disallow_interfaces,
            generate_message_ids=args.generate_message_ids,
            generate_fuzzing=args.generate_fuzzing,
            enable_kythe_annotations=args.enable_kythe_annotations,
            extra_cpp_template_paths=args.extra_cpp_template_paths,
            generate_extra_cpp_only=args.generate_extra_cpp_only)
        # Generators may declare a GENERATOR_PREFIX; forward only the
        # unrecognized command-line flags carrying that prefix.
        filtered_args = []
        if hasattr(generator_module, 'GENERATOR_PREFIX'):
          prefix = '--' + generator_module.GENERATOR_PREFIX + '_'
          filtered_args = [arg for arg in remaining_args
                           if arg.startswith(prefix)]
        generator.GenerateFiles(filtered_args)

    # Save result.
    self._processed_files[rel_filename.path] = module
    return module
|
||||
|
||||
|
||||
def _Generate(args, remaining_args):
  """Entry point of the 'generate' subcommand: generate bindings for every
  requested mojom file.

  |remaining_args| holds flags argparse did not recognize; they may be
  consumed by individual generators (see GENERATOR_PREFIX filtering).
  Returns 0 on success.
  """
  if args.variant == "none":
    args.variant = None

  for idx, import_dir in enumerate(args.import_directories):
    # An import directory may be given as "path:depth"; without an explicit
    # depth, the global --depth value is used.
    tokens = import_dir.split(":")
    if len(tokens) >= 2:
      args.import_directories[idx] = RelativePath(tokens[0], tokens[1],
                                                  args.output_dir)
    else:
      args.import_directories[idx] = RelativePath(tokens[0], args.depth,
                                                  args.output_dir)
  generator_modules = LoadGenerators(args.generators_string)
  check_modules = LoadChecks(args.checks_string)

  fileutil.EnsureDirectoryExists(args.output_dir)

  # Only the mojoms named on the command line (or in --filelist) get bindings
  # generated; imported dependencies are loaded but skipped.
  processor = MojomProcessor(lambda filename: filename in args.filename)
  processor.LoadTypemaps(set(args.typemaps))

  if args.filelist:
    with open(args.filelist) as f:
      args.filename.extend(f.read().split())

  for filename in args.filename:
    processor._GenerateModule(
        args, remaining_args, check_modules, generator_modules,
        RelativePath(filename, args.depth, args.output_dir), [])

  return 0
|
||||
|
||||
|
||||
def _Precompile(args, _):
  """Entry point of the 'precompile' subcommand: precompile the templates of
  every built-in generator into bytecode under args.output_dir."""
  generator_modules = LoadGenerators(",".join(_BUILTIN_GENERATORS.keys()))

  template_expander.PrecompileTemplates(generator_modules, args.output_dir)
  return 0
|
||||
|
||||
|
||||
def main():
  """Parse the command line and dispatch to the 'generate' or 'precompile'
  subcommand; returns the subcommand's exit code."""
  parser = argparse.ArgumentParser(
      description="Generate bindings from mojom files.")
  parser.add_argument("--use_bundled_pylibs", action="store_true",
                      help="use Python modules bundled in the SDK")
  parser.add_argument(
      "-o",
      "--output_dir",
      dest="output_dir",
      default=".",
      help="output directory for generated files")

  subparsers = parser.add_subparsers()

  # --- 'generate' subcommand -------------------------------------------------
  # NOTE(review): several multi-line help strings below are concatenated
  # without separating spaces (e.g. "used as" + "a salt"); cosmetic only, so
  # left untouched here.
  generate_parser = subparsers.add_parser(
      "generate", description="Generate bindings from mojom files.")
  generate_parser.add_argument("filename", nargs="*",
                               help="mojom input file")
  generate_parser.add_argument("--filelist", help="mojom input file list")
  generate_parser.add_argument("-d", "--depth", dest="depth", default=".",
                               help="depth from source root")
  generate_parser.add_argument("-g",
                               "--generators",
                               dest="generators_string",
                               metavar="GENERATORS",
                               default="c++,javascript,java,mojolpm",
                               help="comma-separated list of generators")
  generate_parser.add_argument("-c",
                               "--checks",
                               dest="checks_string",
                               metavar="CHECKS",
                               default=",".join(_BUILTIN_CHECKS.keys()),
                               help="comma-separated list of checks")
  generate_parser.add_argument(
      "--gen_dir", dest="gen_directories", action="append", metavar="directory",
      default=[], help="add a directory to be searched for the syntax trees.")
  generate_parser.add_argument(
      "-I", dest="import_directories", action="append", metavar="directory",
      default=[],
      help="add a directory to be searched for import files. The depth from "
      "source root can be specified for each import by appending it after "
      "a colon")
  generate_parser.add_argument("--typemap", action="append", metavar="TYPEMAP",
                               default=[], dest="typemaps",
                               help="apply TYPEMAP to generated output")
  generate_parser.add_argument("--variant", dest="variant", default=None,
                               help="output a named variant of the bindings")
  generate_parser.add_argument(
      "--bytecode_path", required=True, help=(
          "the path from which to load template bytecode; to generate template "
          "bytecode, run %s precompile BYTECODE_PATH" % os.path.basename(
              sys.argv[0])))
  generate_parser.add_argument("--for_blink", action="store_true",
                               help="Use WTF types as generated types for mojo "
                               "string/array/map.")
  generate_parser.add_argument(
      "--js_generate_struct_deserializers", action="store_true",
      help="Generate javascript deserialize methods for structs in "
      "mojom-lite.js file")
  generate_parser.add_argument(
      "--export_attribute", default="",
      help="Optional attribute to specify on class declaration to export it "
      "for the component build.")
  generate_parser.add_argument(
      "--export_header", default="",
      help="Optional header to include in the generated headers to support the "
      "component build.")
  generate_parser.add_argument(
      "--generate_non_variant_code", action="store_true",
      help="Generate code that is shared by different variants.")
  generate_parser.add_argument(
      "--scrambled_message_id_salt_path",
      dest="scrambled_message_id_salt_paths",
      help="If non-empty, the path to a file whose contents should be used as"
      "a salt for generating scrambled message IDs. If this switch is specified"
      "more than once, the contents of all salt files are concatenated to form"
      "the salt value.", default=[], action="append")
  generate_parser.add_argument(
      "--support_lazy_serialization",
      help="If set, generated bindings will serialize lazily when possible.",
      action="store_true")
  generate_parser.add_argument(
      "--extra_cpp_template_paths",
      dest="extra_cpp_template_paths",
      action="append",
      metavar="path_to_template",
      default=[],
      help="Provide a path to a new template (.tmpl) that is used to generate "
      "additional C++ source/header files ")
  generate_parser.add_argument(
      "--generate_extra_cpp_only",
      help="If set and extra_cpp_template_paths provided, will only generate"
      "extra_cpp_template related C++ bindings",
      action="store_true")
  generate_parser.add_argument(
      "--disallow_native_types",
      help="Disallows the [Native] attribute to be specified on structs or "
      "enums within the mojom file.", action="store_true")
  generate_parser.add_argument(
      "--disallow_interfaces",
      help="Disallows interface definitions within the mojom file. It is an "
      "error to specify this flag when processing a mojom file which defines "
      "any interface.", action="store_true")
  generate_parser.add_argument(
      "--generate_message_ids",
      help="Generates only the message IDs header for C++ bindings. Note that "
      "this flag only matters if --generate_non_variant_code is also "
      "specified.", action="store_true")
  generate_parser.add_argument(
      "--generate_fuzzing",
      action="store_true",
      help="Generates additional bindings for fuzzing in JS.")
  generate_parser.add_argument(
      "--enable_kythe_annotations",
      action="store_true",
      help="Adds annotations for kythe metadata generation.")

  generate_parser.set_defaults(func=_Generate)

  # --- 'precompile' subcommand -----------------------------------------------
  precompile_parser = subparsers.add_parser("precompile",
      description="Precompile templates for the mojom bindings generator.")
  precompile_parser.set_defaults(func=_Precompile)

  # Unrecognized flags are passed through to the selected subcommand, which
  # may forward them to individual generators.
  args, remaining_args = parser.parse_known_args()
  return args.func(args, remaining_args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
  with crbug_1001171.DumpStateOnLookupError():
    ret = main()
  # Exit without running GC, which can save multiple seconds due to the large
  # number of objects created. But flush is necessary as os._exit doesn't do
  # that.
  sys.stdout.flush()
  sys.stderr.flush()
  os._exit(ret)
|
|
@ -0,0 +1,62 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from mojom_bindings_generator import MakeImportStackMessage
|
||||
from mojom_bindings_generator import ScrambleMethodOrdinals
|
||||
|
||||
|
||||
class FakeIface:
  """Minimal stand-in for a mojom interface: just a name and a method list,
  populated directly by the tests."""

  def __init__(self):
    self.mojom_name = None
    self.methods = None
|
||||
|
||||
|
||||
class FakeMethod:
  """Minimal stand-in for a mojom method, carrying only ordinal state."""

  def __init__(self, explicit_ordinal=None):
    # An explicit ordinal pre-populates |ordinal| and must never be scrambled.
    self.explicit_ordinal = explicit_ordinal
    self.ordinal = explicit_ordinal
    self.ordinal_comment = None
||||
|
||||
|
||||
class MojoBindingsGeneratorTest(unittest.TestCase):
  """Tests mojo_bindings_generator."""

  def testMakeImportStackMessage(self):
    """Tests MakeImportStackMessage()."""
    # A single-element stack has no importer, so the message is empty.
    self.assertEqual(MakeImportStackMessage(["x"]), "")
    self.assertEqual(MakeImportStackMessage(["x", "y"]),
                     "\n y was imported by x")
    self.assertEqual(MakeImportStackMessage(["x", "y", "z"]),
                     "\n z was imported by y\n y was imported by x")

  def testScrambleMethodOrdinals(self):
    """Tests ScrambleMethodOrdinals()."""
    interface = FakeIface()
    interface.mojom_name = 'RendererConfiguration'
    interface.methods = [
        FakeMethod(),
        FakeMethod(),
        FakeMethod(),
        FakeMethod(explicit_ordinal=42)
    ]
    ScrambleMethodOrdinals([interface], "foo".encode('utf-8'))
    # These next three values are hard-coded. If the generation algorithm
    # changes from being based on sha256(seed + interface.name + str(i)) then
    # these numbers will obviously need to change too.
    #
    # Note that hashlib.sha256('fooRendererConfiguration1').digest()[:4] is
    # '\xa5\xbc\xf9\xca' and that hex(1257880741) = '0x4af9bca5'. The
    # difference in 0x4a vs 0xca is because we only take 31 bits.
    self.assertEqual(interface.methods[0].ordinal, 1257880741)
    self.assertEqual(interface.methods[1].ordinal, 631133653)
    self.assertEqual(interface.methods[2].ordinal, 549336076)

    # Explicit method ordinals should not be scrambled.
    self.assertEqual(interface.methods[3].ordinal, 42)


if __name__ == "__main__":
  unittest.main()
|
58
utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py
Executable file
58
utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py
Executable file
|
@ -0,0 +1,58 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
def CheckCppTypemapConfigs(target_name, config_filename, out_filename):
  """Validate the JSON typemap config in |config_filename| for |target_name|.

  Raises ValueError for an unknown top-level key or an empty type list, and
  IOError for an unknown per-type key. On success, creates/truncates
  |out_filename| as a stamp file.
  """
  _SUPPORTED_CONFIG_KEYS = set([
      'types', 'traits_headers', 'traits_private_headers', 'traits_sources',
      'traits_deps', 'traits_public_deps'
  ])
  _SUPPORTED_TYPE_KEYS = set([
      'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize', 'hashable',
      'move_only', 'nullable_is_same_type', 'forward_declaration',
      'default_constructible'
  ])

  with open(config_filename, 'r') as f:
    configs = json.load(f)

  for config in configs:
    for key in config.keys():
      if key not in _SUPPORTED_CONFIG_KEYS:
        raise ValueError('Invalid typemap property "%s" when processing %s' %
                         (key, target_name))

    types = config.get('types')
    if not types:
      raise ValueError('Typemap for %s must specify at least one type to map'
                       % target_name)

    for entry in types:
      for key in entry.keys():
        if key not in _SUPPORTED_TYPE_KEYS:
          raise IOError(
              'Invalid type property "%s" in typemap for "%s" on target %s' %
              (key, entry.get('mojom', '(unknown)'), target_name))

  # Touch the stamp file to record a successful validation.
  with open(out_filename, 'w') as f:
    f.truncate(0)
|
||||
|
||||
|
||||
def main():
  """Command-line entry point: expects exactly three positional arguments
  (target name, config filename, stamp filename)."""
  parser = argparse.ArgumentParser()
  _, positional = parser.parse_known_args()
  if len(positional) != 3:
    print('Usage: validate_typemap_config.py target_name config_filename '
          'stamp_filename')
    sys.exit(1)

  target_name, config_filename, stamp_filename = positional
  CheckCppTypemapConfigs(target_name, config_filename, stamp_filename)


if __name__ == '__main__':
  main()
|
18
utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn
Normal file
18
utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn
Normal file
|
@ -0,0 +1,18 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Data files needed by the mojom parser's Python unittests.
group("tests") {
  data = [
    "check_stable_mojom_compatibility_unittest.py",
    "check_stable_mojom_compatibility.py",
    "const_unittest.py",
    "enum_unittest.py",
    "feature_unittest.py",
    "mojom_parser_test_case.py",
    "mojom_parser_unittest.py",
    "mojom_parser.py",
    "stable_attribute_unittest.py",
    "version_compatibility_unittest.py",
  ]
}
|
14
utils/codegen/ipc/mojo/public/tools/mojom/README.md
Normal file
14
utils/codegen/ipc/mojo/public/tools/mojom/README.md
Normal file
|
@ -0,0 +1,14 @@
|
|||
# The Mojom Parser
|
||||
|
||||
The Mojom format is an interface definition language (IDL) for describing
|
||||
interprocess communication (IPC) messages and data types for use with the
|
||||
low-level cross-platform
|
||||
[Mojo IPC library](https://chromium.googlesource.com/chromium/src/+/main/mojo/public/c/system/README.md).
|
||||
|
||||
This directory consists of a `mojom` Python module, its tests, and supporting
|
||||
command-line tools. The Python module implements the parser used by the
|
||||
command-line tools and exposes an API to help external bindings generators emit
|
||||
useful code from the parser's outputs.
|
||||
|
||||
TODO(https://crbug.com/1060464): Fill out this documentation once the library
|
||||
and tools have stabilized.
|
204
utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
Executable file
204
utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
Executable file
|
@ -0,0 +1,204 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Verifies backward-compatibility of mojom type changes.
|
||||
|
||||
Given a set of pre- and post-diff mojom file contents, and a root directory
|
||||
for a project, this tool verifies that any changes to [Stable] mojom types are
|
||||
backward-compatible with the previous version.
|
||||
|
||||
This can be used e.g. by a presubmit check to prevent developers from making
|
||||
breaking changes to stable mojoms."""
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom.generate import module
|
||||
from mojom.generate import translate
|
||||
from mojom.parse import parser
|
||||
|
||||
# pylint: disable=raise-missing-from
|
||||
|
||||
|
||||
class ParseError(Exception):
  """Raised when a mojom file fails to parse during the compatibility check."""
  pass
|
||||
|
||||
|
||||
def _ValidateDelta(root, delta):
  """Parses all modified mojoms (including all transitive mojom dependencies,
  even if unmodified) to perform backward-compatibility checks on any types
  marked with the [Stable] attribute.

  Note that unlike the normal build-time parser in mojom_parser.py, this does
  not produce or rely on cached module translations, but instead parses the full
  transitive closure of a mojom's input dependencies all at once.
  """

  translate.is_running_backwards_compatibility_check_hack = True

  # First build a map of all files covered by the delta
  affected_files = set()
  old_files = {}
  new_files = {}
  for change in delta:
    # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
    filename = change['filename'].replace('\\', '/')
    affected_files.add(filename)
    if change['old']:
      old_files[filename] = change['old']
    if change['new']:
      new_files[filename] = change['new']

  # Parse and translate all mojoms relevant to the delta, including transitive
  # imports that weren't modified.
  unmodified_modules = {}

  def parseMojom(mojom, file_overrides, override_modules):
    # Skip anything already parsed in either pass.
    if mojom in unmodified_modules or mojom in override_modules:
      return

    # Prefer the in-memory (pre/post diff) contents; fall back to the file on
    # disk for unmodified transitive dependencies.
    contents = file_overrides.get(mojom)
    if contents:
      modules = override_modules
    else:
      modules = unmodified_modules
      with io.open(os.path.join(root, mojom), encoding='utf-8') as f:
        contents = f.read()

    try:
      ast = parser.Parse(contents, mojom)
    except Exception as e:
      raise ParseError('encountered exception {0} while parsing {1}'.format(
          e, mojom))

    # Files which are generated at compile time can't be checked by this script
    # (at the moment) since they may not exist in the output directory.
    generated_files_to_skip = {
        ('third_party/blink/public/mojom/runtime_feature_state/'
         'runtime_feature.mojom'),
        ('third_party/blink/public/mojom/origin_trial_feature/'
         'origin_trial_feature.mojom'),
    }

    ast.import_list.items = [
        x for x in ast.import_list.items
        if x.import_filename not in generated_files_to_skip
    ]

    for imp in ast.import_list:
      if (not file_overrides.get(imp.import_filename)
          and not os.path.exists(os.path.join(root, imp.import_filename))):
        # Speculatively construct a path prefix to locate the import_filename
        mojom_path = os.path.dirname(os.path.normpath(mojom)).split(os.sep)
        test_prefix = ''
        for path_component in mojom_path:
          test_prefix = os.path.join(test_prefix, path_component)
          test_import_filename = os.path.join(test_prefix, imp.import_filename)
          if os.path.exists(os.path.join(root, test_import_filename)):
            imp.import_filename = test_import_filename
            break
      parseMojom(imp.import_filename, file_overrides, override_modules)

    # Now that the transitive set of dependencies has been imported and parsed
    # above, translate each mojom AST into a Module so that all types are fully
    # defined and can be inspected.
    all_modules = {}
    all_modules.update(unmodified_modules)
    all_modules.update(override_modules)
    modules[mojom] = translate.OrderedModule(ast, mojom, all_modules)

  old_modules = {}
  for mojom in old_files:
    parseMojom(mojom, old_files, old_modules)
  new_modules = {}
  for mojom in new_files:
    parseMojom(mojom, new_files, new_modules)

  # At this point we have a complete set of translated Modules from both the
  # pre- and post-diff mojom contents. Now we can analyze backward-compatibility
  # of the deltas.
  #
  # Note that for backward-compatibility checks we only care about types which
  # were marked [Stable] before the diff. Types newly marked as [Stable] are not
  # checked.
  def collectTypes(modules):
    # Flatten enums, structs, unions and interfaces into one qualified-name
    # keyed dict.
    types = {}
    for m in modules.values():
      for kinds in (m.enums, m.structs, m.unions, m.interfaces):
        for kind in kinds:
          types[kind.qualified_name] = kind
    return types

  old_types = collectTypes(old_modules)
  new_types = collectTypes(new_modules)

  # Collect any renamed types so they can be compared accordingly.
  renamed_types = {}
  for name, kind in new_types.items():
    old_name = kind.attributes and kind.attributes.get('RenamedFrom')
    if old_name:
      renamed_types[old_name] = name

  for qualified_name, kind in old_types.items():
    if not kind.stable:
      continue

    new_name = renamed_types.get(qualified_name, qualified_name)
    if new_name not in new_types:
      raise Exception(
          'Stable type %s appears to be deleted by this change. If it was '
          'renamed, please add a [RenamedFrom] attribute to the new type. This '
          'can be deleted by a subsequent change.' % qualified_name)

    checker = module.BackwardCompatibilityChecker()
    # NOTE(review): the Exception raised inside this try on an incompatible
    # type is itself caught by the except clause below and re-wrapped with the
    # failure detail appended — presumably intentional, but worth confirming.
    try:
      if not checker.IsBackwardCompatible(new_types[new_name], kind):
        raise Exception(
            'Stable type %s appears to have changed in a way which '
            'breaks backward-compatibility. Please fix!\n\nIf you '
            'believe this assessment to be incorrect, please file a '
            'Chromium bug against the "Internals>Mojo>Bindings" '
            'component.' % qualified_name)
    except Exception as e:
      raise Exception(
          'Stable type %s appears to have changed in a way which '
          'breaks backward-compatibility: \n\n%s.\nPlease fix!\n\nIf you '
          'believe this assessment to be incorrect, please file a '
          'Chromium bug against the "Internals>Mojo>Bindings" '
          'component.' % (qualified_name, e))
|
||||
|
||||
|
||||
def Run(command_line, delta=None):
  """Runs the tool with the given command_line. Normally this will read the
  change description from stdin as a JSON-encoded list, but tests may pass a
  delta directly for convenience."""
  arg_parser = argparse.ArgumentParser(
      description='Verifies backward-compatibility of mojom type changes.',
      epilog="""
This tool reads a change description from stdin and verifies that all modified
[Stable] mojom types will retain backward-compatibility. The change description
must be a JSON-encoded list of objects, each with a "filename" key (path to a
changed mojom file, relative to ROOT); an "old" key whose value is a string of
the full file contents before the change, or null if the file is being added;
and a "new" key whose value is a string of the full file contents after the
change, or null if the file is being deleted.""")
  arg_parser.add_argument(
      '--src-root',
      required=True,
      action='store',
      metavar='ROOT',
      help='The root of the source tree in which the checked mojoms live.')

  args, _ = arg_parser.parse_known_args(command_line)
  # No delta supplied by the caller: read the JSON-encoded change list from
  # stdin.
  if not delta:
    delta = json.load(sys.stdin)
  _ValidateDelta(args.src_root, delta)


if __name__ == '__main__':
  Run(sys.argv[1:])
|
|
@ -0,0 +1,339 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import check_stable_mojom_compatibility
|
||||
|
||||
from mojom.generate import module
|
||||
|
||||
|
||||
class Change:
  """Helper to clearly define a mojom file delta to be analyzed."""

  def __init__(self, filename, old=None, new=None):
    """If old is None, this is a file addition. If new is None, this is a file
    deletion. Otherwise it's a file change."""
    self.filename, self.old, self.new = filename, old, new
|
||||
|
||||
|
||||
class UnchangedFile(Change):
  """A Change whose pre- and post-diff contents are identical."""

  def __init__(self, filename, contents):
    super().__init__(filename, old=contents, new=contents)
|
||||
|
||||
|
||||
class CheckStableMojomCompatibilityTest(unittest.TestCase):
|
||||
"""Tests covering the behavior of the compatibility checking tool. Note that
|
||||
details of different compatibility checks and relevant failure modes are NOT
|
||||
covered by these tests. Those are instead covered by unittests in
|
||||
version_compatibility_unittest.py. Additionally, the tests which ensure a
|
||||
given set of [Stable] mojom definitions are indeed plausibly stable (i.e. they
|
||||
have no unstable dependencies) are covered by stable_attribute_unittest.py.
|
||||
|
||||
These tests cover higher-level concerns of the compatibility checking tool,
|
||||
like file or symbol, renames, changes spread over multiple files, etc."""
|
||||
|
||||
def verifyBackwardCompatibility(self, changes):
|
||||
"""Helper for implementing assertBackwardCompatible and
|
||||
assertNotBackwardCompatible"""
|
||||
|
||||
temp_dir = tempfile.mkdtemp()
|
||||
for change in changes:
|
||||
if change.old:
|
||||
# Populate the old file on disk in our temporary fake source root
|
||||
file_path = os.path.join(temp_dir, change.filename)
|
||||
dir_path = os.path.dirname(file_path)
|
||||
if not os.path.exists(dir_path):
|
||||
os.makedirs(dir_path)
|
||||
with open(file_path, 'w') as f:
|
||||
f.write(change.old)
|
||||
|
||||
delta = []
|
||||
for change in changes:
|
||||
if change.old != change.new:
|
||||
delta.append({
|
||||
'filename': change.filename,
|
||||
'old': change.old,
|
||||
'new': change.new
|
||||
})
|
||||
|
||||
try:
|
||||
check_stable_mojom_compatibility.Run(['--src-root', temp_dir],
|
||||
delta=delta)
|
||||
finally:
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
def assertBackwardCompatible(self, changes):
|
||||
self.verifyBackwardCompatibility(changes)
|
||||
|
||||
def assertNotBackwardCompatible(self, changes):
|
||||
try:
|
||||
self.verifyBackwardCompatibility(changes)
|
||||
except Exception:
|
||||
return
|
||||
|
||||
raise Exception('Change unexpectedly passed a backward-compatibility check')
|
||||
|
||||
def testBasicCompatibility(self):
|
||||
"""Minimal smoke test to verify acceptance of a simple valid change."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new='[Stable] struct S { [MinVersion=1] int32 x; };')
|
||||
])
|
||||
|
||||
def testBasicIncompatibility(self):
|
||||
"""Minimal smoke test to verify rejection of a simple invalid change."""
|
||||
self.assertNotBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new='[Stable] struct S { int32 x; };')
|
||||
])
|
||||
|
||||
def testIgnoreIfNotStable(self):
|
||||
"""We don't care about types not marked [Stable]"""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='struct S {};',
|
||||
new='struct S { int32 x; };')
|
||||
])
|
||||
|
||||
def testRename(self):
|
||||
"""We can do checks for renamed types."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new='[Stable, RenamedFrom="S"] struct T {};')
|
||||
])
|
||||
self.assertNotBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new='[Stable, RenamedFrom="S"] struct T { int32 x; };')
|
||||
])
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new="""\
|
||||
[Stable, RenamedFrom="S"]
|
||||
struct T { [MinVersion=1] int32 x; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testNewlyStable(self):
|
||||
"""We don't care about types newly marked as [Stable]."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='struct S {};',
|
||||
new='[Stable] struct S { int32 x; };')
|
||||
])
|
||||
|
||||
def testFileRename(self):
|
||||
"""Make sure we can still do compatibility checks after a file rename."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
|
||||
Change('bar/bar.mojom',
|
||||
old=None,
|
||||
new='[Stable] struct S { [MinVersion=1] int32 x; };')
|
||||
])
|
||||
self.assertNotBackwardCompatible([
|
||||
Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
|
||||
Change('bar/bar.mojom', old=None, new='[Stable] struct S { int32 x; };')
|
||||
])
|
||||
|
||||
def testWithImport(self):
|
||||
"""Ensure that cross-module dependencies do not break the compatibility
|
||||
checking tool."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old="""\
|
||||
module foo;
|
||||
[Stable] struct S {};
|
||||
""",
|
||||
new="""\
|
||||
module foo;
|
||||
[Stable] struct S { [MinVersion=2] int32 x; };
|
||||
"""),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; [MinVersion=1] int32 y; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testWithMovedDefinition(self):
|
||||
"""If a definition moves from one file to another, we should still be able
|
||||
to check compatibility accurately."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old="""\
|
||||
module foo;
|
||||
[Stable] struct S {};
|
||||
""",
|
||||
new="""\
|
||||
module foo;
|
||||
"""),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable, RenamedFrom="foo.S"] struct S {
|
||||
[MinVersion=2] int32 x;
|
||||
};
|
||||
[Stable] struct T { S s; [MinVersion=1] int32 y; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertNotBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old="""\
|
||||
module foo;
|
||||
[Stable] struct S {};
|
||||
""",
|
||||
new="""\
|
||||
module foo;
|
||||
"""),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable, RenamedFrom="foo.S"] struct S { int32 x; };
|
||||
[Stable] struct T { S s; [MinVersion=1] int32 y; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testWithUnmodifiedImport(self):
|
||||
"""Unchanged files in the filesystem are still parsed by the compatibility
|
||||
checking tool if they're imported by a changed file."""
|
||||
self.assertBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; [MinVersion=1] int32 x; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertNotBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; int32 x; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testWithPartialImport(self):
|
||||
"""The compatibility checking tool correctly parses imports with partial
|
||||
paths."""
|
||||
self.assertBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('foo/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('foo/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertNotBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertNotBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testNewEnumDefault(self):
|
||||
# Should be backwards compatible since it does not affect the wire format.
|
||||
# This specific case also checks that the backwards compatibility checker
|
||||
# does not throw an error due to the older version of the enum not
|
||||
# specifying [Default].
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Extensible] enum E { One };',
|
||||
new='[Extensible] enum E { [Default] One };')
|
||||
])
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Extensible] enum E { [Default] One, Two, };',
|
||||
new='[Extensible] enum E { One, [Default] Two, };')
|
||||
])
|
90
utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py
Normal file
90
utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py
Normal file
|
@ -0,0 +1,90 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
from mojom.generate import module as mojom
|
||||
|
||||
|
||||
class ConstTest(MojomParserTestCase):
|
||||
"""Tests constant parsing behavior."""
|
||||
|
||||
def testLiteralInt(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const int32 k = 42;')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(1, len(a.constants))
|
||||
self.assertEqual('k', a.constants[0].mojom_name)
|
||||
self.assertEqual('42', a.constants[0].value)
|
||||
|
||||
def testLiteralFloat(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const float k = 42.5;')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(1, len(a.constants))
|
||||
self.assertEqual('k', a.constants[0].mojom_name)
|
||||
self.assertEqual('42.5', a.constants[0].value)
|
||||
|
||||
def testLiteralString(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const string k = "woot";')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(1, len(a.constants))
|
||||
self.assertEqual('k', a.constants[0].mojom_name)
|
||||
self.assertEqual('"woot"', a.constants[0].value)
|
||||
|
||||
def testEnumConstant(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'module a; enum E { kA = 41, kB };')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(
|
||||
b_mojom, """\
|
||||
import "a.mojom";
|
||||
const a.E kE1 = a.E.kB;
|
||||
|
||||
// We also allow value names to be unqualified, implying scope from the
|
||||
// constant's type.
|
||||
const a.E kE2 = kB;
|
||||
""")
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
b = self.LoadModule(b_mojom)
|
||||
self.assertEqual(1, len(a.enums))
|
||||
self.assertEqual('E', a.enums[0].mojom_name)
|
||||
self.assertEqual(2, len(b.constants))
|
||||
self.assertEqual('kE1', b.constants[0].mojom_name)
|
||||
self.assertEqual(a.enums[0], b.constants[0].kind)
|
||||
self.assertEqual(a.enums[0].fields[1], b.constants[0].value.field)
|
||||
self.assertEqual(42, b.constants[0].value.field.numeric_value)
|
||||
self.assertEqual('kE2', b.constants[1].mojom_name)
|
||||
self.assertEqual(a.enums[0].fields[1], b.constants[1].value.field)
|
||||
self.assertEqual(42, b.constants[1].value.field.numeric_value)
|
||||
|
||||
def testConstantReference(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const int32 kA = 42; const int32 kB = kA;')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(2, len(a.constants))
|
||||
self.assertEqual('kA', a.constants[0].mojom_name)
|
||||
self.assertEqual('42', a.constants[0].value)
|
||||
self.assertEqual('kB', a.constants[1].mojom_name)
|
||||
self.assertEqual('42', a.constants[1].value)
|
||||
|
||||
def testImportedConstantReference(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const int32 kA = 42;')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(b_mojom, 'import "a.mojom"; const int32 kB = kA;')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
b = self.LoadModule(b_mojom)
|
||||
self.assertEqual(1, len(a.constants))
|
||||
self.assertEqual(1, len(b.constants))
|
||||
self.assertEqual('kA', a.constants[0].mojom_name)
|
||||
self.assertEqual('42', a.constants[0].value)
|
||||
self.assertEqual('kB', b.constants[0].mojom_name)
|
||||
self.assertEqual('42', b.constants[0].value)
|
120
utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py
Normal file
120
utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py
Normal file
|
@ -0,0 +1,120 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class EnumTest(MojomParserTestCase):
|
||||
"""Tests enum parsing behavior."""
|
||||
|
||||
def testExplicitValues(self):
|
||||
"""Verifies basic parsing of assigned integral values."""
|
||||
types = self.ExtractTypes('enum E { kFoo=0, kBar=2, kBaz };')
|
||||
self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
|
||||
self.assertEqual(0, types['E'].fields[0].numeric_value)
|
||||
self.assertEqual('kBar', types['E'].fields[1].mojom_name)
|
||||
self.assertEqual(2, types['E'].fields[1].numeric_value)
|
||||
self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
|
||||
self.assertEqual(3, types['E'].fields[2].numeric_value)
|
||||
|
||||
def testImplicitValues(self):
|
||||
"""Verifies basic automatic assignment of integral values at parse time."""
|
||||
types = self.ExtractTypes('enum E { kFoo, kBar, kBaz };')
|
||||
self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
|
||||
self.assertEqual(0, types['E'].fields[0].numeric_value)
|
||||
self.assertEqual('kBar', types['E'].fields[1].mojom_name)
|
||||
self.assertEqual(1, types['E'].fields[1].numeric_value)
|
||||
self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
|
||||
self.assertEqual(2, types['E'].fields[2].numeric_value)
|
||||
|
||||
def testSameEnumReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of another
|
||||
field within the same enum."""
|
||||
types = self.ExtractTypes('enum E { kA, kB, kFirst=kA };')
|
||||
self.assertEqual('kA', types['E'].fields[0].mojom_name)
|
||||
self.assertEqual(0, types['E'].fields[0].numeric_value)
|
||||
self.assertEqual('kB', types['E'].fields[1].mojom_name)
|
||||
self.assertEqual(1, types['E'].fields[1].numeric_value)
|
||||
self.assertEqual('kFirst', types['E'].fields[2].mojom_name)
|
||||
self.assertEqual(0, types['E'].fields[2].numeric_value)
|
||||
|
||||
def testSameModuleOtherEnumReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of a field
|
||||
in another enum within the same module."""
|
||||
types = self.ExtractTypes('enum E { kA, kB }; enum F { kA = E.kB };')
|
||||
self.assertEqual(1, types['F'].fields[0].numeric_value)
|
||||
|
||||
def testImportedEnumReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of a field
|
||||
in another enum within a different module."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'module a; enum E { kFoo=42, kBar };')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(b_mojom,
|
||||
'module b; import "a.mojom"; enum F { kFoo = a.E.kBar };')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
b = self.LoadModule(b_mojom)
|
||||
|
||||
self.assertEqual('F', b.enums[0].mojom_name)
|
||||
self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
|
||||
self.assertEqual(43, b.enums[0].fields[0].numeric_value)
|
||||
|
||||
def testConstantReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of an
|
||||
integral constant within the same module."""
|
||||
types = self.ExtractTypes('const int32 kFoo = 42; enum E { kA = kFoo };')
|
||||
self.assertEqual(42, types['E'].fields[0].numeric_value)
|
||||
|
||||
def testInvalidConstantReference(self):
|
||||
"""Verifies that enum values cannot be assigned from the value of
|
||||
non-integral constants."""
|
||||
with self.assertRaisesRegexp(ValueError, 'not an integer'):
|
||||
self.ExtractTypes('const float kFoo = 1.0; enum E { kA = kFoo };')
|
||||
with self.assertRaisesRegexp(ValueError, 'not an integer'):
|
||||
self.ExtractTypes('const double kFoo = 1.0; enum E { kA = kFoo };')
|
||||
with self.assertRaisesRegexp(ValueError, 'not an integer'):
|
||||
self.ExtractTypes('const string kFoo = "lol"; enum E { kA = kFoo };')
|
||||
|
||||
def testImportedConstantReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of an integral
|
||||
constant within an imported module."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'module a; const int32 kFoo = 37;')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(b_mojom,
|
||||
'module b; import "a.mojom"; enum F { kFoo = a.kFoo };')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
b = self.LoadModule(b_mojom)
|
||||
|
||||
self.assertEqual('F', b.enums[0].mojom_name)
|
||||
self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
|
||||
self.assertEqual(37, b.enums[0].fields[0].numeric_value)
|
||||
|
||||
def testEnumAttributesAreEnums(self):
|
||||
"""Verifies that enum values in attributes are really enum types."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'module a; enum E { kFoo, kBar };')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(
|
||||
b_mojom, 'module b;'
|
||||
'import "a.mojom";'
|
||||
'[MooCow=a.E.kFoo]'
|
||||
'interface Foo { Foo(); };')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
b = self.LoadModule(b_mojom)
|
||||
self.assertEqual(b.interfaces[0].attributes['MooCow'].mojom_name, 'kFoo')
|
||||
|
||||
def testConstantAttributes(self):
|
||||
"""Verifies that constants as attributes are translated to the constant."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(
|
||||
a_mojom, 'module a;'
|
||||
'enum E { kFoo, kBar };'
|
||||
'const E kB = E.kFoo;'
|
||||
'[Attr=kB] interface Hello { Foo(); };')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(a.interfaces[0].attributes['Attr'].mojom_name, 'kB')
|
||||
self.assertEquals(a.interfaces[0].attributes['Attr'].value.mojom_name,
|
||||
'kFoo')
|
|
@ -0,0 +1,84 @@
|
|||
# Copyright 2023 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class FeatureTest(MojomParserTestCase):
|
||||
"""Tests feature parsing behavior."""
|
||||
def testFeatureOff(self):
|
||||
"""Verifies basic parsing of feature types."""
|
||||
types = self.ExtractTypes("""
|
||||
// e.g. BASE_DECLARE_FEATURE(kFeature);
|
||||
[AttributeOne=ValueOne]
|
||||
feature kFeature {
|
||||
// BASE_FEATURE(kFeature,"MyFeature",
|
||||
// base::FEATURE_DISABLED_BY_DEFAULT);
|
||||
const string name = "MyFeature";
|
||||
const bool default_state = false;
|
||||
};
|
||||
""")
|
||||
self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
|
||||
self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
|
||||
self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
|
||||
self.assertEqual('false', types['kFeature'].constants[1].value)
|
||||
|
||||
def testFeatureOn(self):
|
||||
"""Verifies basic parsing of feature types."""
|
||||
types = self.ExtractTypes("""
|
||||
// e.g. BASE_DECLARE_FEATURE(kFeature);
|
||||
feature kFeature {
|
||||
// BASE_FEATURE(kFeature,"MyFeature",
|
||||
// base::FEATURE_ENABLED_BY_DEFAULT);
|
||||
const string name = "MyFeature";
|
||||
const bool default_state = true;
|
||||
};
|
||||
""")
|
||||
self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
|
||||
self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
|
||||
self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
|
||||
self.assertEqual('true', types['kFeature'].constants[1].value)
|
||||
|
||||
def testFeatureWeakKeyword(self):
|
||||
"""Verifies that `feature` is a weak keyword."""
|
||||
types = self.ExtractTypes("""
|
||||
// e.g. BASE_DECLARE_FEATURE(kFeature);
|
||||
[AttributeOne=ValueOne]
|
||||
feature kFeature {
|
||||
// BASE_FEATURE(kFeature,"MyFeature",
|
||||
// base::FEATURE_DISABLED_BY_DEFAULT);
|
||||
const string name = "MyFeature";
|
||||
const bool default_state = false;
|
||||
};
|
||||
struct MyStruct {
|
||||
bool feature = true;
|
||||
};
|
||||
interface InterfaceName {
|
||||
Method(string feature) => (int32 feature);
|
||||
};
|
||||
""")
|
||||
self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
|
||||
self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
|
||||
self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
|
||||
self.assertEqual('false', types['kFeature'].constants[1].value)
|
||||
|
||||
def testFeatureAttributesAreFeatures(self):
|
||||
"""Verifies that feature values in attributes are really feature types."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(
|
||||
a_mojom, 'module a;'
|
||||
'feature F { const string name = "f";'
|
||||
'const bool default_state = false; };')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(
|
||||
b_mojom, 'module b;'
|
||||
'import "a.mojom";'
|
||||
'feature G'
|
||||
'{const string name = "g"; const bool default_state = false;};'
|
||||
'[Attri=a.F] interface Foo { Foo(); };'
|
||||
'[Boink=G] interface Bar {};')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
b = self.LoadModule(b_mojom)
|
||||
self.assertEqual(b.interfaces[0].attributes['Attri'].mojom_name, 'F')
|
||||
self.assertEqual(b.interfaces[1].attributes['Boink'].mojom_name, 'G')
|
43
utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
Normal file
43
utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
Normal file
|
@ -0,0 +1,43 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
group("mojom") {
|
||||
data = [
|
||||
"__init__.py",
|
||||
"error.py",
|
||||
"fileutil.py",
|
||||
"generate/__init__.py",
|
||||
"generate/check.py",
|
||||
"generate/generator.py",
|
||||
"generate/module.py",
|
||||
"generate/pack.py",
|
||||
"generate/template_expander.py",
|
||||
"generate/translate.py",
|
||||
"parse/__init__.py",
|
||||
"parse/ast.py",
|
||||
"parse/conditional_features.py",
|
||||
"parse/lexer.py",
|
||||
"parse/parser.py",
|
||||
|
||||
# Third-party module dependencies
|
||||
"//third_party/jinja2/",
|
||||
"//third_party/ply/",
|
||||
]
|
||||
}
|
||||
|
||||
group("tests") {
|
||||
data = [
|
||||
"fileutil_unittest.py",
|
||||
"generate/generator_unittest.py",
|
||||
"generate/module_unittest.py",
|
||||
"generate/pack_unittest.py",
|
||||
"generate/translate_unittest.py",
|
||||
"parse/ast_unittest.py",
|
||||
"parse/conditional_features_unittest.py",
|
||||
"parse/lexer_unittest.py",
|
||||
"parse/parser_unittest.py",
|
||||
]
|
||||
|
||||
public_deps = [ ":mojom" ]
|
||||
}
|
28
utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py
Normal file
28
utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
"""Base class for Mojo IDL bindings parser/generator errors."""
|
||||
|
||||
def __init__(self, filename, message, lineno=None, addenda=None, **kwargs):
|
||||
"""|filename| is the (primary) file which caused the error, |message| is the
|
||||
error message, |lineno| is the 1-based line number (or |None| if not
|
||||
applicable/available), and |addenda| is a list of additional lines to append
|
||||
to the final error message."""
|
||||
Exception.__init__(self, **kwargs)
|
||||
self.filename = filename
|
||||
self.message = message
|
||||
self.lineno = lineno
|
||||
self.addenda = addenda
|
||||
|
||||
def __str__(self):
|
||||
if self.lineno:
|
||||
s = "%s:%d: Error: %s" % (self.filename, self.lineno, self.message)
|
||||
else:
|
||||
s = "%s: Error: %s" % (self.filename, self.message)
|
||||
return "\n".join([s] + self.addenda) if self.addenda else s
|
||||
|
||||
def __repr__(self):
|
||||
return str(self)
|
44
utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py
Normal file
44
utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
# Copyright 2015 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import errno
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
|
||||
def _GetDirAbove(dirname):
|
||||
"""Returns the directory "above" this file containing |dirname| (which must
|
||||
also be "above" this file)."""
|
||||
path = os.path.abspath(__file__)
|
||||
while True:
|
||||
path, tail = os.path.split(path)
|
||||
if not tail:
|
||||
return None
|
||||
if tail == dirname:
|
||||
return path
|
||||
|
||||
|
||||
def EnsureDirectoryExists(path, always_try_to_create=False):
|
||||
"""A wrapper for os.makedirs that does not error if the directory already
|
||||
exists. A different process could be racing to create this directory."""
|
||||
|
||||
if not os.path.exists(path) or always_try_to_create:
|
||||
try:
|
||||
os.makedirs(path)
|
||||
except OSError as e:
|
||||
# There may have been a race to create this directory.
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
|
||||
|
||||
def AddLocalRepoThirdPartyDirToModulePath():
|
||||
"""Helper function to find the top-level directory of this script's repository
|
||||
assuming the script falls somewhere within a 'mojo' directory, and insert the
|
||||
top-level 'third_party' directory early in the module search path. Used to
|
||||
ensure that third-party dependencies provided within the repository itself
|
||||
(e.g. Chromium sources include snapshots of jinja2 and ply) are preferred over
|
||||
locally installed system library packages."""
|
||||
toplevel_dir = _GetDirAbove('mojo')
|
||||
if toplevel_dir:
|
||||
sys.path.insert(1, os.path.join(toplevel_dir, 'third_party'))
|
|
@ -0,0 +1,37 @@
|
|||
# Copyright 2015 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os.path
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from mojom import fileutil
|
||||
|
||||
class FileUtilTest(unittest.TestCase):
|
||||
def testEnsureDirectoryExists(self):
|
||||
"""Test that EnsureDirectoryExists functions correctly."""
|
||||
|
||||
temp_dir = tempfile.mkdtemp()
|
||||
try:
|
||||
self.assertTrue(os.path.exists(temp_dir))
|
||||
|
||||
# Directory does not exist, yet.
|
||||
full = os.path.join(temp_dir, "foo", "bar")
|
||||
self.assertFalse(os.path.exists(full))
|
||||
|
||||
# Create the directory.
|
||||
fileutil.EnsureDirectoryExists(full)
|
||||
self.assertTrue(os.path.exists(full))
|
||||
|
||||
# Trying to create it again does not cause an error.
|
||||
fileutil.EnsureDirectoryExists(full)
|
||||
self.assertTrue(os.path.exists(full))
|
||||
|
||||
# Bypass check for directory existence to tickle error handling that
|
||||
# occurs in response to a race.
|
||||
fileutil.EnsureDirectoryExists(full, always_try_to_create=True)
|
||||
self.assertTrue(os.path.exists(full))
|
||||
finally:
|
||||
shutil.rmtree(temp_dir)
|
|
@ -0,0 +1,26 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Code shared by the various pre-generation mojom checkers."""
|
||||
|
||||
|
||||
class CheckException(Exception):
|
||||
def __init__(self, module, message):
|
||||
self.module = module
|
||||
self.message = message
|
||||
super().__init__(self.message)
|
||||
|
||||
def __str__(self):
|
||||
return "Failed mojo pre-generation check for {}:\n{}".format(
|
||||
self.module.path, self.message)
|
||||
|
||||
|
||||
class Check:
|
||||
def __init__(self, module):
|
||||
self.module = module
|
||||
|
||||
def CheckModule(self):
|
||||
""" Subclass should return True if its Checks pass, and throw an
|
||||
exception otherwise. CheckModule will be called immediately before
|
||||
mojom.generate.Generator.GenerateFiles()"""
|
||||
raise NotImplementedError("Subclasses must override/implement this method")
|
|
@ -0,0 +1,328 @@
|
|||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Code shared by the various language-specific code generators."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from functools import partial
|
||||
import os.path
|
||||
import re
|
||||
|
||||
from mojom import fileutil
|
||||
from mojom.generate import module as mojom
|
||||
from mojom.generate import pack
|
||||
|
||||
|
||||
def ExpectedArraySize(kind):
|
||||
if mojom.IsArrayKind(kind):
|
||||
return kind.length
|
||||
return None
|
||||
|
||||
|
||||
def SplitCamelCase(identifier):
|
||||
"""Splits a camel-cased |identifier| and returns a list of lower-cased
|
||||
strings.
|
||||
"""
|
||||
# Add underscores after uppercase letters when appropriate. An uppercase
|
||||
# letter is considered the end of a word if it is followed by an upper and a
|
||||
# lower. E.g. URLLoaderFactory -> URL_LoaderFactory
|
||||
identifier = re.sub('([A-Z][0-9]*)(?=[A-Z][0-9]*[a-z])', r'\1_', identifier)
|
||||
# Add underscores after lowercase letters when appropriate. A lowercase letter
|
||||
# is considered the end of a word if it is followed by an upper.
|
||||
# E.g. URLLoaderFactory -> URLLoader_Factory
|
||||
identifier = re.sub('([a-z][0-9]*)(?=[A-Z])', r'\1_', identifier)
|
||||
return [x.lower() for x in identifier.split('_')]
|
||||
|
||||
|
||||
def ToCamel(identifier, lower_initial=False, digits_split=False, delimiter='_'):
    """Split |identifier| on |delimiter| and join the words in CamelCase.

    The first character of each word is uppercased; all other characters are
    left untouched. When |lower_initial| is True the very first character of
    the result is lowercased. When |digits_split| is True a digit also ends
    the current word (the digit itself is kept as-is).
    """
    pieces = []
    upper_next = True
    for ch in identifier:
        if ch == delimiter:
            upper_next = True
        elif digits_split and ch.isdigit():
            # The digit is emitted unchanged but starts a new word.
            upper_next = True
            pieces.append(ch)
        elif upper_next:
            upper_next = False
            pieces.append(ch.upper())
        else:
            pieces.append(ch)
    result = ''.join(pieces)

    if lower_initial and result:
        result = result[0].lower() + result[1:]

    return result
|
||||
|
||||
|
||||
def _ToSnakeCase(identifier, upper=False):
    """Convert camel-cased |identifier| to snake case.

    A leading "k" word (constant prefix) is dropped unless it is the whole
    name. E.g. "URLLoaderFactory" becomes "URL_LOADER_FACTORY" when |upper|
    is True, otherwise "url_loader_factory".
    """
    words = SplitCamelCase(identifier)
    if len(words) > 1 and words[0] == 'k':
        del words[0]

    # Identifiers cannot start with a digit.
    if words[0][0].isdigit():
        words[0] = '_' + words[0]

    if upper:
        words = [word.upper() for word in words]

    return '_'.join(words)
|
||||
|
||||
|
||||
def ToUpperSnakeCase(identifier):
    """Return camel-cased |identifier| as UPPER_SNAKE_CASE, dropping a
    leading "k" word; e.g. "URLLoaderFactory" -> "URL_LOADER_FACTORY"."""
    return _ToSnakeCase(identifier, upper=True)
|
||||
|
||||
|
||||
def ToLowerSnakeCase(identifier):
    """Return camel-cased |identifier| as lower_snake_case, dropping a
    leading "k" word; e.g. "URLLoaderFactory" -> "url_loader_factory"."""
    return _ToSnakeCase(identifier, upper=False)
|
||||
|
||||
|
||||
class Stylizer:
  """Stylizers specify naming rules to map mojom names to names in generated
  code. For example, if you would like method_name in mojom to be mapped to
  MethodName in the generated code, you need to define a subclass of Stylizer
  and override StylizeMethod to do the conversion."""

  # Every hook below receives the raw mojom name and returns the name to use
  # in generated code. The defaults are identity mappings; language-specific
  # generators override the ones they need.

  def StylizeConstant(self, mojom_name):
    return mojom_name

  def StylizeField(self, mojom_name):
    return mojom_name

  def StylizeStruct(self, mojom_name):
    return mojom_name

  def StylizeUnion(self, mojom_name):
    return mojom_name

  def StylizeParameter(self, mojom_name):
    return mojom_name

  def StylizeMethod(self, mojom_name):
    return mojom_name

  def StylizeInterface(self, mojom_name):
    return mojom_name

  def StylizeEnumField(self, mojom_name):
    return mojom_name

  def StylizeEnum(self, mojom_name):
    return mojom_name

  def StylizeFeature(self, mojom_name):
    return mojom_name

  # Note: this one takes the module namespace, not a simple name.
  def StylizeModule(self, mojom_namespace):
    return mojom_namespace
|
||||
|
||||
|
||||
def WriteFile(contents, full_path):
    """Write |contents| (str or bytes) to |full_path|, skipping the write when
    the file already holds identical bytes."""
    payload = contents if isinstance(contents, bytes) else contents.encode('utf8')

    # If the on-disk content is already identical, leave the file untouched so
    # its timestamp does not change.
    if os.path.isfile(full_path):
        with open(full_path, 'rb') as existing:
            if existing.read() == payload:
                return

    # Make sure the containing directory exists, then dump the data to disk.
    fileutil.EnsureDirectoryExists(os.path.dirname(full_path))
    with open(full_path, 'wb') as out:
        out.write(payload)
|
||||
|
||||
|
||||
def AddComputedData(module):
  """Adds computed data to the given module. The data is computed once and
  used repeatedly in the generation process."""

  def _AddStructComputedData(exported, struct):
    # Attach packing/layout/version data directly to the struct node.
    struct.packed = pack.PackedStruct(struct)
    struct.bytes = pack.GetByteLayout(struct.packed)
    struct.versions = pack.GetVersionInfo(struct.packed)
    struct.exported = exported

  def _AddInterfaceComputedData(interface):
    # The interface version is the max of all [MinVersion] values seen on its
    # methods and their (response) parameter structs.
    interface.version = 0
    for method in interface.methods:
      # this field is never scrambled
      method.sequential_ordinal = method.ordinal

      if method.min_version is not None:
        interface.version = max(interface.version, method.min_version)

      method.param_struct = _GetStructFromMethod(method)
      if interface.stable:
        # Stability propagates to the synthesized parameter struct, and
        # stable interfaces must pin their ordinals explicitly.
        method.param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
        if method.explicit_ordinal is None:
          raise Exception(
              'Stable interfaces must declare explicit method ordinals. The '
              'method %s on stable interface %s does not declare an explicit '
              'ordinal.' % (method.mojom_name, interface.qualified_name))
      interface.version = max(interface.version,
                              method.param_struct.versions[-1].version)

      if method.response_parameters is not None:
        method.response_param_struct = _GetResponseStructFromMethod(method)
        if interface.stable:
          method.response_param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
        interface.version = max(
            interface.version,
            method.response_param_struct.versions[-1].version)
      else:
        method.response_param_struct = None

  def _GetStructFromMethod(method):
    """Converts a method's parameters into the fields of a struct."""
    params_class = "%s_%s_Params" % (method.interface.mojom_name,
                                     method.mojom_name)
    struct = mojom.Struct(params_class,
                          module=method.interface.module,
                          attributes={})
    for param in method.parameters:
      struct.AddField(
          param.mojom_name,
          param.kind,
          param.ordinal,
          attributes=param.attributes)
    # Synthesized param structs are never exported on their own.
    _AddStructComputedData(False, struct)
    return struct

  def _GetResponseStructFromMethod(method):
    """Converts a method's response_parameters into the fields of a struct."""
    params_class = "%s_%s_ResponseParams" % (method.interface.mojom_name,
                                             method.mojom_name)
    struct = mojom.Struct(params_class,
                          module=method.interface.module,
                          attributes={})
    for param in method.response_parameters:
      struct.AddField(
          param.mojom_name,
          param.kind,
          param.ordinal,
          attributes=param.attributes)
    _AddStructComputedData(False, struct)
    return struct

  # Structs declared in the module are exported; interface param structs
  # (handled above) are not.
  for struct in module.structs:
    _AddStructComputedData(True, struct)
  for interface in module.interfaces:
    _AddInterfaceComputedData(interface)
|
||||
|
||||
|
||||
class Generator:
  """Base class for language-specific mojom code generators."""

  # Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
  # files to stdout.
  def __init__(self,
               module,
               output_dir=None,
               typemap=None,
               variant=None,
               bytecode_path=None,
               for_blink=False,
               js_generate_struct_deserializers=False,
               export_attribute=None,
               export_header=None,
               generate_non_variant_code=False,
               support_lazy_serialization=False,
               disallow_native_types=False,
               disallow_interfaces=False,
               generate_message_ids=False,
               generate_fuzzing=False,
               enable_kythe_annotations=False,
               extra_cpp_template_paths=None,
               generate_extra_cpp_only=False):
    # The configuration flags are stored verbatim; subclasses consult the
    # ones relevant to their target language.
    self.module = module
    self.output_dir = output_dir
    self.typemap = typemap or {}
    self.variant = variant
    self.bytecode_path = bytecode_path
    self.for_blink = for_blink
    self.js_generate_struct_deserializers = js_generate_struct_deserializers
    self.export_attribute = export_attribute
    self.export_header = export_header
    self.generate_non_variant_code = generate_non_variant_code
    self.support_lazy_serialization = support_lazy_serialization
    self.disallow_native_types = disallow_native_types
    self.disallow_interfaces = disallow_interfaces
    self.generate_message_ids = generate_message_ids
    self.generate_fuzzing = generate_fuzzing
    self.enable_kythe_annotations = enable_kythe_annotations
    self.extra_cpp_template_paths = extra_cpp_template_paths
    self.generate_extra_cpp_only = generate_extra_cpp_only

  def Write(self, contents, filename):
    """Emit |contents|: to stdout when no output_dir is set, else to disk."""
    if self.output_dir is None:
      print(contents)
      return
    full_path = os.path.join(self.output_dir, filename)
    WriteFile(contents, full_path)

  def OptimizeEmpty(self, contents):
    """Strip #include lines from generated .cc contents that hold no code."""
    # Look for .cc files that contain no actual code. There are many of these
    # and they collectively take a while to compile.
    lines = contents.splitlines()

    for line in lines:
      if line.startswith('#') or line.startswith('//'):
        continue
      if re.match(r'namespace .* {', line) or re.match(r'}.*//.*namespace',
                                                       line):
        continue
      if line.strip():
        # There is some actual code - return the unmodified contents.
        return contents

    # If we reach here then we have a .cc file with no actual code. The
    # includes are therefore unneeded and can be removed.
    new_lines = [line for line in lines if not line.startswith('#include')]
    if len(new_lines) < len(lines):
      new_lines.append('')
      new_lines.append('// Includes removed due to no code being generated.')
    return '\n'.join(new_lines)

  def WriteWithComment(self, contents, filename):
    """Prepend an auto-generated banner, optimize empty .cc files, write."""
    generator_name = "mojom_bindings_generator.py"
    comment = r"// %s is auto generated by %s, do not edit" % (filename,
                                                               generator_name)
    contents = comment + '\n' + '\n' + contents;
    if filename.endswith('.cc'):
      contents = self.OptimizeEmpty(contents)
    self.Write(contents, filename)

  def GenerateFiles(self, args):
    """Subclasses must emit their output files here."""
    raise NotImplementedError("Subclasses must override/implement this method")

  def GetJinjaParameters(self):
    """Returns default constructor parameters for the jinja environment."""
    return {}

  def GetGlobals(self):
    """Returns global mappings for the template generation."""
    return {}
|
|
@ -0,0 +1,71 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import importlib.util
|
||||
import os.path
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
def _GetDirAbove(dirname):
    """Return the ancestor directory of this file that contains |dirname|.

    |dirname| must be a path component "above" this file; the assert fires
    once the filesystem root is reached without finding it.
    """
    path = os.path.abspath(__file__)
    while True:
        path, component = os.path.split(path)
        assert component
        if component == dirname:
            return path
|
||||
|
||||
|
||||
# Make the mojom package importable: when it is not already reachable, add
# the pylib directory (found relative to this file) to sys.path.
# NOTE(review): find_spec returns None for a missing top-level module rather
# than raising; the except clause presumably covers broken parent packages —
# verify against callers.
try:
  importlib.util.find_spec("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
from mojom.generate import generator
|
||||
|
||||
class StringManipulationTest(unittest.TestCase):
  """generator contains some string utilities, this tests only those."""

  # Note: the deprecated assertEquals alias was removed in Python 3.12;
  # assertEqual is used throughout.

  def testSplitCamelCase(self):
    self.assertEqual(["camel", "case"], generator.SplitCamelCase("CamelCase"))
    self.assertEqual(["url", "loader", "factory"],
                     generator.SplitCamelCase('URLLoaderFactory'))
    self.assertEqual(["get99", "entries"],
                     generator.SplitCamelCase('Get99Entries'))
    self.assertEqual(["get99entries"],
                     generator.SplitCamelCase('Get99entries'))

  def testToCamel(self):
    self.assertEqual("CamelCase", generator.ToCamel("camel_case"))
    self.assertEqual("CAMELCASE", generator.ToCamel("CAMEL_CASE"))
    self.assertEqual("camelCase",
                     generator.ToCamel("camel_case", lower_initial=True))
    self.assertEqual("CamelCase", generator.ToCamel(
        "camel case", delimiter=' '))
    self.assertEqual("CaMelCaSe", generator.ToCamel("caMel_caSe"))
    self.assertEqual("L2Tp", generator.ToCamel("l2tp", digits_split=True))
    self.assertEqual("l2tp", generator.ToCamel("l2tp", lower_initial=True))

  def testToSnakeCase(self):
    self.assertEqual("snake_case", generator.ToLowerSnakeCase("SnakeCase"))
    self.assertEqual("snake_case", generator.ToLowerSnakeCase("snakeCase"))
    self.assertEqual("snake_case", generator.ToLowerSnakeCase("SnakeCASE"))
    self.assertEqual("snake_d3d11_case",
                     generator.ToLowerSnakeCase("SnakeD3D11Case"))
    self.assertEqual("snake_d3d11_case",
                     generator.ToLowerSnakeCase("SnakeD3d11Case"))
    self.assertEqual("snake_d3d11_case",
                     generator.ToLowerSnakeCase("snakeD3d11Case"))
    self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCase"))
    self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("snakeCase"))
    self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCASE"))
    self.assertEqual("SNAKE_D3D11_CASE",
                     generator.ToUpperSnakeCase("SnakeD3D11Case"))
    self.assertEqual("SNAKE_D3D11_CASE",
                     generator.ToUpperSnakeCase("SnakeD3d11Case"))
    self.assertEqual("SNAKE_D3D11_CASE",
                     generator.ToUpperSnakeCase("snakeD3d11Case"))
|
||||
|
||||
# Allow running this test file directly.
if __name__ == "__main__":
  unittest.main()
|
2059
utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py
Normal file
2059
utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py
Normal file
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,31 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from mojom.generate import module as mojom
|
||||
|
||||
|
||||
class ModuleTest(unittest.TestCase):
  """Tests for mojom.generate.module type-construction validation."""

  # Note: the deprecated assertEquals alias was removed in Python 3.12;
  # assertEqual is used instead. str(e) replaces the unidiomatic
  # e.__str__() calls.

  def testNonInterfaceAsInterfaceRequest(self):
    """Tests that a non-interface cannot be used for interface requests."""
    module = mojom.Module('test_module', 'test_namespace')
    struct = mojom.Struct('TestStruct', module=module)
    with self.assertRaises(Exception) as e:
      mojom.InterfaceRequest(struct)
    self.assertEqual(
        str(e.exception),
        'Interface request requires \'x:TestStruct\' to be an interface.')

  def testNonInterfaceAsAssociatedInterface(self):
    """Tests that a non-interface type cannot be used for associated interfaces.
    """
    module = mojom.Module('test_module', 'test_namespace')
    struct = mojom.Struct('TestStruct', module=module)
    with self.assertRaises(Exception) as e:
      mojom.AssociatedInterface(struct)
    self.assertEqual(
        str(e.exception),
        'Associated interface requires \'x:TestStruct\' to be an interface.')
|
367
utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
Normal file
367
utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
Normal file
|
@ -0,0 +1,367 @@
|
|||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import copy
|
||||
from mojom.generate import module as mojom
|
||||
|
||||
# This module provides a mechanism for determining the packed order and offsets
|
||||
# of a mojom.Struct.
|
||||
#
|
||||
# ps = pack.PackedStruct(struct)
|
||||
# ps.packed_fields will access a list of PackedField objects, each of which
|
||||
# will have an offset, a size and a bit (for mojom.BOOLs).
|
||||
|
||||
# Size of struct header in bytes: num_bytes [4B] + version [4B].
|
||||
HEADER_SIZE = 8
|
||||
|
||||
|
||||
class PackedField:
  """A field of a PackedStruct, annotated with wire size/alignment/offset."""

  # Wire size in bytes of each fixed-size mojom kind.
  kind_to_size = {
      mojom.BOOL: 1,
      mojom.INT8: 1,
      mojom.UINT8: 1,
      mojom.INT16: 2,
      mojom.UINT16: 2,
      mojom.INT32: 4,
      mojom.UINT32: 4,
      mojom.FLOAT: 4,
      mojom.HANDLE: 4,
      mojom.MSGPIPE: 4,
      mojom.SHAREDBUFFER: 4,
      mojom.PLATFORMHANDLE: 4,
      mojom.DCPIPE: 4,
      mojom.DPPIPE: 4,
      mojom.NULLABLE_HANDLE: 4,
      mojom.NULLABLE_MSGPIPE: 4,
      mojom.NULLABLE_SHAREDBUFFER: 4,
      mojom.NULLABLE_PLATFORMHANDLE: 4,
      mojom.NULLABLE_DCPIPE: 4,
      mojom.NULLABLE_DPPIPE: 4,
      mojom.INT64: 8,
      mojom.UINT64: 8,
      mojom.DOUBLE: 8,
      mojom.STRING: 8,
      mojom.NULLABLE_STRING: 8
  }

  @classmethod
  def GetSizeForKind(cls, kind):
    """Return the wire size in bytes of |kind|; raises for unknown kinds."""
    # Pointer-like kinds are stored as 8-byte references.
    if isinstance(kind, (mojom.Array, mojom.Map, mojom.Struct, mojom.Interface,
                         mojom.AssociatedInterface, mojom.PendingRemote,
                         mojom.PendingAssociatedRemote)):
      return 8
    if isinstance(kind, mojom.Union):
      return 16
    # Interface requests are carried as message pipes on the wire.
    if isinstance(kind, (mojom.InterfaceRequest, mojom.PendingReceiver)):
      kind = mojom.MSGPIPE
    if isinstance(
        kind,
        (mojom.AssociatedInterfaceRequest, mojom.PendingAssociatedReceiver)):
      return 4
    if isinstance(kind, mojom.Enum):
      # TODO(mpcomplete): what about big enums?
      return cls.kind_to_size[mojom.INT32]
    if not kind in cls.kind_to_size:
      raise Exception("Undefined type: %s. Did you forget to import the file "
                      "containing the definition?" % kind.spec)
    return cls.kind_to_size[kind]

  @classmethod
  def GetAlignmentForKind(cls, kind):
    """Return the wire alignment of |kind| (usually equal to its size)."""
    if isinstance(kind, (mojom.Interface, mojom.AssociatedInterface,
                         mojom.PendingRemote, mojom.PendingAssociatedRemote)):
      return 4
    if isinstance(kind, mojom.Union):
      return 8
    return cls.GetSizeForKind(kind)

  def __init__(self,
               field,
               index,
               ordinal,
               original_field=None,
               sub_ordinal=None,
               linked_value_packed_field=None):
    """
    Args:
      field: the original field.
      index: the position of the original field in the struct.
      ordinal: the ordinal of the field for serialization.
      original_field: See below.
      sub_ordinal: See below.
      linked_value_packed_field: See below.

    original_field, sub_ordinal, and linked_value_packed_field are used to
    support nullable ValueKind fields. For legacy reasons, nullable ValueKind
    fields actually generate two PackedFields. This allows:

    - backwards compatibility prior to Mojo support for nullable ValueKinds.
    - correct packing of fields for the aforementioned backwards compatibility.

    When translating Fields to PackedFields, the original field is turned into
    two PackedFields: the first PackedField always has type mojom.BOOL, while
    the second PackedField has the non-nullable version of the field's kind.

    When constructing these PackedFields, original_field references the field
    as defined in the mojom; the name as defined in the mojom will be used for
    all layers above the wire/data layer.

    sub_ordinal is used to sort the two PackedFields correctly with respect to
    each other: the first mojom.BOOL field always has sub_ordinal 0, while the
    second field always has sub_ordinal 1.

    Finally, linked_value_packed_field is used by the serialization and
    deserialization helpers, which generally just iterate over a PackedStruct's
    PackedField's in ordinal order. This allows the helpers to easily reference
    any related PackedFields rather than having to lookup related PackedFields
    by index while iterating.
    """
    self.field = field
    self.index = index
    self.ordinal = ordinal
    self.original_field = original_field
    self.sub_ordinal = sub_ordinal
    self.linked_value_packed_field = linked_value_packed_field
    self.size = self.GetSizeForKind(self.field.kind)
    self.alignment = self.GetAlignmentForKind(self.field.kind)
    # offset/bit/min_version are filled in later by PackedStruct.
    self.offset = None
    self.bit = None
    self.min_version = None
|
||||
|
||||
|
||||
def GetPad(offset, alignment):
    """Return the padding needed so that |offset + pad| is a multiple of
    |alignment|."""
    # Python's modulo of a negated operand yields exactly the distance up to
    # the next multiple (0 when already aligned).
    return -offset % alignment
|
||||
|
||||
|
||||
def GetFieldOffset(field, last_field):
    """Return a (offset, bit) pair for |field| placed after |last_field|.

    The bit index is only meaningful for BOOL fields, which share a byte.
    """
    # Consecutive bools pack into the same byte until all 8 bits are used.
    both_bools = (field.field.kind == mojom.BOOL
                  and last_field.field.kind == mojom.BOOL)
    if both_bools and last_field.bit < 7:
        return (last_field.offset, last_field.bit + 1)

    unaligned = last_field.offset + last_field.size
    return (unaligned + GetPad(unaligned, field.alignment), 0)
|
||||
|
||||
|
||||
def GetPayloadSizeUpToField(field):
    """Return the payload size (struct header excluded) if |field| were the
    last field; 0 when |field| is falsy (no fields)."""
    if not field:
        return 0
    end = field.offset + field.size
    # Payloads are rounded up to 8-byte alignment.
    return end + GetPad(end, 8)
|
||||
|
||||
|
||||
def IsNullableValueKindPackedField(field):
    """Return True when |field| was derived from a nullable ValueKind field.

    Such fields often need special handling in the bindings because their
    wire representation is constrained for backward compatibility.
    """
    assert isinstance(field, PackedField)
    return field.sub_ordinal is not None
|
||||
|
||||
|
||||
def IsPrimaryNullableValueKindPackedField(field):
    """Return True when |field| is the "primary" PackedField derived from a
    nullable ValueKind mojom field.

    The primary field is the bool flag recording whether a value is present;
    it links to the PackedField carrying the actual value. Bindings layers
    use this to map the PackedField pair back onto the single logical field
    exposed to consumers.
    """
    assert isinstance(field, PackedField)
    return field.linked_value_packed_field is not None
|
||||
|
||||
|
||||
class PackedStruct:
  """Computes the packed (wire) layout of a mojom.Struct's fields."""

  def __init__(self, struct):
    self.struct = struct
    # |packed_fields| contains all the fields, in increasing offset order.
    self.packed_fields = []
    # |packed_fields_in_ordinal_order| refers to the same fields as
    # |packed_fields|, but in ordinal order.
    self.packed_fields_in_ordinal_order = []

    # No fields.
    if (len(struct.fields) == 0):
      return

    # Start by sorting by ordinal.
    src_fields = self.packed_fields_in_ordinal_order
    ordinal = 0
    for index, field in enumerate(struct.fields):
      if field.ordinal is not None:
        ordinal = field.ordinal
      # Nullable value types are a bit weird: they generate two PackedFields
      # despite being a single ValueKind. This is for wire compatibility to
      # ease the transition from legacy mojom syntax where nullable value types
      # were not supported.
      if isinstance(field.kind, mojom.ValueKind) and field.kind.is_nullable:
        # The suffixes intentionally use Unicode codepoints which are considered
        # valid C++/Java/JavaScript identifiers, yet are unlikely to be used in
        # actual user code.
        has_value_field = copy.copy(field)
        has_value_field.name = f'{field.mojom_name}_$flag'
        has_value_field.kind = mojom.BOOL

        value_field = copy.copy(field)
        value_field.name = f'{field.mojom_name}_$value'
        value_field.kind = field.kind.MakeUnnullableKind()

        value_packed_field = PackedField(value_field,
                                         index,
                                         ordinal,
                                         original_field=field,
                                         sub_ordinal=1,
                                         linked_value_packed_field=None)
        has_value_packed_field = PackedField(
            has_value_field,
            index,
            ordinal,
            original_field=field,
            sub_ordinal=0,
            linked_value_packed_field=value_packed_field)
        src_fields.append(has_value_packed_field)
        src_fields.append(value_packed_field)
      else:
        src_fields.append(PackedField(field, index, ordinal))
      ordinal += 1
    src_fields.sort(key=lambda field: (field.ordinal, field.sub_ordinal))

    # Set |min_version| for each field. min_version values must be
    # non-decreasing in ordinal order.
    next_min_version = 0
    for packed_field in src_fields:
      if packed_field.field.min_version is None:
        assert next_min_version == 0
      else:
        assert packed_field.field.min_version >= next_min_version
        next_min_version = packed_field.field.min_version
      packed_field.min_version = next_min_version

      if (packed_field.min_version != 0
          and mojom.IsReferenceKind(packed_field.field.kind)
          and not packed_field.field.kind.is_nullable):
        raise Exception(
            "Non-nullable reference fields are only allowed in version 0 of a "
            "struct. %s.%s is defined with [MinVersion=%d]." %
            (self.struct.name, packed_field.field.name,
             packed_field.min_version))

    # Seed the packed layout with the first field at offset 0.
    src_field = src_fields[0]
    src_field.offset = 0
    src_field.bit = 0
    dst_fields = self.packed_fields
    dst_fields.append(src_field)

    # Then find first slot that each field will fit.
    for src_field in src_fields[1:]:
      last_field = dst_fields[0]
      for i in range(1, len(dst_fields)):
        next_field = dst_fields[i]
        offset, bit = GetFieldOffset(src_field, last_field)
        if offset + src_field.size <= next_field.offset:
          # Found hole.
          src_field.offset = offset
          src_field.bit = bit
          dst_fields.insert(i, src_field)
          break
        last_field = next_field
      if src_field.offset is None:
        # Add to end
        src_field.offset, src_field.bit = GetFieldOffset(src_field, last_field)
        dst_fields.append(src_field)
|
||||
|
||||
|
||||
class ByteInfo:
    """Describes a single byte of a packed struct's payload."""

    def __init__(self):
        # True when no field occupies this byte.
        self.is_padding = False
        # PackedFields whose storage starts at this byte.
        self.packed_fields = []
|
||||
|
||||
|
||||
def GetByteLayout(packed_struct):
    """Return one ByteInfo per payload byte of |packed_struct|."""
    fields = packed_struct.packed_fields
    total = GetPayloadSizeUpToField(fields[-1] if fields else None)
    byte_info = [ByteInfo() for _ in range(total)]

    end_of_previous = 0
    for packed_field in fields:
        # Bytes between the previous field's end and this field are padding.
        for i in range(end_of_previous, packed_field.offset):
            byte_info[i].is_padding = True
        byte_info[packed_field.offset].packed_fields.append(packed_field)
        end_of_previous = packed_field.offset + packed_field.size

    # Trailing alignment bytes after the last field are padding too.
    for i in range(end_of_previous, len(byte_info)):
        byte_info[i].is_padding = True

    for byte in byte_info:
        # A given byte cannot both be padding and have a fields packed into it.
        assert not (byte.is_padding and byte.packed_fields)

    return byte_info
|
||||
|
||||
|
||||
class VersionInfo:
    """Aggregate field/byte counts for one [MinVersion] step of a struct."""

    def __init__(self, version, num_fields, num_packed_fields, num_bytes):
        self.version = version                      # The [MinVersion] value.
        self.num_fields = num_fields                # Mojom-visible field count.
        self.num_packed_fields = num_packed_fields  # Wire-level field count.
        self.num_bytes = num_bytes                  # Payload + header bytes.
|
||||
|
||||
|
||||
def GetVersionInfo(packed_struct):
  """Get version information for a struct.

  Args:
    packed_struct: A PackedStruct instance.

  Returns:
    A non-empty list of VersionInfo instances, sorted by version in increasing
    order.
    Note: The version numbers may not be consecutive.
  """
  versions = []
  last_version = 0
  last_num_fields = 0
  last_num_packed_fields = 0
  last_payload_size = 0

  for packed_field in packed_struct.packed_fields_in_ordinal_order:
    # Each time the min_version changes, close out the accumulated counts as
    # one VersionInfo entry.
    if packed_field.min_version != last_version:
      versions.append(
          VersionInfo(last_version, last_num_fields, last_num_packed_fields,
                      last_payload_size + HEADER_SIZE))
      last_version = packed_field.min_version

    # Nullable numeric fields (e.g. `int32?`) expand to two packed fields, so to
    # avoid double-counting, only increment if the field is:
    # - not used for representing a nullable value kind field, or
    # - the primary field representing the nullable value kind field.
    last_num_fields += 1 if (
        not IsNullableValueKindPackedField(packed_field)
        or IsPrimaryNullableValueKindPackedField(packed_field)) else 0

    last_num_packed_fields += 1

    # The fields are iterated in ordinal order here. However, the size of a
    # version is determined by the last field of that version in pack order,
    # instead of ordinal order. Therefore, we need to calculate the max value.
    last_payload_size = max(GetPayloadSizeUpToField(packed_field),
                            last_payload_size)

  # Sanity check: the final version must add at least one packed field.
  assert len(
      versions) == 0 or last_num_packed_fields != versions[-1].num_packed_fields
  versions.append(
      VersionInfo(last_version, last_num_fields, last_num_packed_fields,
                  last_payload_size + HEADER_SIZE))
  return versions
|
|
@ -0,0 +1,253 @@
|
|||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from mojom.generate import module as mojom
|
||||
from mojom.generate import pack
|
||||
|
||||
|
||||
class PackTest(unittest.TestCase):
|
||||
def testOrdinalOrder(self):
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('testfield1', mojom.INT32, 2)
|
||||
struct.AddField('testfield2', mojom.INT32, 1)
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
self.assertEqual(2, len(ps.packed_fields))
|
||||
self.assertEqual('testfield2', ps.packed_fields[0].field.mojom_name)
|
||||
self.assertEqual('testfield1', ps.packed_fields[1].field.mojom_name)
|
||||
|
||||
def testZeroFields(self):
|
||||
struct = mojom.Struct('test')
|
||||
ps = pack.PackedStruct(struct)
|
||||
self.assertEqual(0, len(ps.packed_fields))
|
||||
|
||||
def testOneField(self):
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('testfield1', mojom.INT8)
|
||||
ps = pack.PackedStruct(struct)
|
||||
self.assertEqual(1, len(ps.packed_fields))
|
||||
|
||||
def _CheckPackSequence(self, kinds, fields, offsets):
|
||||
"""Checks the pack order and offsets of a sequence of mojom.Kinds.
|
||||
|
||||
Args:
|
||||
kinds: A sequence of mojom.Kinds that specify the fields that are to be
|
||||
created.
|
||||
fields: The expected order of the resulting fields, with the integer "1"
|
||||
first.
|
||||
offsets: The expected order of offsets, with the integer "0" first.
|
||||
"""
|
||||
struct = mojom.Struct('test')
|
||||
index = 1
|
||||
for kind in kinds:
|
||||
struct.AddField('%d' % index, kind)
|
||||
index += 1
|
||||
ps = pack.PackedStruct(struct)
|
||||
num_fields = len(ps.packed_fields)
|
||||
self.assertEqual(len(kinds), num_fields)
|
||||
for i in range(num_fields):
|
||||
self.assertEqual('%d' % fields[i], ps.packed_fields[i].field.mojom_name)
|
||||
self.assertEqual(offsets[i], ps.packed_fields[i].offset)
|
||||
|
||||
def testPaddingPackedInOrder(self):
|
||||
return self._CheckPackSequence((mojom.INT8, mojom.UINT8, mojom.INT32),
|
||||
(1, 2, 3), (0, 1, 4))
|
||||
|
||||
def testPaddingPackedOutOfOrder(self):
|
||||
return self._CheckPackSequence((mojom.INT8, mojom.INT32, mojom.UINT8),
|
||||
(1, 3, 2), (0, 1, 4))
|
||||
|
||||
def testPaddingPackedOverflow(self):
|
||||
kinds = (mojom.INT8, mojom.INT32, mojom.INT16, mojom.INT8, mojom.INT8)
|
||||
# 2 bytes should be packed together first, followed by short, then by int.
|
||||
fields = (1, 4, 3, 2, 5)
|
||||
offsets = (0, 1, 2, 4, 8)
|
||||
return self._CheckPackSequence(kinds, fields, offsets)
|
||||
|
||||
def testNullableTypes(self):
|
||||
kinds = (mojom.STRING.MakeNullableKind(), mojom.HANDLE.MakeNullableKind(),
|
||||
mojom.Struct('test_struct').MakeNullableKind(),
|
||||
mojom.DCPIPE.MakeNullableKind(), mojom.Array().MakeNullableKind(),
|
||||
mojom.DPPIPE.MakeNullableKind(),
|
||||
mojom.Array(length=5).MakeNullableKind(),
|
||||
mojom.MSGPIPE.MakeNullableKind(),
|
||||
mojom.Interface('test_interface').MakeNullableKind(),
|
||||
mojom.SHAREDBUFFER.MakeNullableKind(),
|
||||
mojom.InterfaceRequest().MakeNullableKind())
|
||||
fields = (1, 2, 4, 3, 5, 6, 8, 7, 9, 10, 11)
|
||||
offsets = (0, 8, 12, 16, 24, 32, 36, 40, 48, 56, 60)
|
||||
return self._CheckPackSequence(kinds, fields, offsets)
|
||||
|
||||
def testAllTypes(self):
|
||||
return self._CheckPackSequence(
|
||||
(mojom.BOOL, mojom.INT8, mojom.STRING, mojom.UINT8, mojom.INT16,
|
||||
mojom.DOUBLE, mojom.UINT16, mojom.INT32, mojom.UINT32, mojom.INT64,
|
||||
mojom.FLOAT, mojom.STRING, mojom.HANDLE, mojom.UINT64,
|
||||
mojom.Struct('test'), mojom.Array(), mojom.STRING.MakeNullableKind()),
|
||||
(1, 2, 4, 5, 7, 3, 6, 8, 9, 10, 11, 13, 12, 14, 15, 16, 17, 18),
|
||||
(0, 1, 2, 4, 6, 8, 16, 24, 28, 32, 40, 44, 48, 56, 64, 72, 80, 88))
|
||||
|
||||
def testPaddingPackedOutOfOrderByOrdinal(self):
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('testfield1', mojom.INT8)
|
||||
struct.AddField('testfield3', mojom.UINT8, 3)
|
||||
struct.AddField('testfield2', mojom.INT32, 2)
|
||||
ps = pack.PackedStruct(struct)
|
||||
self.assertEqual(3, len(ps.packed_fields))
|
||||
|
||||
# Second byte should be packed in behind first, altering order.
|
||||
self.assertEqual('testfield1', ps.packed_fields[0].field.mojom_name)
|
||||
self.assertEqual('testfield3', ps.packed_fields[1].field.mojom_name)
|
||||
self.assertEqual('testfield2', ps.packed_fields[2].field.mojom_name)
|
||||
|
||||
# Second byte should be packed with first.
|
||||
self.assertEqual(0, ps.packed_fields[0].offset)
|
||||
self.assertEqual(1, ps.packed_fields[1].offset)
|
||||
self.assertEqual(4, ps.packed_fields[2].offset)
|
||||
|
||||
def testBools(self):
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('bit0', mojom.BOOL)
|
||||
struct.AddField('bit1', mojom.BOOL)
|
||||
struct.AddField('int', mojom.INT32)
|
||||
struct.AddField('bit2', mojom.BOOL)
|
||||
struct.AddField('bit3', mojom.BOOL)
|
||||
struct.AddField('bit4', mojom.BOOL)
|
||||
struct.AddField('bit5', mojom.BOOL)
|
||||
struct.AddField('bit6', mojom.BOOL)
|
||||
struct.AddField('bit7', mojom.BOOL)
|
||||
struct.AddField('bit8', mojom.BOOL)
|
||||
ps = pack.PackedStruct(struct)
|
||||
self.assertEqual(10, len(ps.packed_fields))
|
||||
|
||||
# First 8 bits packed together.
|
||||
for i in range(8):
|
||||
pf = ps.packed_fields[i]
|
||||
self.assertEqual(0, pf.offset)
|
||||
self.assertEqual("bit%d" % i, pf.field.mojom_name)
|
||||
self.assertEqual(i, pf.bit)
|
||||
|
||||
# Ninth bit goes into second byte.
|
||||
self.assertEqual("bit8", ps.packed_fields[8].field.mojom_name)
|
||||
self.assertEqual(1, ps.packed_fields[8].offset)
|
||||
self.assertEqual(0, ps.packed_fields[8].bit)
|
||||
|
||||
# int comes last.
|
||||
self.assertEqual("int", ps.packed_fields[9].field.mojom_name)
|
||||
self.assertEqual(4, ps.packed_fields[9].offset)
|
||||
|
||||
def testMinVersion(self):
|
||||
"""Tests that |min_version| is properly set for packed fields."""
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('field_2', mojom.BOOL, 2)
|
||||
struct.AddField('field_0', mojom.INT32, 0)
|
||||
struct.AddField('field_1', mojom.INT64, 1)
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
self.assertEqual('field_0', ps.packed_fields[0].field.mojom_name)
|
||||
self.assertEqual('field_2', ps.packed_fields[1].field.mojom_name)
|
||||
self.assertEqual('field_1', ps.packed_fields[2].field.mojom_name)
|
||||
|
||||
self.assertEqual(0, ps.packed_fields[0].min_version)
|
||||
self.assertEqual(0, ps.packed_fields[1].min_version)
|
||||
self.assertEqual(0, ps.packed_fields[2].min_version)
|
||||
|
||||
struct.fields[0].attributes = {'MinVersion': 1}
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
self.assertEqual(0, ps.packed_fields[0].min_version)
|
||||
self.assertEqual(1, ps.packed_fields[1].min_version)
|
||||
self.assertEqual(0, ps.packed_fields[2].min_version)
|
||||
|
||||
def testGetVersionInfoEmptyStruct(self):
|
||||
"""Tests that pack.GetVersionInfo() never returns an empty list, even for
|
||||
empty structs.
|
||||
"""
|
||||
struct = mojom.Struct('test')
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
versions = pack.GetVersionInfo(ps)
|
||||
self.assertEqual(1, len(versions))
|
||||
self.assertEqual(0, versions[0].version)
|
||||
self.assertEqual(0, versions[0].num_fields)
|
||||
self.assertEqual(8, versions[0].num_bytes)
|
||||
|
||||
def testGetVersionInfoComplexOrder(self):
|
||||
"""Tests pack.GetVersionInfo() using a struct whose definition order,
|
||||
ordinal order and pack order for fields are all different.
|
||||
"""
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField(
|
||||
'field_3', mojom.BOOL, ordinal=3, attributes={'MinVersion': 3})
|
||||
struct.AddField('field_0', mojom.INT32, ordinal=0)
|
||||
struct.AddField(
|
||||
'field_1', mojom.INT64, ordinal=1, attributes={'MinVersion': 2})
|
||||
struct.AddField(
|
||||
'field_2', mojom.INT64, ordinal=2, attributes={'MinVersion': 3})
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
versions = pack.GetVersionInfo(ps)
|
||||
self.assertEqual(3, len(versions))
|
||||
|
||||
self.assertEqual(0, versions[0].version)
|
||||
self.assertEqual(1, versions[0].num_fields)
|
||||
self.assertEqual(16, versions[0].num_bytes)
|
||||
|
||||
self.assertEqual(2, versions[1].version)
|
||||
self.assertEqual(2, versions[1].num_fields)
|
||||
self.assertEqual(24, versions[1].num_bytes)
|
||||
|
||||
self.assertEqual(3, versions[2].version)
|
||||
self.assertEqual(4, versions[2].num_fields)
|
||||
self.assertEqual(32, versions[2].num_bytes)
|
||||
|
||||
def testGetVersionInfoPackedStruct(self):
|
||||
"""Tests that pack.GetVersionInfo() correctly sets version, num_fields,
|
||||
and num_packed_fields for a packed struct.
|
||||
"""
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('field_0', mojom.BOOL, ordinal=0)
|
||||
struct.AddField('field_1',
|
||||
mojom.NULLABLE_BOOL,
|
||||
ordinal=1,
|
||||
attributes={'MinVersion': 1})
|
||||
struct.AddField('field_2',
|
||||
mojom.NULLABLE_BOOL,
|
||||
ordinal=2,
|
||||
attributes={'MinVersion': 2})
|
||||
ps = pack.PackedStruct(struct)
|
||||
versions = pack.GetVersionInfo(ps)
|
||||
|
||||
self.assertEqual(3, len(versions))
|
||||
self.assertEqual(0, versions[0].version)
|
||||
self.assertEqual(1, versions[1].version)
|
||||
self.assertEqual(2, versions[2].version)
|
||||
self.assertEqual(1, versions[0].num_fields)
|
||||
self.assertEqual(2, versions[1].num_fields)
|
||||
self.assertEqual(3, versions[2].num_fields)
|
||||
self.assertEqual(1, versions[0].num_packed_fields)
|
||||
self.assertEqual(3, versions[1].num_packed_fields)
|
||||
self.assertEqual(5, versions[2].num_packed_fields)
|
||||
|
||||
def testInterfaceAlignment(self):
|
||||
"""Tests that interfaces are aligned on 4-byte boundaries, although the size
|
||||
of an interface is 8 bytes.
|
||||
"""
|
||||
kinds = (mojom.INT32, mojom.Interface('test_interface'))
|
||||
fields = (1, 2)
|
||||
offsets = (0, 4)
|
||||
self._CheckPackSequence(kinds, fields, offsets)
|
||||
|
||||
def testAssociatedInterfaceAlignment(self):
|
||||
"""Tests that associated interfaces are aligned on 4-byte boundaries,
|
||||
although the size of an associated interface is 8 bytes.
|
||||
"""
|
||||
kinds = (mojom.INT32,
|
||||
mojom.AssociatedInterface(mojom.Interface('test_interface')))
|
||||
fields = (1, 2)
|
||||
offsets = (0, 4)
|
||||
self._CheckPackSequence(kinds, fields, offsets)
|
|
@ -0,0 +1,82 @@
|
|||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Based on third_party/WebKit/Source/build/scripts/template_expander.py.
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom import fileutil
|
||||
|
||||
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||
import jinja2
|
||||
|
||||
|
||||
def ApplyTemplate(mojo_generator, path_to_template, params, **kwargs):
|
||||
loader = jinja2.ModuleLoader(
|
||||
os.path.join(mojo_generator.bytecode_path,
|
||||
"%s.zip" % mojo_generator.GetTemplatePrefix()))
|
||||
final_kwargs = dict(mojo_generator.GetJinjaParameters())
|
||||
final_kwargs.update(kwargs)
|
||||
|
||||
jinja_env = jinja2.Environment(
|
||||
loader=loader, keep_trailing_newline=True, **final_kwargs)
|
||||
jinja_env.globals.update(mojo_generator.GetGlobals())
|
||||
jinja_env.filters.update(mojo_generator.GetFilters())
|
||||
template = jinja_env.get_template(path_to_template)
|
||||
return template.render(params)
|
||||
|
||||
|
||||
def UseJinja(path_to_template, **kwargs):
|
||||
def RealDecorator(generator):
|
||||
def GeneratorInternal(*args, **kwargs2):
|
||||
parameters = generator(*args, **kwargs2)
|
||||
return ApplyTemplate(args[0], path_to_template, parameters, **kwargs)
|
||||
|
||||
GeneratorInternal.__name__ = generator.__name__
|
||||
return GeneratorInternal
|
||||
|
||||
return RealDecorator
|
||||
|
||||
|
||||
def ApplyImportedTemplate(mojo_generator, path_to_template, filename, params,
|
||||
**kwargs):
|
||||
loader = jinja2.FileSystemLoader(searchpath=path_to_template)
|
||||
final_kwargs = dict(mojo_generator.GetJinjaParameters())
|
||||
final_kwargs.update(kwargs)
|
||||
|
||||
jinja_env = jinja2.Environment(
|
||||
loader=loader, keep_trailing_newline=True, **final_kwargs)
|
||||
jinja_env.globals.update(mojo_generator.GetGlobals())
|
||||
jinja_env.filters.update(mojo_generator.GetFilters())
|
||||
template = jinja_env.get_template(filename)
|
||||
return template.render(params)
|
||||
|
||||
|
||||
def UseJinjaForImportedTemplate(func):
|
||||
def wrapper(*args, **kwargs):
|
||||
parameters = func(*args, **kwargs)
|
||||
path_to_template = args[1]
|
||||
filename = args[2]
|
||||
return ApplyImportedTemplate(args[0], path_to_template, filename,
|
||||
parameters)
|
||||
|
||||
wrapper.__name__ = func.__name__
|
||||
return wrapper
|
||||
|
||||
|
||||
def PrecompileTemplates(generator_modules, output_dir):
|
||||
for module in generator_modules.values():
|
||||
generator = module.Generator(None)
|
||||
jinja_env = jinja2.Environment(
|
||||
loader=jinja2.FileSystemLoader([
|
||||
os.path.join(
|
||||
os.path.dirname(module.__file__), generator.GetTemplatePrefix())
|
||||
]))
|
||||
jinja_env.filters.update(generator.GetFilters())
|
||||
jinja_env.compile_templates(os.path.join(
|
||||
output_dir, "%s.zip" % generator.GetTemplatePrefix()),
|
||||
extensions=["tmpl"],
|
||||
zip="stored",
|
||||
ignore_errors=False)
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,141 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from mojom.generate import module as mojom
|
||||
from mojom.generate import translate
|
||||
from mojom.parse import ast
|
||||
|
||||
class TranslateTest(unittest.TestCase):
|
||||
"""Tests |parser.Parse()|."""
|
||||
|
||||
def testSimpleArray(self):
|
||||
"""Tests a simple int32[]."""
|
||||
# pylint: disable=W0212
|
||||
self.assertEquals(translate._MapKind("int32[]"), "a:i32")
|
||||
|
||||
def testAssociativeArray(self):
|
||||
"""Tests a simple uint8{string}."""
|
||||
# pylint: disable=W0212
|
||||
self.assertEquals(translate._MapKind("uint8{string}"), "m[s][u8]")
|
||||
|
||||
def testLeftToRightAssociativeArray(self):
|
||||
"""Makes sure that parsing is done from right to left on the internal kinds
|
||||
in the presence of an associative array."""
|
||||
# pylint: disable=W0212
|
||||
self.assertEquals(translate._MapKind("uint8[]{string}"), "m[s][a:u8]")
|
||||
|
||||
def testTranslateSimpleUnions(self):
|
||||
"""Makes sure that a simple union is translated correctly."""
|
||||
tree = ast.Mojom(None, ast.ImportList(), [
|
||||
ast.Union(
|
||||
"SomeUnion", None,
|
||||
ast.UnionBody([
|
||||
ast.UnionField("a", None, None, "int32"),
|
||||
ast.UnionField("b", None, None, "string")
|
||||
]))
|
||||
])
|
||||
|
||||
translation = translate.OrderedModule(tree, "mojom_tree", [])
|
||||
self.assertEqual(1, len(translation.unions))
|
||||
|
||||
union = translation.unions[0]
|
||||
self.assertTrue(isinstance(union, mojom.Union))
|
||||
self.assertEqual("SomeUnion", union.mojom_name)
|
||||
self.assertEqual(2, len(union.fields))
|
||||
self.assertEqual("a", union.fields[0].mojom_name)
|
||||
self.assertEqual(mojom.INT32.spec, union.fields[0].kind.spec)
|
||||
self.assertEqual("b", union.fields[1].mojom_name)
|
||||
self.assertEqual(mojom.STRING.spec, union.fields[1].kind.spec)
|
||||
|
||||
def testMapKindRaisesWithDuplicate(self):
|
||||
"""Verifies _MapTreeForType() raises when passed two values with the same
|
||||
name."""
|
||||
methods = [
|
||||
ast.Method('dup', None, None, ast.ParameterList(), None),
|
||||
ast.Method('dup', None, None, ast.ParameterList(), None)
|
||||
]
|
||||
with self.assertRaises(Exception):
|
||||
translate._ElemsOfType(methods, ast.Method, 'scope')
|
||||
|
||||
def testAssociatedKinds(self):
|
||||
"""Tests type spec translation of associated interfaces and requests."""
|
||||
# pylint: disable=W0212
|
||||
self.assertEquals(
|
||||
translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
|
||||
self.assertEquals(translate._MapKind("rca<SomeInterface>?"),
|
||||
"?rca:x:SomeInterface")
|
||||
|
||||
def testSelfRecursiveUnions(self):
|
||||
"""Verifies _UnionField() raises when a union is self-recursive."""
|
||||
tree = ast.Mojom(None, ast.ImportList(), [
|
||||
ast.Union("SomeUnion", None,
|
||||
ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion")]))
|
||||
])
|
||||
with self.assertRaises(Exception):
|
||||
translate.OrderedModule(tree, "mojom_tree", [])
|
||||
|
||||
tree = ast.Mojom(None, ast.ImportList(), [
|
||||
ast.Union(
|
||||
"SomeUnion", None,
|
||||
ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion?")]))
|
||||
])
|
||||
with self.assertRaises(Exception):
|
||||
translate.OrderedModule(tree, "mojom_tree", [])
|
||||
|
||||
def testDuplicateAttributesException(self):
|
||||
tree = ast.Mojom(None, ast.ImportList(), [
|
||||
ast.Union(
|
||||
"FakeUnion",
|
||||
ast.AttributeList([
|
||||
ast.Attribute("key1", "value"),
|
||||
ast.Attribute("key1", "value")
|
||||
]),
|
||||
ast.UnionBody([
|
||||
ast.UnionField("a", None, None, "int32"),
|
||||
ast.UnionField("b", None, None, "string")
|
||||
]))
|
||||
])
|
||||
with self.assertRaises(Exception):
|
||||
translate.OrderedModule(tree, "mojom_tree", [])
|
||||
|
||||
def testEnumWithReservedValues(self):
|
||||
"""Verifies that assigning reserved values to enumerators fails."""
|
||||
# -128 is reserved for the empty representation in WTF::HashTraits.
|
||||
tree = ast.Mojom(None, ast.ImportList(), [
|
||||
ast.Enum(
|
||||
"MyEnum", None,
|
||||
ast.EnumValueList([
|
||||
ast.EnumValue('kReserved', None, '-128'),
|
||||
]))
|
||||
])
|
||||
with self.assertRaises(Exception) as context:
|
||||
translate.OrderedModule(tree, "mojom_tree", [])
|
||||
self.assertIn("reserved for WTF::HashTrait", str(context.exception))
|
||||
|
||||
# -127 is reserved for the deleted representation in WTF::HashTraits.
|
||||
tree = ast.Mojom(None, ast.ImportList(), [
|
||||
ast.Enum(
|
||||
"MyEnum", None,
|
||||
ast.EnumValueList([
|
||||
ast.EnumValue('kReserved', None, '-127'),
|
||||
]))
|
||||
])
|
||||
with self.assertRaises(Exception) as context:
|
||||
translate.OrderedModule(tree, "mojom_tree", [])
|
||||
self.assertIn("reserved for WTF::HashTrait", str(context.exception))
|
||||
|
||||
# Implicitly assigning a reserved value should also fail.
|
||||
tree = ast.Mojom(None, ast.ImportList(), [
|
||||
ast.Enum(
|
||||
"MyEnum", None,
|
||||
ast.EnumValueList([
|
||||
ast.EnumValue('kNotReserved', None, '-129'),
|
||||
ast.EnumValue('kImplicitlyReserved', None, None),
|
||||
]))
|
||||
])
|
||||
with self.assertRaises(Exception) as context:
|
||||
translate.OrderedModule(tree, "mojom_tree", [])
|
||||
self.assertIn("reserved for WTF::HashTrait", str(context.exception))
|
462
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
Normal file
462
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
Normal file
|
@ -0,0 +1,462 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Node classes for the AST for a Mojo IDL file."""
|
||||
|
||||
# Note: For convenience of testing, you probably want to define __eq__() methods
|
||||
# for all node types; it's okay to be slightly lax (e.g., not compare filename
|
||||
# and lineno). You may also define __repr__() to help with analyzing test
|
||||
# failures, especially for more complex types.
|
||||
|
||||
import os.path
|
||||
|
||||
|
||||
# Instance of 'NodeListBase' has no '_list_item_type' member (no-member)
|
||||
# pylint: disable=no-member
|
||||
|
||||
|
||||
class NodeBase:
|
||||
"""Base class for nodes in the AST."""
|
||||
|
||||
def __init__(self, filename=None, lineno=None):
|
||||
self.filename = filename
|
||||
self.lineno = lineno
|
||||
|
||||
def __eq__(self, other):
|
||||
# We want strict comparison of the two object's types. Disable pylint's
|
||||
# insistence upon recommending isinstance().
|
||||
# pylint: disable=unidiomatic-typecheck
|
||||
return type(self) == type(other)
|
||||
|
||||
# Make != the inverse of ==. (Subclasses shouldn't have to override this.)
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
# TODO(vtl): Some of this is complicated enough that it should be tested.
|
||||
class NodeListBase(NodeBase):
|
||||
"""Represents a list of other nodes, all having the same type. (This is meant
|
||||
to be subclassed, with subclasses defining _list_item_type to be the class (or
|
||||
classes, in a tuple) of the members of the list.)"""
|
||||
|
||||
def __init__(self, item_or_items=None, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.items = []
|
||||
if item_or_items is None:
|
||||
pass
|
||||
elif isinstance(item_or_items, list):
|
||||
for item in item_or_items:
|
||||
assert isinstance(item, self._list_item_type)
|
||||
self.Append(item)
|
||||
else:
|
||||
assert isinstance(item_or_items, self._list_item_type)
|
||||
self.Append(item_or_items)
|
||||
|
||||
# Support iteration. For everything else, users should just access |items|
|
||||
# directly. (We intentionally do NOT supply |__len__()| or |__nonzero__()|, so
|
||||
# |bool(NodeListBase())| is true.)
|
||||
def __iter__(self):
|
||||
return self.items.__iter__()
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.items == other.items
|
||||
|
||||
# Implement this so that on failure, we get slightly more sensible output.
|
||||
def __repr__(self):
|
||||
return self.__class__.__name__ + "([" + \
|
||||
", ".join([repr(elem) for elem in self.items]) + "])"
|
||||
|
||||
def Insert(self, item):
|
||||
"""Inserts item at the front of the list."""
|
||||
|
||||
assert isinstance(item, self._list_item_type)
|
||||
self.items.insert(0, item)
|
||||
self._UpdateFilenameAndLineno()
|
||||
|
||||
def Append(self, item):
|
||||
"""Appends item to the end of the list."""
|
||||
|
||||
assert isinstance(item, self._list_item_type)
|
||||
self.items.append(item)
|
||||
self._UpdateFilenameAndLineno()
|
||||
|
||||
def _UpdateFilenameAndLineno(self):
|
||||
if self.items:
|
||||
self.filename = self.items[0].filename
|
||||
self.lineno = self.items[0].lineno
|
||||
|
||||
|
||||
class Definition(NodeBase):
|
||||
"""Represents a definition of anything that has a global name (e.g., enums,
|
||||
enum values, consts, structs, struct fields, interfaces). (This does not
|
||||
include parameter definitions.) This class is meant to be subclassed."""
|
||||
|
||||
def __init__(self, mojom_name, **kwargs):
|
||||
assert isinstance(mojom_name, str)
|
||||
NodeBase.__init__(self, **kwargs)
|
||||
self.mojom_name = mojom_name
|
||||
|
||||
|
||||
################################################################################
|
||||
|
||||
|
||||
class Attribute(NodeBase):
|
||||
"""Represents an attribute."""
|
||||
|
||||
def __init__(self, key, value, **kwargs):
|
||||
assert isinstance(key, str)
|
||||
super().__init__(**kwargs)
|
||||
self.key = key
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.key == other.key and \
|
||||
self.value == other.value
|
||||
|
||||
|
||||
class AttributeList(NodeListBase):
|
||||
"""Represents a list attributes."""
|
||||
|
||||
_list_item_type = Attribute
|
||||
|
||||
|
||||
class Const(Definition):
|
||||
"""Represents a const definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, typename, value, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
# The typename is currently passed through as a string.
|
||||
assert isinstance(typename, str)
|
||||
# The value is either a literal (currently passed through as a string) or a
|
||||
# "wrapped identifier".
|
||||
assert isinstance(value, (tuple, str))
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.typename = typename
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.typename == other.typename and \
|
||||
self.value == other.value
|
||||
|
||||
|
||||
class Enum(Definition):
|
||||
"""Represents an enum definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, enum_value_list, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert enum_value_list is None or isinstance(enum_value_list, EnumValueList)
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.enum_value_list = enum_value_list
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.enum_value_list == other.enum_value_list
|
||||
|
||||
|
||||
class EnumValue(Definition):
|
||||
"""Represents a definition of an enum value."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, value, **kwargs):
|
||||
# The optional value is either an int (which is current a string) or a
|
||||
# "wrapped identifier".
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert value is None or isinstance(value, (tuple, str))
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.value == other.value
|
||||
|
||||
|
||||
class EnumValueList(NodeListBase):
|
||||
"""Represents a list of enum value definitions (i.e., the "body" of an enum
|
||||
definition)."""
|
||||
|
||||
_list_item_type = EnumValue
|
||||
|
||||
|
||||
class Feature(Definition):
|
||||
"""Represents a runtime feature definition."""
|
||||
def __init__(self, mojom_name, attribute_list, body, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(body, FeatureBody) or body is None
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.body = body
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.body == other.body
|
||||
|
||||
def __repr__(self):
|
||||
return "Feature(mojom_name = %s, attribute_list = %s, body = %s)" % (
|
||||
self.mojom_name, self.attribute_list, self.body)
|
||||
|
||||
|
||||
# This needs to be declared after `FeatureConst` and `FeatureField`.
|
||||
class FeatureBody(NodeListBase):
|
||||
"""Represents the body of (i.e., list of definitions inside) a feature."""
|
||||
|
||||
# Features are compile time helpers so all fields are initializers/consts
|
||||
# for the underlying platform feature type.
|
||||
_list_item_type = (Const)
|
||||
|
||||
|
||||
class Import(NodeBase):
|
||||
"""Represents an import statement."""
|
||||
|
||||
def __init__(self, attribute_list, import_filename, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(import_filename, str)
|
||||
super().__init__(**kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
# TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
|
||||
self.import_filename = os.path.normpath(import_filename).replace('\\', '/')
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.import_filename == other.import_filename
|
||||
|
||||
|
||||
class ImportList(NodeListBase):
|
||||
"""Represents a list (i.e., sequence) of import statements."""
|
||||
|
||||
_list_item_type = Import
|
||||
|
||||
|
||||
class Interface(Definition):
|
||||
"""Represents an interface definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, body, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(body, InterfaceBody)
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.body = body
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.body == other.body
|
||||
|
||||
|
||||
class Method(Definition):
|
||||
"""Represents a method definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, ordinal, parameter_list,
|
||||
response_parameter_list, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert ordinal is None or isinstance(ordinal, Ordinal)
|
||||
assert isinstance(parameter_list, ParameterList)
|
||||
assert response_parameter_list is None or \
|
||||
isinstance(response_parameter_list, ParameterList)
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.ordinal = ordinal
|
||||
self.parameter_list = parameter_list
|
||||
self.response_parameter_list = response_parameter_list
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.ordinal == other.ordinal and \
|
||||
self.parameter_list == other.parameter_list and \
|
||||
self.response_parameter_list == other.response_parameter_list
|
||||
|
||||
|
||||
# This needs to be declared after |Method|.
|
||||
class InterfaceBody(NodeListBase):
|
||||
"""Represents the body of (i.e., list of definitions inside) an interface."""
|
||||
|
||||
_list_item_type = (Const, Enum, Method)
|
||||
|
||||
|
||||
class Module(NodeBase):
|
||||
"""Represents a module statement."""
|
||||
|
||||
def __init__(self, mojom_namespace, attribute_list, **kwargs):
|
||||
# |mojom_namespace| is either none or a "wrapped identifier".
|
||||
assert mojom_namespace is None or isinstance(mojom_namespace, tuple)
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
super().__init__(**kwargs)
|
||||
self.mojom_namespace = mojom_namespace
|
||||
self.attribute_list = attribute_list
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.mojom_namespace == other.mojom_namespace and \
|
||||
self.attribute_list == other.attribute_list
|
||||
|
||||
|
||||
class Mojom(NodeBase):
|
||||
"""Represents an entire .mojom file. (This is the root node.)"""
|
||||
|
||||
def __init__(self, module, import_list, definition_list, **kwargs):
|
||||
assert module is None or isinstance(module, Module)
|
||||
assert isinstance(import_list, ImportList)
|
||||
assert isinstance(definition_list, list)
|
||||
super().__init__(**kwargs)
|
||||
self.module = module
|
||||
self.import_list = import_list
|
||||
self.definition_list = definition_list
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.module == other.module and \
|
||||
self.import_list == other.import_list and \
|
||||
self.definition_list == other.definition_list
|
||||
|
||||
def __repr__(self):
|
||||
return "%s(%r, %r, %r)" % (self.__class__.__name__, self.module,
|
||||
self.import_list, self.definition_list)
|
||||
|
||||
|
||||
class Ordinal(NodeBase):
|
||||
"""Represents an ordinal value labeling, e.g., a struct field."""
|
||||
|
||||
def __init__(self, value, **kwargs):
|
||||
assert isinstance(value, int)
|
||||
super().__init__(**kwargs)
|
||||
self.value = value
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.value == other.value
|
||||
|
||||
|
||||
class Parameter(NodeBase):
|
||||
"""Represents a method request or response parameter."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
|
||||
assert isinstance(mojom_name, str)
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert ordinal is None or isinstance(ordinal, Ordinal)
|
||||
assert isinstance(typename, str)
|
||||
super().__init__(**kwargs)
|
||||
self.mojom_name = mojom_name
|
||||
self.attribute_list = attribute_list
|
||||
self.ordinal = ordinal
|
||||
self.typename = typename
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.mojom_name == other.mojom_name and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.ordinal == other.ordinal and \
|
||||
self.typename == other.typename
|
||||
|
||||
|
||||
class ParameterList(NodeListBase):
|
||||
"""Represents a list of (method request or response) parameters."""
|
||||
|
||||
_list_item_type = Parameter
|
||||
|
||||
|
||||
class Struct(Definition):
|
||||
"""Represents a struct definition."""
|
||||
|
||||
def __init__(self, mojom_name, attribute_list, body, **kwargs):
|
||||
assert attribute_list is None or isinstance(attribute_list, AttributeList)
|
||||
assert isinstance(body, StructBody) or body is None
|
||||
super().__init__(mojom_name, **kwargs)
|
||||
self.attribute_list = attribute_list
|
||||
self.body = body
|
||||
|
||||
def __eq__(self, other):
|
||||
return super().__eq__(other) and \
|
||||
self.attribute_list == other.attribute_list and \
|
||||
self.body == other.body
|
||||
|
||||
def __repr__(self):
|
||||
return "Struct(mojom_name = %s, attribute_list = %s, body = %s)" % (
|
||||
self.mojom_name, self.attribute_list, self.body)
|
||||
|
||||
|
||||
class StructField(Definition):
  """Represents a struct field definition."""

  def __init__(self, mojom_name, attribute_list, ordinal, typename,
               default_value, **kwargs):
    assert isinstance(mojom_name, str)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert isinstance(typename, str)
    # The optional default value is currently either a value as a string or a
    # "wrapped identifier".
    assert default_value is None or isinstance(default_value, (str, tuple))
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.typename = typename
    self.default_value = default_value

  def __eq__(self, other):
    # Defers to the superclass for the base comparison.
    return super().__eq__(other) and \
           self.attribute_list == other.attribute_list and \
           self.ordinal == other.ordinal and \
           self.typename == other.typename and \
           self.default_value == other.default_value

  def __repr__(self):
    # Fix: the format string previously lacked the closing ')' in the
    # rendered output, unlike Struct.__repr__.
    return ("StructField(mojom_name = %s, attribute_list = %s, ordinal = %s, "
            "typename = %s, default_value = %s)") % (
                self.mojom_name, self.attribute_list, self.ordinal,
                self.typename, self.default_value)
|
||||
|
||||
|
||||
# This needs to be declared after |StructField|.
class StructBody(NodeListBase):
  """Represents the body of (i.e., list of definitions inside) a struct."""

  # A struct body may contain nested consts, enums and fields.
  _list_item_type = (Const, Enum, StructField)
|
||||
|
||||
|
||||
class Union(Definition):
  """Represents a union definition."""

  def __init__(self, mojom_name, attribute_list, body, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    # Unlike Struct, a union body is mandatory.
    assert isinstance(body, UnionBody)
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.body = body

  def __eq__(self, other):
    # Defers to the superclass for the base comparison.
    return super().__eq__(other) and \
           self.attribute_list == other.attribute_list and \
           self.body == other.body
|
||||
|
||||
|
||||
class UnionField(Definition):
  """Represents a union field definition."""

  def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
    assert isinstance(mojom_name, str)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert isinstance(typename, str)
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.typename = typename

  def __eq__(self, other):
    # Defers to the superclass for the base comparison.
    return super().__eq__(other) and \
           self.attribute_list == other.attribute_list and \
           self.ordinal == other.ordinal and \
           self.typename == other.typename
|
||||
|
||||
|
||||
class UnionBody(NodeListBase):
  """Represents the body of (i.e., list of fields inside) a union."""

  _list_item_type = UnionField
|
|
@ -0,0 +1,115 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from mojom.parse import ast
|
||||
|
||||
class _TestNode(ast.NodeBase):
  """Node type for tests."""

  def __init__(self, value, **kwargs):
    super().__init__(**kwargs)
    self.value = value

  def __eq__(self, other):
    # Defer to NodeBase.__eq__ (type comparison) before comparing the payload.
    return super().__eq__(other) and self.value == other.value
|
||||
|
||||
class _TestNodeList(ast.NodeListBase):
  """Node list type for tests."""

  _list_item_type = _TestNode
|
||||
|
||||
class ASTTest(unittest.TestCase):
  """Tests various AST classes."""

  def testNodeBase(self):
    """Tests |ast.NodeBase| equality and filename/lineno handling."""
    # Test |__eq__()|; this is only used for testing, where we want to do
    # comparison by value and ignore filenames/line numbers (for convenience).
    node1 = ast.NodeBase(filename="hello.mojom", lineno=123)
    node2 = ast.NodeBase()
    # Deprecated assertEquals/assertNotEquals aliases (removed in Python 3.12)
    # replaced with assertEqual/assertNotEqual throughout.
    self.assertEqual(node1, node2)
    self.assertEqual(node2, node1)

    # Check that |__ne__()| just defers to |__eq__()| properly.
    self.assertFalse(node1 != node2)
    self.assertFalse(node2 != node1)

    # Check that |filename| and |lineno| are set properly (and are None by
    # default).
    self.assertEqual(node1.filename, "hello.mojom")
    self.assertEqual(node1.lineno, 123)
    self.assertIsNone(node2.filename)
    self.assertIsNone(node2.lineno)

    # |NodeBase|'s |__eq__()| should compare types (and a subclass's |__eq__()|
    # should first defer to its superclass's).
    node3 = _TestNode(123)
    self.assertNotEqual(node1, node3)
    self.assertNotEqual(node3, node1)
    # Also test |__eq__()| directly.
    self.assertFalse(node1 == node3)
    self.assertFalse(node3 == node1)

    node4 = _TestNode(123, filename="world.mojom", lineno=123)
    self.assertEqual(node4, node3)
    node5 = _TestNode(456)
    self.assertNotEqual(node5, node4)

  def testNodeListBase(self):
    """Tests |ast.NodeListBase| construction, Append/Insert and iteration."""
    node1 = _TestNode(1, filename="foo.mojom", lineno=1)
    # Equal to, but not the same as, |node1|:
    node1b = _TestNode(1, filename="foo.mojom", lineno=1)
    node2 = _TestNode(2, filename="foo.mojom", lineno=2)

    nodelist1 = _TestNodeList()  # Contains: (empty).
    self.assertEqual(nodelist1, nodelist1)
    self.assertEqual(nodelist1.items, [])
    self.assertIsNone(nodelist1.filename)
    self.assertIsNone(nodelist1.lineno)

    nodelist2 = _TestNodeList(node1)  # Contains: 1.
    self.assertEqual(nodelist2, nodelist2)
    self.assertEqual(nodelist2.items, [node1])
    self.assertNotEqual(nodelist2, nodelist1)
    self.assertEqual(nodelist2.filename, "foo.mojom")
    self.assertEqual(nodelist2.lineno, 1)

    nodelist3 = _TestNodeList([node2])  # Contains: 2.
    self.assertEqual(nodelist3.items, [node2])
    self.assertNotEqual(nodelist3, nodelist1)
    self.assertNotEqual(nodelist3, nodelist2)
    self.assertEqual(nodelist3.filename, "foo.mojom")
    self.assertEqual(nodelist3.lineno, 2)

    nodelist1.Append(node1b)  # Contains: 1.
    self.assertEqual(nodelist1.items, [node1])
    self.assertEqual(nodelist1, nodelist2)
    self.assertNotEqual(nodelist1, nodelist3)
    self.assertEqual(nodelist1.filename, "foo.mojom")
    self.assertEqual(nodelist1.lineno, 1)

    nodelist1.Append(node2)  # Contains: 1, 2.
    self.assertEqual(nodelist1.items, [node1, node2])
    self.assertNotEqual(nodelist1, nodelist2)
    self.assertNotEqual(nodelist1, nodelist3)
    self.assertEqual(nodelist1.lineno, 1)

    nodelist2.Append(node2)  # Contains: 1, 2.
    self.assertEqual(nodelist2.items, [node1, node2])
    self.assertEqual(nodelist2, nodelist1)
    self.assertNotEqual(nodelist2, nodelist3)
    self.assertEqual(nodelist2.lineno, 1)

    nodelist3.Insert(node1)  # Contains: 1, 2.
    self.assertEqual(nodelist3.items, [node1, node2])
    self.assertEqual(nodelist3, nodelist1)
    self.assertEqual(nodelist3, nodelist2)
    self.assertEqual(nodelist3.lineno, 1)

    # Test iteration:
    i = 1
    for item in nodelist1:
      self.assertEqual(item.value, i)
      i += 1
|
|
@ -0,0 +1,83 @@
|
|||
# Copyright 2018 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Helpers for processing conditionally enabled features in a mojom."""
|
||||
|
||||
from mojom.error import Error
|
||||
from mojom.parse import ast
|
||||
|
||||
|
||||
class EnableIfError(Error):
  """Error raised for invalid EnableIf/EnableIfNot attribute usage."""

  def __init__(self, filename, message, lineno=None):
    Error.__init__(self, filename, message, lineno=lineno, addenda=None)
|
||||
|
||||
|
||||
def _IsEnabled(definition, enabled_features):
|
||||
"""Returns true if a definition is enabled.
|
||||
|
||||
A definition is enabled if it has no EnableIf/EnableIfNot attribute.
|
||||
It is retained if it has an EnableIf attribute and the attribute is in
|
||||
enabled_features. It is retained if it has an EnableIfNot attribute and the
|
||||
attribute is not in enabled features.
|
||||
"""
|
||||
if not hasattr(definition, "attribute_list"):
|
||||
return True
|
||||
if not definition.attribute_list:
|
||||
return True
|
||||
|
||||
already_defined = False
|
||||
for a in definition.attribute_list:
|
||||
if a.key == 'EnableIf' or a.key == 'EnableIfNot':
|
||||
if already_defined:
|
||||
raise EnableIfError(
|
||||
definition.filename,
|
||||
"EnableIf/EnableIfNot attribute may only be set once per field.",
|
||||
definition.lineno)
|
||||
already_defined = True
|
||||
|
||||
for attribute in definition.attribute_list:
|
||||
if attribute.key == 'EnableIf' and attribute.value not in enabled_features:
|
||||
return False
|
||||
if attribute.key == 'EnableIfNot' and attribute.value in enabled_features:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def _FilterDisabledFromNodeList(node_list, enabled_features):
  """Filters a NodeListBase in place, dropping disabled items, then recurses
  into the surviving items to filter their own bodies."""
  if not node_list:
    return
  assert isinstance(node_list, ast.NodeListBase)
  node_list.items = [
      item for item in node_list.items if _IsEnabled(item, enabled_features)
  ]
  for item in node_list.items:
    _FilterDefinition(item, enabled_features)
|
||||
|
||||
|
||||
def _FilterDefinition(definition, enabled_features):
  """Filters definitions with a body."""
  if isinstance(definition, ast.Enum):
    _FilterDisabledFromNodeList(definition.enum_value_list, enabled_features)
  elif isinstance(definition, ast.Method):
    # Filter both directions of a method: request and response parameters.
    _FilterDisabledFromNodeList(definition.parameter_list, enabled_features)
    _FilterDisabledFromNodeList(definition.response_parameter_list,
                                enabled_features)
  elif isinstance(definition,
                  (ast.Interface, ast.Struct, ast.Union, ast.Feature)):
    _FilterDisabledFromNodeList(definition.body, enabled_features)
|
||||
|
||||
|
||||
def RemoveDisabledDefinitions(mojom, enabled_features):
  """Removes conditionally disabled definitions from a Mojom node."""
  # Imports are filtered but not recursed into (they have no body here).
  mojom.import_list = ast.ImportList([
      imported_file for imported_file in mojom.import_list
      if _IsEnabled(imported_file, enabled_features)
  ])
  mojom.definition_list = [
      definition for definition in mojom.definition_list
      if _IsEnabled(definition, enabled_features)
  ]
  for definition in mojom.definition_list:
    _FilterDefinition(definition, enabled_features)
|
|
@ -0,0 +1,376 @@
|
|||
# Copyright 2018 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import importlib.util
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    # If |dirname| is never found, |tail| becomes empty at the filesystem
    # root and this assert fires rather than looping forever.
    assert tail
    if tail == dirname:
      return path
|
||||
|
||||
try:
|
||||
importlib.util.find_spec("mojom")
|
||||
except ImportError:
|
||||
sys.path.append(os.path.join(_GetDirAbove('pylib'), 'pylib'))
|
||||
import mojom.parse.ast as ast
|
||||
import mojom.parse.conditional_features as conditional_features
|
||||
import mojom.parse.parser as parser
|
||||
|
||||
ENABLED_FEATURES = frozenset({'red', 'green', 'blue'})
|
||||
|
||||
class ConditionalFeaturesTest(unittest.TestCase):
  """Tests |mojom.parse.conditional_features|."""

  # Fixes applied: three test methods were all named testMultipleEnableIfs,
  # so the first two were silently shadowed and never ran; they now have
  # distinct names. The deprecated assertEquals alias (removed in Python
  # 3.12) is replaced with assertEqual.

  def parseAndAssertEqual(self, source, expected_source):
    """Parses |source|, filters it against ENABLED_FEATURES, and asserts the
    result equals the parse of |expected_source|."""
    definition = parser.Parse(source, "my_file.mojom")
    conditional_features.RemoveDisabledDefinitions(definition, ENABLED_FEATURES)
    expected = parser.Parse(expected_source, "my_file.mojom")
    self.assertEqual(definition, expected)

  def testFilterConst(self):
    """Test that Consts are correctly filtered."""
    const_source = """
      [EnableIf=blue]
      const int kMyConst1 = 1;
      [EnableIf=orange]
      const double kMyConst2 = 2;
      const int kMyConst3 = 3;
    """
    expected_source = """
      [EnableIf=blue]
      const int kMyConst1 = 1;
      const int kMyConst3 = 3;
    """
    self.parseAndAssertEqual(const_source, expected_source)

  def testFilterIfNotConst(self):
    """Test that Consts are correctly filtered."""
    const_source = """
      [EnableIfNot=blue]
      const int kMyConst1 = 1;
      [EnableIfNot=orange]
      const double kMyConst2 = 2;
      [EnableIf=blue]
      const int kMyConst3 = 3;
      [EnableIfNot=blue]
      const int kMyConst4 = 4;
      [EnableIfNot=purple]
      const int kMyConst5 = 5;
    """
    expected_source = """
      [EnableIfNot=orange]
      const double kMyConst2 = 2;
      [EnableIf=blue]
      const int kMyConst3 = 3;
      [EnableIfNot=purple]
      const int kMyConst5 = 5;
    """
    self.parseAndAssertEqual(const_source, expected_source)

  def testFilterIfNotMultipleConst(self):
    """Test that Consts are correctly filtered."""
    const_source = """
      [EnableIfNot=blue]
      const int kMyConst1 = 1;
      [EnableIfNot=orange]
      const double kMyConst2 = 2;
      [EnableIfNot=orange]
      const int kMyConst3 = 3;
    """
    expected_source = """
      [EnableIfNot=orange]
      const double kMyConst2 = 2;
      [EnableIfNot=orange]
      const int kMyConst3 = 3;
    """
    self.parseAndAssertEqual(const_source, expected_source)

  def testFilterEnum(self):
    """Test that EnumValues are correctly filtered from an Enum."""
    enum_source = """
      enum MyEnum {
        [EnableIf=purple]
        VALUE1,
        [EnableIf=blue]
        VALUE2,
        VALUE3,
      };
    """
    expected_source = """
      enum MyEnum {
        [EnableIf=blue]
        VALUE2,
        VALUE3
      };
    """
    self.parseAndAssertEqual(enum_source, expected_source)

  def testFilterImport(self):
    """Test that imports are correctly filtered from a Mojom."""
    import_source = """
      [EnableIf=blue]
      import "foo.mojom";
      import "bar.mojom";
      [EnableIf=purple]
      import "baz.mojom";
    """
    expected_source = """
      [EnableIf=blue]
      import "foo.mojom";
      import "bar.mojom";
    """
    self.parseAndAssertEqual(import_source, expected_source)

  def testFilterIfNotImport(self):
    """Test that imports are correctly filtered from a Mojom."""
    import_source = """
      [EnableIf=blue]
      import "foo.mojom";
      [EnableIfNot=purple]
      import "bar.mojom";
      [EnableIfNot=green]
      import "baz.mojom";
    """
    expected_source = """
      [EnableIf=blue]
      import "foo.mojom";
      [EnableIfNot=purple]
      import "bar.mojom";
    """
    self.parseAndAssertEqual(import_source, expected_source)

  def testFilterInterface(self):
    """Test that definitions are correctly filtered from an Interface."""
    interface_source = """
      interface MyInterface {
        [EnableIf=blue]
        enum MyEnum {
          [EnableIf=purple]
          VALUE1,
          VALUE2,
        };
        [EnableIf=blue]
        const int32 kMyConst = 123;
        [EnableIf=purple]
        MyMethod();
      };
    """
    expected_source = """
      interface MyInterface {
        [EnableIf=blue]
        enum MyEnum {
          VALUE2,
        };
        [EnableIf=blue]
        const int32 kMyConst = 123;
      };
    """
    self.parseAndAssertEqual(interface_source, expected_source)

  def testFilterMethod(self):
    """Test that Parameters are correctly filtered from a Method."""
    method_source = """
      interface MyInterface {
        [EnableIf=blue]
        MyMethod([EnableIf=purple] int32 a) => ([EnableIf=red] int32 b);
      };
    """
    expected_source = """
      interface MyInterface {
        [EnableIf=blue]
        MyMethod() => ([EnableIf=red] int32 b);
      };
    """
    self.parseAndAssertEqual(method_source, expected_source)

  def testFilterStruct(self):
    """Test that definitions are correctly filtered from a Struct."""
    struct_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
          [EnableIf=purple]
          VALUE2,
        };
        [EnableIf=yellow]
        const double kMyConst = 1.23;
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIf=purple]
        int32 c;
        [EnableIf=blue]
        double d;
        int32 e;
        [EnableIf=orange]
        double f;
      };
    """
    expected_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
        };
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIf=blue]
        double d;
        int32 e;
      };
    """
    self.parseAndAssertEqual(struct_source, expected_source)

  def testFilterIfNotStruct(self):
    """Test that definitions are correctly filtered from a Struct."""
    struct_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
          [EnableIfNot=red]
          VALUE2,
        };
        [EnableIfNot=yellow]
        const double kMyConst = 1.23;
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIfNot=purple]
        int32 c;
        [EnableIf=blue]
        double d;
        int32 e;
        [EnableIfNot=red]
        double f;
      };
    """
    expected_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
        };
        [EnableIfNot=yellow]
        const double kMyConst = 1.23;
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIfNot=purple]
        int32 c;
        [EnableIf=blue]
        double d;
        int32 e;
      };
    """
    self.parseAndAssertEqual(struct_source, expected_source)

  def testFilterUnion(self):
    """Test that UnionFields are correctly filtered from a Union."""
    union_source = """
      union MyUnion {
        [EnableIf=yellow]
        int32 a;
        [EnableIf=red]
        bool b;
      };
    """
    expected_source = """
      union MyUnion {
        [EnableIf=red]
        bool b;
      };
    """
    self.parseAndAssertEqual(union_source, expected_source)

  def testSameNameFields(self):
    """Test that alternates guarded by mutually exclusive features work."""
    mojom_source = """
      enum Foo {
        [EnableIf=red]
        VALUE1 = 5,
        [EnableIf=yellow]
        VALUE1 = 6,
      };
      [EnableIf=red]
      const double kMyConst = 1.23;
      [EnableIf=yellow]
      const double kMyConst = 4.56;
    """
    expected_source = """
      enum Foo {
        [EnableIf=red]
        VALUE1 = 5,
      };
      [EnableIf=red]
      const double kMyConst = 1.23;
    """
    self.parseAndAssertEqual(mojom_source, expected_source)

  def testFeaturesWithEnableIf(self):
    """Test that Feature bodies are correctly filtered."""
    mojom_source = """
      feature Foo {
        const string name = "FooFeature";
        [EnableIf=red]
        const bool default_state = false;
        [EnableIf=yellow]
        const bool default_state = true;
      };
    """
    expected_source = """
      feature Foo {
        const string name = "FooFeature";
        [EnableIf=red]
        const bool default_state = false;
      };
    """
    self.parseAndAssertEqual(mojom_source, expected_source)

  def testMultipleEnableIfs(self):
    """Two EnableIf attributes on one field must raise EnableIfError."""
    source = """
      enum Foo {
        [EnableIf=red,EnableIf=yellow]
        kBarValue = 5,
      };
    """
    definition = parser.Parse(source, "my_file.mojom")
    self.assertRaises(conditional_features.EnableIfError,
                      conditional_features.RemoveDisabledDefinitions,
                      definition, ENABLED_FEATURES)

  def testEnableIfWithEnableIfNot(self):
    """EnableIf combined with EnableIfNot must raise EnableIfError."""
    source = """
      enum Foo {
        [EnableIf=red,EnableIfNot=yellow]
        kBarValue = 5,
      };
    """
    definition = parser.Parse(source, "my_file.mojom")
    self.assertRaises(conditional_features.EnableIfError,
                      conditional_features.RemoveDisabledDefinitions,
                      definition, ENABLED_FEATURES)

  def testMultipleEnableIfNots(self):
    """Two EnableIfNot attributes on one field must raise EnableIfError."""
    source = """
      enum Foo {
        [EnableIfNot=red,EnableIfNot=yellow]
        kBarValue = 5,
      };
    """
    definition = parser.Parse(source, "my_file.mojom")
    self.assertRaises(conditional_features.EnableIfError,
                      conditional_features.RemoveDisabledDefinitions,
                      definition, ENABLED_FEATURES)
|
||||
|
||||
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
249
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
Normal file
249
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
Normal file
|
@ -0,0 +1,249 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom import fileutil
|
||||
from mojom.error import Error
|
||||
|
||||
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||
from ply.lex import TOKEN
|
||||
|
||||
|
||||
class LexError(Error):
  """Class for errors from the lexer."""

  def __init__(self, filename, message, lineno):
    Error.__init__(self, filename, message, lineno=lineno)
|
||||
|
||||
|
||||
# We have methods which look like they could be functions:
# pylint: disable=R0201
class Lexer:
  """PLY-based lexer for the mojom IDL.

  NOTE: with PLY, the docstrings of t_* methods and the @TOKEN decorator
  arguments ARE the token regexes — they are functional, not documentation,
  and the order of the t_* function definitions determines match priority.
  """

  def __init__(self, filename):
    # |filename| is only used for error reporting.
    self.filename = filename

  ######################--   PRIVATE   --######################

  ##
  ## Internal auxiliary methods
  ##
  def _error(self, msg, token):
    raise LexError(self.filename, msg, token.lineno)

  ##
  ## Reserved keywords
  ##
  keywords = (
      'HANDLE',
      'IMPORT',
      'MODULE',
      'STRUCT',
      'UNION',
      'INTERFACE',
      'ENUM',
      'CONST',
      'TRUE',
      'FALSE',
      'DEFAULT',
      'ARRAY',
      'MAP',
      'ASSOCIATED',
      'PENDING_REMOTE',
      'PENDING_RECEIVER',
      'PENDING_ASSOCIATED_REMOTE',
      'PENDING_ASSOCIATED_RECEIVER',
      'FEATURE',
  )

  # Maps the lower-case keyword text to its token type, e.g. 'struct' ->
  # 'STRUCT'; used by t_NAME to reclassify identifiers.
  keyword_map = {}
  for keyword in keywords:
    keyword_map[keyword.lower()] = keyword

  ##
  ## All the tokens recognized by the lexer
  ##
  tokens = keywords + (
      # Identifiers
      'NAME',

      # Constants
      'ORDINAL',
      'INT_CONST_DEC',
      'INT_CONST_HEX',
      'FLOAT_CONST',

      # String literals
      'STRING_LITERAL',

      # Operators
      'MINUS',
      'PLUS',
      'QSTN',

      # Assignment
      'EQUALS',

      # Request / response
      'RESPONSE',

      # Delimiters
      'LPAREN',
      'RPAREN',  # ( )
      'LBRACKET',
      'RBRACKET',  # [ ]
      'LBRACE',
      'RBRACE',  # { }
      'LANGLE',
      'RANGLE',  # < >
      'SEMI',  # ;
      'COMMA',
      'DOT'  # , .
  )

  ##
  ## Regexes for use in tokens
  ##

  # valid C identifiers (K&R2: A.2.3)
  identifier = r'[a-zA-Z_][0-9a-zA-Z_]*'

  hex_prefix = '0[xX]'
  hex_digits = '[0-9a-fA-F]+'

  # integer constants (K&R2: A.2.5.1)
  decimal_constant = '0|([1-9][0-9]*)'
  hex_constant = hex_prefix + hex_digits
  # Don't allow octal constants (even invalid octal).
  octal_constant_disallowed = '0[0-9]+'

  # character constants (K&R2: A.2.5.2)
  # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
  # directives with Windows paths as filenames (..\..\dir\file)
  # For the same reason, decimal_escape allows all digit sequences. We want to
  # parse all correct code, even if it means to sometimes parse incorrect
  # code.
  #
  simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
  decimal_escape = r"""(\d+)"""
  hex_escape = r"""(x[0-9a-fA-F]+)"""
  bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""

  escape_sequence = \
      r"""(\\(""" + simple_escape + '|' + decimal_escape + '|' + hex_escape + '))'

  # string literals (K&R2: A.2.6)
  string_char = r"""([^"\\\n]|""" + escape_sequence + ')'
  string_literal = '"' + string_char + '*"'
  bad_string_literal = '"' + string_char + '*' + bad_escape + string_char + '*"'

  # floating constants (K&R2: A.2.5.3)
  exponent_part = r"""([eE][-+]?[0-9]+)"""
  fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
  floating_constant = \
      '((((' + fractional_constant + ')' + \
      exponent_part + '?)|([0-9]+' + exponent_part + ')))'

  # Ordinals
  ordinal = r'@[0-9]+'
  missing_ordinal_value = r'@'
  # Don't allow ordinal values in octal (even invalid octal, like 09) or
  # hexadecimal.
  octal_or_hex_ordinal_disallowed = (
      r'@((0[0-9]+)|(' + hex_prefix + hex_digits + '))')

  ##
  ## Rules for the normal state
  ##
  t_ignore = ' \t\r'

  # Newlines
  def t_NEWLINE(self, t):
    r'\n+'
    t.lexer.lineno += len(t.value)

  # Operators
  t_MINUS = r'-'
  t_PLUS = r'\+'
  t_QSTN = r'\?'

  # =
  t_EQUALS = r'='

  # =>
  t_RESPONSE = r'=>'

  # Delimiters
  t_LPAREN = r'\('
  t_RPAREN = r'\)'
  t_LBRACKET = r'\['
  t_RBRACKET = r'\]'
  t_LBRACE = r'\{'
  t_RBRACE = r'\}'
  t_LANGLE = r'<'
  t_RANGLE = r'>'
  t_COMMA = r','
  t_DOT = r'\.'
  t_SEMI = r';'

  t_STRING_LITERAL = string_literal

  # The following floating and integer constants are defined as
  # functions to impose a strict order (otherwise, decimal
  # is placed before the others because its regex is longer,
  # and this is bad)
  #
  @TOKEN(floating_constant)
  def t_FLOAT_CONST(self, t):
    return t

  @TOKEN(hex_constant)
  def t_INT_CONST_HEX(self, t):
    return t

  @TOKEN(octal_constant_disallowed)
  def t_OCTAL_CONSTANT_DISALLOWED(self, t):
    msg = "Octal values not allowed"
    self._error(msg, t)

  @TOKEN(decimal_constant)
  def t_INT_CONST_DEC(self, t):
    return t

  # unmatched string literals are caught by the preprocessor

  @TOKEN(bad_string_literal)
  def t_BAD_STRING_LITERAL(self, t):
    msg = "String contains invalid escape code"
    self._error(msg, t)

  # Handle ordinal-related tokens in the right order:
  @TOKEN(octal_or_hex_ordinal_disallowed)
  def t_OCTAL_OR_HEX_ORDINAL_DISALLOWED(self, t):
    msg = "Octal and hexadecimal ordinal values not allowed"
    self._error(msg, t)

  @TOKEN(ordinal)
  def t_ORDINAL(self, t):
    return t

  @TOKEN(missing_ordinal_value)
  def t_BAD_ORDINAL(self, t):
    msg = "Missing ordinal value"
    self._error(msg, t)

  @TOKEN(identifier)
  def t_NAME(self, t):
    # Reclassify identifiers that are reserved keywords.
    t.type = self.keyword_map.get(t.value, "NAME")
    return t

  # Ignore C and C++ style comments
  def t_COMMENT(self, t):
    r'(/\*(.|\n)*?\*/)|(//.*(\n[ \t]*//.*)*)'
    t.lexer.lineno += t.value.count("\n")

  def t_error(self, t):
    msg = "Illegal character %s" % repr(t.value[0])
    self._error(msg, t)
|
|
@ -0,0 +1,194 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import importlib.util
|
||||
import os.path
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    # If |dirname| is never found, |tail| becomes empty at the filesystem
    # root and this assert fires rather than looping forever.
    assert tail
    if tail == dirname:
      return path
|
||||
|
||||
sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
|
||||
from ply import lex
|
||||
|
||||
try:
|
||||
importlib.util.find_spec("mojom")
|
||||
except ImportError:
|
||||
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
|
||||
import mojom.parse.lexer
|
||||
|
||||
# This (monkey-patching LexToken to make comparison value-based) is evil, but
|
||||
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
|
||||
# for object identity.)
|
||||
def _LexTokenEq(self, other):
|
||||
return self.type == other.type and self.value == other.value and \
|
||||
self.lineno == other.lineno and self.lexpos == other.lexpos
|
||||
|
||||
|
||||
setattr(lex.LexToken, '__eq__', _LexTokenEq)
|
||||
|
||||
|
||||
def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
  rv = lex.LexToken()
  rv.type, rv.value, rv.lineno, rv.lexpos = token_type, value, lineno, lexpos
  return rv
|
||||
|
||||
|
||||
def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword (type is the upper-case keyword,
  value its lower-case spelling)."""
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
|
||||
|
||||
|
||||
class LexerTest(unittest.TestCase):
|
||||
"""Tests |mojom.parse.lexer.Lexer|."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
unittest.TestCase.__init__(self, *args, **kwargs)
|
||||
# Clone all lexer instances from this one, since making a lexer is slow.
|
||||
self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
|
||||
|
||||
def testValidKeywords(self):
|
||||
"""Tests valid keywords."""
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("handle"), _MakeLexTokenForKeyword("handle"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("import"), _MakeLexTokenForKeyword("import"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("module"), _MakeLexTokenForKeyword("module"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("struct"), _MakeLexTokenForKeyword("struct"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("union"), _MakeLexTokenForKeyword("union"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("interface"),
|
||||
_MakeLexTokenForKeyword("interface"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("enum"), _MakeLexTokenForKeyword("enum"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("const"), _MakeLexTokenForKeyword("const"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("true"), _MakeLexTokenForKeyword("true"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("false"), _MakeLexTokenForKeyword("false"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("default"),
|
||||
_MakeLexTokenForKeyword("default"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("array"), _MakeLexTokenForKeyword("array"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("map"), _MakeLexTokenForKeyword("map"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("associated"),
|
||||
_MakeLexTokenForKeyword("associated"))
|
||||
|
||||
def testValidIdentifiers(self):
|
||||
"""Tests identifiers."""
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("abcd"), _MakeLexToken("NAME", "abcd"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("AbC_d012_"),
|
||||
_MakeLexToken("NAME", "AbC_d012_"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("_0123"), _MakeLexToken("NAME", "_0123"))
|
||||
|
||||
def testInvalidIdentifiers(self):
|
||||
with self.assertRaisesRegexp(
|
||||
mojom.parse.lexer.LexError,
|
||||
r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
|
||||
self._TokensForInput("$abc")
|
||||
with self.assertRaisesRegexp(
|
||||
mojom.parse.lexer.LexError,
|
||||
r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
|
||||
self._TokensForInput("a$bc")
|
||||
|
||||
def testDecimalIntegerConstants(self):
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("0"), _MakeLexToken("INT_CONST_DEC", "0"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("1"), _MakeLexToken("INT_CONST_DEC", "1"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("123"), _MakeLexToken("INT_CONST_DEC", "123"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("10"), _MakeLexToken("INT_CONST_DEC", "10"))
|
||||
|
||||
def testValidTokens(self):
|
||||
"""Tests valid tokens (which aren't tested elsewhere)."""
|
||||
# Keywords tested in |testValidKeywords|.
|
||||
# NAME tested in |testValidIdentifiers|.
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("@123"), _MakeLexToken("ORDINAL", "@123"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("456"), _MakeLexToken("INT_CONST_DEC", "456"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("0x01aB2eF3"),
|
||||
_MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("123.456"),
|
||||
_MakeLexToken("FLOAT_CONST", "123.456"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("\"hello\""),
|
||||
_MakeLexToken("STRING_LITERAL", "\"hello\""))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("+"), _MakeLexToken("PLUS", "+"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("-"), _MakeLexToken("MINUS", "-"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("?"), _MakeLexToken("QSTN", "?"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("="), _MakeLexToken("EQUALS", "="))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("=>"), _MakeLexToken("RESPONSE", "=>"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("("), _MakeLexToken("LPAREN", "("))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(")"), _MakeLexToken("RPAREN", ")"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("["), _MakeLexToken("LBRACKET", "["))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("]"), _MakeLexToken("RBRACKET", "]"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("{"), _MakeLexToken("LBRACE", "{"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("}"), _MakeLexToken("RBRACE", "}"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("<"), _MakeLexToken("LANGLE", "<"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(">"), _MakeLexToken("RANGLE", ">"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(";"), _MakeLexToken("SEMI", ";"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(","), _MakeLexToken("COMMA", ","))
|
||||
self.assertEquals(self._SingleTokenForInput("."), _MakeLexToken("DOT", "."))
|
||||
|
||||
def _TokensForInput(self, input_string):
|
||||
"""Gets a list of tokens for the given input string."""
|
||||
lexer = self._zygote_lexer.clone()
|
||||
lexer.input(input_string)
|
||||
rv = []
|
||||
while True:
|
||||
tok = lexer.token()
|
||||
if not tok:
|
||||
return rv
|
||||
rv.append(tok)
|
||||
|
||||
def _SingleTokenForInput(self, input_string):
|
||||
"""Gets the single token for the given input string. (Raises an exception if
|
||||
the input string does not result in exactly one token.)"""
|
||||
toks = self._TokensForInput(input_string)
|
||||
assert len(toks) == 1
|
||||
return toks[0]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
510
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
Normal file
510
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
Normal file
|
@ -0,0 +1,510 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Generates a syntax tree from a Mojo IDL file."""
|
||||
|
||||
# Breaking parser stanzas is unhelpful so allow longer lines.
|
||||
# pylint: disable=line-too-long
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom import fileutil
|
||||
from mojom.error import Error
|
||||
from mojom.parse import ast
|
||||
from mojom.parse.lexer import Lexer
|
||||
|
||||
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||
from ply import lex
|
||||
from ply import yacc
|
||||
|
||||
_MAX_ORDINAL_VALUE = 0xffffffff
|
||||
_MAX_ARRAY_SIZE = 0xffffffff
|
||||
|
||||
|
||||
class ParseError(Error):
|
||||
"""Class for errors from the parser."""
|
||||
|
||||
def __init__(self, filename, message, lineno=None, snippet=None):
|
||||
Error.__init__(
|
||||
self,
|
||||
filename,
|
||||
message,
|
||||
lineno=lineno,
|
||||
addenda=([snippet] if snippet else None))
|
||||
|
||||
|
||||
# We have methods which look like they could be functions:
|
||||
# pylint: disable=R0201
|
||||
class Parser:
|
||||
def __init__(self, lexer, source, filename):
|
||||
self.tokens = lexer.tokens
|
||||
self.source = source
|
||||
self.filename = filename
|
||||
|
||||
# Names of functions
|
||||
#
|
||||
# In general, we name functions after the left-hand-side of the rule(s) that
|
||||
# they handle. E.g., |p_foo_bar| for a rule |foo_bar : ...|.
|
||||
#
|
||||
# There may be multiple functions handling rules for the same left-hand-side;
|
||||
# then we name the functions |p_foo_bar_N| (for left-hand-side |foo_bar|),
|
||||
# where N is a number (numbered starting from 1). Note that using multiple
|
||||
# functions is actually more efficient than having single functions handle
|
||||
# multiple rules (and, e.g., distinguishing them by examining |len(p)|).
|
||||
#
|
||||
# It's also possible to have a function handling multiple rules with different
|
||||
# left-hand-sides. We do not do this.
|
||||
#
|
||||
# See http://www.dabeaz.com/ply/ply.html#ply_nn25 for more details.
|
||||
|
||||
# TODO(vtl): Get rid of the braces in the module "statement". (Consider
|
||||
# renaming "module" -> "package".) Then we'll be able to have a single rule
|
||||
# for root (by making module "optional").
|
||||
def p_root_1(self, p):
|
||||
"""root : """
|
||||
p[0] = ast.Mojom(None, ast.ImportList(), [])
|
||||
|
||||
def p_root_2(self, p):
|
||||
"""root : root module"""
|
||||
if p[1].module is not None:
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"Multiple \"module\" statements not allowed:",
|
||||
p[2].lineno,
|
||||
snippet=self._GetSnippet(p[2].lineno))
|
||||
if p[1].import_list.items or p[1].definition_list:
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"\"module\" statements must precede imports and definitions:",
|
||||
p[2].lineno,
|
||||
snippet=self._GetSnippet(p[2].lineno))
|
||||
p[0] = p[1]
|
||||
p[0].module = p[2]
|
||||
|
||||
def p_root_3(self, p):
|
||||
"""root : root import"""
|
||||
if p[1].definition_list:
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"\"import\" statements must precede definitions:",
|
||||
p[2].lineno,
|
||||
snippet=self._GetSnippet(p[2].lineno))
|
||||
p[0] = p[1]
|
||||
p[0].import_list.Append(p[2])
|
||||
|
||||
def p_root_4(self, p):
|
||||
"""root : root definition"""
|
||||
p[0] = p[1]
|
||||
p[0].definition_list.append(p[2])
|
||||
|
||||
def p_import(self, p):
|
||||
"""import : attribute_section IMPORT STRING_LITERAL SEMI"""
|
||||
# 'eval' the literal to strip the quotes.
|
||||
# TODO(vtl): This eval is dubious. We should unquote/unescape ourselves.
|
||||
p[0] = ast.Import(
|
||||
p[1], eval(p[3]), filename=self.filename, lineno=p.lineno(2))
|
||||
|
||||
def p_module(self, p):
|
||||
"""module : attribute_section MODULE identifier_wrapped SEMI"""
|
||||
p[0] = ast.Module(p[3], p[1], filename=self.filename, lineno=p.lineno(2))
|
||||
|
||||
def p_definition(self, p):
|
||||
"""definition : struct
|
||||
| union
|
||||
| interface
|
||||
| enum
|
||||
| const
|
||||
| feature"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_attribute_section_1(self, p):
|
||||
"""attribute_section : """
|
||||
p[0] = None
|
||||
|
||||
def p_attribute_section_2(self, p):
|
||||
"""attribute_section : LBRACKET attribute_list RBRACKET"""
|
||||
p[0] = p[2]
|
||||
|
||||
def p_attribute_list_1(self, p):
|
||||
"""attribute_list : """
|
||||
p[0] = ast.AttributeList()
|
||||
|
||||
def p_attribute_list_2(self, p):
|
||||
"""attribute_list : nonempty_attribute_list"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_nonempty_attribute_list_1(self, p):
|
||||
"""nonempty_attribute_list : attribute"""
|
||||
p[0] = ast.AttributeList(p[1])
|
||||
|
||||
def p_nonempty_attribute_list_2(self, p):
|
||||
"""nonempty_attribute_list : nonempty_attribute_list COMMA attribute"""
|
||||
p[0] = p[1]
|
||||
p[0].Append(p[3])
|
||||
|
||||
def p_attribute_1(self, p):
|
||||
"""attribute : name_wrapped EQUALS identifier_wrapped"""
|
||||
p[0] = ast.Attribute(p[1],
|
||||
p[3][1],
|
||||
filename=self.filename,
|
||||
lineno=p.lineno(1))
|
||||
|
||||
def p_attribute_2(self, p):
|
||||
"""attribute : name_wrapped EQUALS evaled_literal
|
||||
| name_wrapped EQUALS name_wrapped"""
|
||||
p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
|
||||
|
||||
def p_attribute_3(self, p):
|
||||
"""attribute : name_wrapped"""
|
||||
p[0] = ast.Attribute(p[1], True, filename=self.filename, lineno=p.lineno(1))
|
||||
|
||||
def p_evaled_literal(self, p):
|
||||
"""evaled_literal : literal"""
|
||||
# 'eval' the literal to strip the quotes. Handle keywords "true" and "false"
|
||||
# specially since they cannot directly be evaluated to python boolean
|
||||
# values.
|
||||
if p[1] == "true":
|
||||
p[0] = True
|
||||
elif p[1] == "false":
|
||||
p[0] = False
|
||||
else:
|
||||
p[0] = eval(p[1])
|
||||
|
||||
def p_struct_1(self, p):
|
||||
"""struct : attribute_section STRUCT name_wrapped LBRACE struct_body RBRACE SEMI"""
|
||||
p[0] = ast.Struct(p[3], p[1], p[5])
|
||||
|
||||
def p_struct_2(self, p):
|
||||
"""struct : attribute_section STRUCT name_wrapped SEMI"""
|
||||
p[0] = ast.Struct(p[3], p[1], None)
|
||||
|
||||
def p_struct_body_1(self, p):
|
||||
"""struct_body : """
|
||||
p[0] = ast.StructBody()
|
||||
|
||||
def p_struct_body_2(self, p):
|
||||
"""struct_body : struct_body const
|
||||
| struct_body enum
|
||||
| struct_body struct_field"""
|
||||
p[0] = p[1]
|
||||
p[0].Append(p[2])
|
||||
|
||||
def p_struct_field(self, p):
|
||||
"""struct_field : attribute_section typename name_wrapped ordinal default SEMI"""
|
||||
p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])
|
||||
|
||||
def p_feature(self, p):
|
||||
"""feature : attribute_section FEATURE NAME LBRACE feature_body RBRACE SEMI"""
|
||||
p[0] = ast.Feature(p[3], p[1], p[5])
|
||||
|
||||
def p_feature_body_1(self, p):
|
||||
"""feature_body : """
|
||||
p[0] = ast.FeatureBody()
|
||||
|
||||
def p_feature_body_2(self, p):
|
||||
"""feature_body : feature_body const"""
|
||||
p[0] = p[1]
|
||||
p[0].Append(p[2])
|
||||
|
||||
def p_union(self, p):
|
||||
"""union : attribute_section UNION name_wrapped LBRACE union_body RBRACE SEMI"""
|
||||
p[0] = ast.Union(p[3], p[1], p[5])
|
||||
|
||||
def p_union_body_1(self, p):
|
||||
"""union_body : """
|
||||
p[0] = ast.UnionBody()
|
||||
|
||||
def p_union_body_2(self, p):
|
||||
"""union_body : union_body union_field"""
|
||||
p[0] = p[1]
|
||||
p[1].Append(p[2])
|
||||
|
||||
def p_union_field(self, p):
|
||||
"""union_field : attribute_section typename name_wrapped ordinal SEMI"""
|
||||
p[0] = ast.UnionField(p[3], p[1], p[4], p[2])
|
||||
|
||||
def p_default_1(self, p):
|
||||
"""default : """
|
||||
p[0] = None
|
||||
|
||||
def p_default_2(self, p):
|
||||
"""default : EQUALS constant"""
|
||||
p[0] = p[2]
|
||||
|
||||
def p_interface(self, p):
|
||||
"""interface : attribute_section INTERFACE name_wrapped LBRACE interface_body RBRACE SEMI"""
|
||||
p[0] = ast.Interface(p[3], p[1], p[5])
|
||||
|
||||
def p_interface_body_1(self, p):
|
||||
"""interface_body : """
|
||||
p[0] = ast.InterfaceBody()
|
||||
|
||||
def p_interface_body_2(self, p):
|
||||
"""interface_body : interface_body const
|
||||
| interface_body enum
|
||||
| interface_body method"""
|
||||
p[0] = p[1]
|
||||
p[0].Append(p[2])
|
||||
|
||||
def p_response_1(self, p):
|
||||
"""response : """
|
||||
p[0] = None
|
||||
|
||||
def p_response_2(self, p):
|
||||
"""response : RESPONSE LPAREN parameter_list RPAREN"""
|
||||
p[0] = p[3]
|
||||
|
||||
def p_method(self, p):
|
||||
"""method : attribute_section name_wrapped ordinal LPAREN parameter_list RPAREN response SEMI"""
|
||||
p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])
|
||||
|
||||
def p_parameter_list_1(self, p):
|
||||
"""parameter_list : """
|
||||
p[0] = ast.ParameterList()
|
||||
|
||||
def p_parameter_list_2(self, p):
|
||||
"""parameter_list : nonempty_parameter_list"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_nonempty_parameter_list_1(self, p):
|
||||
"""nonempty_parameter_list : parameter"""
|
||||
p[0] = ast.ParameterList(p[1])
|
||||
|
||||
def p_nonempty_parameter_list_2(self, p):
|
||||
"""nonempty_parameter_list : nonempty_parameter_list COMMA parameter"""
|
||||
p[0] = p[1]
|
||||
p[0].Append(p[3])
|
||||
|
||||
def p_parameter(self, p):
|
||||
"""parameter : attribute_section typename name_wrapped ordinal"""
|
||||
p[0] = ast.Parameter(
|
||||
p[3], p[1], p[4], p[2], filename=self.filename, lineno=p.lineno(3))
|
||||
|
||||
def p_typename(self, p):
|
||||
"""typename : nonnullable_typename QSTN
|
||||
| nonnullable_typename"""
|
||||
if len(p) == 2:
|
||||
p[0] = p[1]
|
||||
else:
|
||||
p[0] = p[1] + "?"
|
||||
|
||||
def p_nonnullable_typename(self, p):
|
||||
"""nonnullable_typename : basictypename
|
||||
| array
|
||||
| fixed_array
|
||||
| associative_array"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_basictypename(self, p):
|
||||
"""basictypename : remotetype
|
||||
| receivertype
|
||||
| associatedremotetype
|
||||
| associatedreceivertype
|
||||
| identifier
|
||||
| ASSOCIATED identifier
|
||||
| handletype"""
|
||||
if len(p) == 2:
|
||||
p[0] = p[1]
|
||||
else:
|
||||
p[0] = "asso<" + p[2] + ">"
|
||||
|
||||
def p_remotetype(self, p):
|
||||
"""remotetype : PENDING_REMOTE LANGLE identifier RANGLE"""
|
||||
p[0] = "rmt<%s>" % p[3]
|
||||
|
||||
def p_receivertype(self, p):
|
||||
"""receivertype : PENDING_RECEIVER LANGLE identifier RANGLE"""
|
||||
p[0] = "rcv<%s>" % p[3]
|
||||
|
||||
def p_associatedremotetype(self, p):
|
||||
"""associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier RANGLE"""
|
||||
p[0] = "rma<%s>" % p[3]
|
||||
|
||||
def p_associatedreceivertype(self, p):
|
||||
"""associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier RANGLE"""
|
||||
p[0] = "rca<%s>" % p[3]
|
||||
|
||||
def p_handletype(self, p):
|
||||
"""handletype : HANDLE
|
||||
| HANDLE LANGLE name_wrapped RANGLE"""
|
||||
if len(p) == 2:
|
||||
p[0] = p[1]
|
||||
else:
|
||||
if p[3] not in ('data_pipe_consumer', 'data_pipe_producer',
|
||||
'message_pipe', 'shared_buffer', 'platform'):
|
||||
# Note: We don't enable tracking of line numbers for everything, so we
|
||||
# can't use |p.lineno(3)|.
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"Invalid handle type %r:" % p[3],
|
||||
lineno=p.lineno(1),
|
||||
snippet=self._GetSnippet(p.lineno(1)))
|
||||
p[0] = "handle<" + p[3] + ">"
|
||||
|
||||
def p_array(self, p):
|
||||
"""array : ARRAY LANGLE typename RANGLE"""
|
||||
p[0] = p[3] + "[]"
|
||||
|
||||
def p_fixed_array(self, p):
|
||||
"""fixed_array : ARRAY LANGLE typename COMMA INT_CONST_DEC RANGLE"""
|
||||
value = int(p[5])
|
||||
if value == 0 or value > _MAX_ARRAY_SIZE:
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"Fixed array size %d invalid:" % value,
|
||||
lineno=p.lineno(5),
|
||||
snippet=self._GetSnippet(p.lineno(5)))
|
||||
p[0] = p[3] + "[" + p[5] + "]"
|
||||
|
||||
def p_associative_array(self, p):
|
||||
"""associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
|
||||
p[0] = p[5] + "{" + p[3] + "}"
|
||||
|
||||
def p_ordinal_1(self, p):
|
||||
"""ordinal : """
|
||||
p[0] = None
|
||||
|
||||
def p_ordinal_2(self, p):
|
||||
"""ordinal : ORDINAL"""
|
||||
value = int(p[1][1:])
|
||||
if value > _MAX_ORDINAL_VALUE:
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"Ordinal value %d too large:" % value,
|
||||
lineno=p.lineno(1),
|
||||
snippet=self._GetSnippet(p.lineno(1)))
|
||||
p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))
|
||||
|
||||
def p_enum_1(self, p):
|
||||
"""enum : attribute_section ENUM name_wrapped LBRACE enum_value_list RBRACE SEMI
|
||||
| attribute_section ENUM name_wrapped LBRACE \
|
||||
nonempty_enum_value_list COMMA RBRACE SEMI"""
|
||||
p[0] = ast.Enum(
|
||||
p[3], p[1], p[5], filename=self.filename, lineno=p.lineno(2))
|
||||
|
||||
def p_enum_2(self, p):
|
||||
"""enum : attribute_section ENUM name_wrapped SEMI"""
|
||||
p[0] = ast.Enum(
|
||||
p[3], p[1], None, filename=self.filename, lineno=p.lineno(2))
|
||||
|
||||
def p_enum_value_list_1(self, p):
|
||||
"""enum_value_list : """
|
||||
p[0] = ast.EnumValueList()
|
||||
|
||||
def p_enum_value_list_2(self, p):
|
||||
"""enum_value_list : nonempty_enum_value_list"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_nonempty_enum_value_list_1(self, p):
|
||||
"""nonempty_enum_value_list : enum_value"""
|
||||
p[0] = ast.EnumValueList(p[1])
|
||||
|
||||
def p_nonempty_enum_value_list_2(self, p):
|
||||
"""nonempty_enum_value_list : nonempty_enum_value_list COMMA enum_value"""
|
||||
p[0] = p[1]
|
||||
p[0].Append(p[3])
|
||||
|
||||
def p_enum_value(self, p):
|
||||
"""enum_value : attribute_section name_wrapped
|
||||
| attribute_section name_wrapped EQUALS int
|
||||
| attribute_section name_wrapped EQUALS identifier_wrapped"""
|
||||
p[0] = ast.EnumValue(
|
||||
p[2],
|
||||
p[1],
|
||||
p[4] if len(p) == 5 else None,
|
||||
filename=self.filename,
|
||||
lineno=p.lineno(2))
|
||||
|
||||
def p_const(self, p):
|
||||
"""const : attribute_section CONST typename name_wrapped EQUALS constant SEMI"""
|
||||
p[0] = ast.Const(p[4], p[1], p[3], p[6])
|
||||
|
||||
def p_constant(self, p):
|
||||
"""constant : literal
|
||||
| identifier_wrapped"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_identifier_wrapped(self, p):
|
||||
"""identifier_wrapped : identifier"""
|
||||
p[0] = ('IDENTIFIER', p[1])
|
||||
|
||||
# TODO(vtl): Make this produce a "wrapped" identifier (probably as an
|
||||
# |ast.Identifier|, to be added) and get rid of identifier_wrapped.
|
||||
def p_identifier(self, p):
|
||||
"""identifier : name_wrapped
|
||||
| name_wrapped DOT identifier"""
|
||||
p[0] = ''.join(p[1:])
|
||||
|
||||
# Allow 'feature' to be a name literal not just a keyword.
|
||||
def p_name_wrapped(self, p):
|
||||
"""name_wrapped : NAME
|
||||
| FEATURE"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_literal(self, p):
|
||||
"""literal : int
|
||||
| float
|
||||
| TRUE
|
||||
| FALSE
|
||||
| DEFAULT
|
||||
| STRING_LITERAL"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_int(self, p):
|
||||
"""int : int_const
|
||||
| PLUS int_const
|
||||
| MINUS int_const"""
|
||||
p[0] = ''.join(p[1:])
|
||||
|
||||
def p_int_const(self, p):
|
||||
"""int_const : INT_CONST_DEC
|
||||
| INT_CONST_HEX"""
|
||||
p[0] = p[1]
|
||||
|
||||
def p_float(self, p):
|
||||
"""float : FLOAT_CONST
|
||||
| PLUS FLOAT_CONST
|
||||
| MINUS FLOAT_CONST"""
|
||||
p[0] = ''.join(p[1:])
|
||||
|
||||
def p_error(self, e):
|
||||
if e is None:
|
||||
# Unexpected EOF.
|
||||
# TODO(vtl): Can we figure out what's missing?
|
||||
raise ParseError(self.filename, "Unexpected end of file")
|
||||
|
||||
if e.value == 'feature':
|
||||
raise ParseError(self.filename,
|
||||
"`feature` is reserved for a future mojom keyword",
|
||||
lineno=e.lineno,
|
||||
snippet=self._GetSnippet(e.lineno))
|
||||
|
||||
raise ParseError(
|
||||
self.filename,
|
||||
"Unexpected %r:" % e.value,
|
||||
lineno=e.lineno,
|
||||
snippet=self._GetSnippet(e.lineno))
|
||||
|
||||
def _GetSnippet(self, lineno):
|
||||
return self.source.split('\n')[lineno - 1]
|
||||
|
||||
|
||||
def Parse(source, filename):
|
||||
"""Parse source file to AST.
|
||||
|
||||
Args:
|
||||
source: The source text as a str (Python 2 or 3) or unicode (Python 2).
|
||||
filename: The filename that |source| originates from.
|
||||
|
||||
Returns:
|
||||
The AST as a mojom.parse.ast.Mojom object.
|
||||
"""
|
||||
lexer = Lexer(filename)
|
||||
parser = Parser(lexer, source, filename)
|
||||
|
||||
lex.lex(object=lexer)
|
||||
yacc.yacc(module=parser, debug=0, write_tables=0)
|
||||
|
||||
tree = yacc.parse(source)
|
||||
return tree
|
File diff suppressed because it is too large
Load diff
502
utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py
Executable file
502
utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py
Executable file
|
@ -0,0 +1,502 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Parses mojom IDL files.
|
||||
|
||||
This script parses one or more input mojom files and produces corresponding
|
||||
module files fully describing the definitions contained within each mojom. The
|
||||
module data is pickled and can be easily consumed by other tools to, e.g.,
|
||||
generate usable language bindings.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import builtins
|
||||
import codecs
|
||||
import errno
|
||||
import json
|
||||
import logging
|
||||
import multiprocessing
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import traceback
|
||||
from collections import defaultdict
|
||||
|
||||
from mojom.generate import module
|
||||
from mojom.generate import translate
|
||||
from mojom.parse import parser
|
||||
from mojom.parse import conditional_features
|
||||
|
||||
|
||||
# Disable this for easier debugging.
|
||||
_ENABLE_MULTIPROCESSING = True
|
||||
|
||||
# https://docs.python.org/3/library/multiprocessing.html#:~:text=bpo-33725
|
||||
if __name__ == '__main__' and sys.platform == 'darwin':
|
||||
multiprocessing.set_start_method('fork')
|
||||
_MULTIPROCESSING_USES_FORK = multiprocessing.get_start_method() == 'fork'
|
||||
|
||||
|
||||
def _ResolveRelativeImportPath(path, roots):
|
||||
"""Attempts to resolve a relative import path against a set of possible roots.
|
||||
|
||||
Args:
|
||||
path: The relative import path to resolve.
|
||||
roots: A list of absolute paths which will be checked in descending length
|
||||
order for a match against path.
|
||||
|
||||
Returns:
|
||||
A normalized absolute path combining one of the roots with the input path if
|
||||
and only if such a file exists.
|
||||
|
||||
Raises:
|
||||
ValueError: The path could not be resolved against any of the given roots.
|
||||
"""
|
||||
for root in reversed(sorted(roots, key=len)):
|
||||
abs_path = os.path.join(root, path)
|
||||
if os.path.isfile(abs_path):
|
||||
return os.path.normcase(os.path.normpath(abs_path))
|
||||
|
||||
raise ValueError('"%s" does not exist in any of %s' % (path, roots))
|
||||
|
||||
|
||||
def RebaseAbsolutePath(path, roots):
|
||||
"""Rewrites an absolute file path as relative to an absolute directory path in
|
||||
roots.
|
||||
|
||||
Args:
|
||||
path: The absolute path of an existing file.
|
||||
roots: A list of absolute directory paths. The given path argument must fall
|
||||
within one of these directories.
|
||||
|
||||
Returns:
|
||||
A path equivalent to the input path, but relative to one of the provided
|
||||
roots. If the input path falls within multiple roots, the longest root is
|
||||
chosen (and thus the shortest relative path is returned).
|
||||
|
||||
Paths returned by this method always use forward slashes as a separator to
|
||||
mirror mojom import syntax.
|
||||
|
||||
Raises:
|
||||
ValueError if the given path does not fall within any of the listed roots.
|
||||
"""
|
||||
assert os.path.isabs(path)
|
||||
assert os.path.isfile(path)
|
||||
assert all(map(os.path.isabs, roots))
|
||||
|
||||
sorted_roots = list(reversed(sorted(roots, key=len)))
|
||||
|
||||
def try_rebase_path(path, root):
|
||||
head, rebased_path = os.path.split(path)
|
||||
while head != root:
|
||||
head, tail = os.path.split(head)
|
||||
if not tail:
|
||||
return None
|
||||
rebased_path = os.path.join(tail, rebased_path)
|
||||
return rebased_path
|
||||
|
||||
for root in sorted_roots:
|
||||
relative_path = try_rebase_path(path, root)
|
||||
if relative_path:
|
||||
# TODO(crbug.com/953884): Use pathlib for this kind of thing once we're
|
||||
# fully migrated to Python 3.
|
||||
return relative_path.replace('\\', '/')
|
||||
|
||||
raise ValueError('%s does not fall within any of %s' % (path, sorted_roots))
|
||||
|
||||
|
||||
def _GetModuleFilename(mojom_filename):
|
||||
return mojom_filename + '-module'
|
||||
|
||||
|
||||
def _EnsureInputLoaded(mojom_abspath, module_path, abs_paths, asts,
|
||||
dependencies, loaded_modules, module_metadata):
|
||||
"""Recursively ensures that a module and its dependencies are loaded.
|
||||
|
||||
Args:
|
||||
mojom_abspath: An absolute file path pointing to a mojom file to load.
|
||||
module_path: The relative path used to identify mojom_abspath.
|
||||
abs_paths: A mapping from module paths to absolute file paths for all
|
||||
inputs given to this execution of the script.
|
||||
asts: A map from each input mojom's absolute path to its parsed AST.
|
||||
dependencies: A mapping of which input mojoms depend on each other, indexed
|
||||
by absolute file path.
|
||||
loaded_modules: A mapping of all modules loaded so far, including non-input
|
||||
modules that were pulled in as transitive dependencies of the inputs.
|
||||
module_metadata: Metadata to be attached to every module loaded by this
|
||||
helper.
|
||||
|
||||
Returns:
|
||||
None
|
||||
|
||||
On return, loaded_modules will be populated with the loaded input mojom's
|
||||
Module as well as the Modules of all of its transitive dependencies."""
|
||||
|
||||
if mojom_abspath in loaded_modules:
|
||||
# Already done.
|
||||
return
|
||||
|
||||
for dep_abspath, dep_path in sorted(dependencies[mojom_abspath]):
|
||||
if dep_abspath not in loaded_modules:
|
||||
_EnsureInputLoaded(dep_abspath, dep_path, abs_paths, asts, dependencies,
|
||||
loaded_modules, module_metadata)
|
||||
|
||||
imports = {}
|
||||
for imp in asts[mojom_abspath].import_list:
|
||||
path = imp.import_filename
|
||||
imports[path] = loaded_modules[abs_paths[path]]
|
||||
loaded_modules[mojom_abspath] = translate.OrderedModule(
|
||||
asts[mojom_abspath], module_path, imports)
|
||||
loaded_modules[mojom_abspath].metadata = dict(module_metadata)
|
||||
|
||||
|
||||
def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
|
||||
allowed_imports = set()
|
||||
processed_deps = set()
|
||||
|
||||
def collect(metadata_filename):
|
||||
processed_deps.add(metadata_filename)
|
||||
|
||||
# Paths in the metadata file are relative to the metadata file's dir.
|
||||
metadata_dir = os.path.abspath(os.path.dirname(metadata_filename))
|
||||
|
||||
def to_abs(s):
|
||||
return os.path.normpath(os.path.join(metadata_dir, s))
|
||||
|
||||
with open(metadata_filename) as f:
|
||||
metadata = json.load(f)
|
||||
allowed_imports.update(
|
||||
[os.path.normcase(to_abs(s)) for s in metadata['sources']])
|
||||
for dep_metadata in metadata['deps']:
|
||||
dep_metadata = to_abs(dep_metadata)
|
||||
if dep_metadata not in processed_deps:
|
||||
collect(dep_metadata)
|
||||
|
||||
collect(build_metadata_filename)
|
||||
return allowed_imports
|
||||
|
||||
|
||||
# multiprocessing helper.
|
||||
def _ParseAstHelper(mojom_abspath, enabled_features):
|
||||
with codecs.open(mojom_abspath, encoding='utf-8') as f:
|
||||
ast = parser.Parse(f.read(), mojom_abspath)
|
||||
conditional_features.RemoveDisabledDefinitions(ast, enabled_features)
|
||||
return mojom_abspath, ast
|
||||
|
||||
|
||||
# multiprocessing helper.
|
||||
def _SerializeHelper(mojom_abspath, mojom_path):
|
||||
module_path = os.path.join(_SerializeHelper.output_root_path,
|
||||
_GetModuleFilename(mojom_path))
|
||||
module_dir = os.path.dirname(module_path)
|
||||
if not os.path.exists(module_dir):
|
||||
try:
|
||||
# Python 2 doesn't support exist_ok on makedirs(), so we just ignore
|
||||
# that failure if it happens. It's possible during build due to races
|
||||
# among build steps with module outputs in the same directory.
|
||||
os.makedirs(module_dir)
|
||||
except OSError as e:
|
||||
if e.errno != errno.EEXIST:
|
||||
raise
|
||||
with open(module_path, 'wb') as f:
|
||||
_SerializeHelper.loaded_modules[mojom_abspath].Dump(f)
|
||||
|
||||
|
||||
class _ExceptionWrapper:
|
||||
def __init__(self):
|
||||
# Do not capture exception object to ensure pickling works.
|
||||
self.formatted_trace = traceback.format_exc()
|
||||
|
||||
|
||||
class _FuncWrapper:
|
||||
"""Marshals exceptions and spreads args."""
|
||||
|
||||
def __init__(self, func):
|
||||
self._func = func
|
||||
|
||||
def __call__(self, args):
|
||||
# multiprocessing does not gracefully handle excptions.
|
||||
# https://crbug.com/1219044
|
||||
try:
|
||||
return self._func(*args)
|
||||
except: # pylint: disable=bare-except
|
||||
return _ExceptionWrapper()
|
||||
|
||||
|
||||
def _Shard(target_func, arg_list, processes=None):
|
||||
arg_list = list(arg_list)
|
||||
if processes is None:
|
||||
processes = multiprocessing.cpu_count()
|
||||
# Seems optimal to have each process perform at least 2 tasks.
|
||||
processes = min(processes, len(arg_list) // 2)
|
||||
|
||||
if sys.platform == 'win32':
|
||||
# TODO(crbug.com/1190269) - we can't use more than 56
|
||||
# cores on Windows or Python3 may hang.
|
||||
processes = min(processes, 56)
|
||||
|
||||
# Don't spin up processes unless there is enough work to merit doing so.
|
||||
if not _ENABLE_MULTIPROCESSING or processes < 2:
|
||||
for arg_tuple in arg_list:
|
||||
yield target_func(*arg_tuple)
|
||||
return
|
||||
|
||||
pool = multiprocessing.Pool(processes=processes)
|
||||
try:
|
||||
wrapped_func = _FuncWrapper(target_func)
|
||||
for result in pool.imap_unordered(wrapped_func, arg_list):
|
||||
if isinstance(result, _ExceptionWrapper):
|
||||
sys.stderr.write(result.formatted_trace)
|
||||
sys.exit(1)
|
||||
yield result
|
||||
finally:
|
||||
pool.close()
|
||||
pool.join() # Needed on Windows to avoid WindowsError during terminate.
|
||||
pool.terminate()
|
||||
|
||||
|
||||
def _ParseMojoms(mojom_files,
                 input_root_paths,
                 output_root_path,
                 module_root_paths,
                 enabled_features,
                 module_metadata,
                 allowed_imports=None):
  """Parses a set of mojom files and produces serialized module outputs.

  Args:
    mojom_files: A list of mojom files to process. Paths must be absolute paths
        which fall within one of the input or output root paths.
    input_root_paths: A list of absolute filesystem paths which may be used to
        resolve relative mojom file paths.
    output_root_path: An absolute filesystem path which will serve as the root
        for all emitted artifacts. Artifacts produced from a given mojom file
        are based on the mojom's relative path, rebased onto this path.
        Additionally, the script expects this root to contain already-generated
        modules for any transitive dependencies not listed in mojom_files.
    module_root_paths: A list of absolute filesystem paths which contain
        already-generated modules for any non-transitive dependencies.
    enabled_features: A list of enabled feature names, controlling which AST
        nodes are filtered by [EnableIf] or [EnableIfNot] attributes.
    module_metadata: A list of 2-tuples representing metadata key-value pairs to
        attach to each compiled module output.
    allowed_imports: Optional set of absolute mojom paths; when given, any
        import outside this set raises ValueError (build-dependency checking).

  Returns:
    None.

  Upon completion, a mojom-module file will be saved for each input mojom.
  """
  assert input_root_paths
  assert output_root_path

  loaded_mojom_asts = {}
  loaded_modules = {}
  input_dependencies = defaultdict(set)
  # Map of normalized absolute path -> root-relative path for every input.
  mojom_files_to_parse = dict((os.path.normcase(abs_path),
                               RebaseAbsolutePath(abs_path, input_root_paths))
                              for abs_path in mojom_files)
  abs_paths = dict(
      (path, abs_path) for abs_path, path in mojom_files_to_parse.items())

  logging.info('Parsing %d .mojom into ASTs', len(mojom_files_to_parse))
  map_args = ((mojom_abspath, enabled_features)
              for mojom_abspath in mojom_files_to_parse)
  for mojom_abspath, ast in _Shard(_ParseAstHelper, map_args):
    loaded_mojom_asts[mojom_abspath] = ast

  logging.info('Processing dependencies')
  # sorted() makes dependency processing (and any error output) deterministic.
  for mojom_abspath, ast in sorted(loaded_mojom_asts.items()):
    invalid_imports = []
    for imp in ast.import_list:
      import_abspath = _ResolveRelativeImportPath(imp.import_filename,
                                                  input_root_paths)
      if allowed_imports and import_abspath not in allowed_imports:
        invalid_imports.append(imp.import_filename)

      abs_paths[imp.import_filename] = import_abspath
      if import_abspath in mojom_files_to_parse:
        # This import is in the input list, so we're going to translate it
        # into a module below; however it's also a dependency of another input
        # module. We retain record of dependencies to help with input
        # processing later.
        input_dependencies[mojom_abspath].add(
            (import_abspath, imp.import_filename))
      elif import_abspath not in loaded_modules:
        # We have an import that isn't being parsed right now. It must already
        # be parsed and have a module file sitting in a corresponding output
        # location.
        module_path = _GetModuleFilename(imp.import_filename)
        module_abspath = _ResolveRelativeImportPath(
            module_path, module_root_paths + [output_root_path])
        with open(module_abspath, 'rb') as module_file:
          loaded_modules[import_abspath] = module.Module.Load(module_file)

    if invalid_imports:
      raise ValueError(
          '\nThe file %s imports the following files not allowed by build '
          'dependencies:\n\n%s\n' % (mojom_abspath, '\n'.join(invalid_imports)))
  logging.info('Loaded %d modules from dependencies', len(loaded_modules))

  # At this point all transitive imports not listed as inputs have been loaded
  # and we have a complete dependency tree of the unprocessed inputs. Now we can
  # load all the inputs, resolving dependencies among them recursively as we go.
  logging.info('Ensuring inputs are loaded')
  num_existing_modules_loaded = len(loaded_modules)
  for mojom_abspath, mojom_path in mojom_files_to_parse.items():
    _EnsureInputLoaded(mojom_abspath, mojom_path, abs_paths, loaded_mojom_asts,
                       input_dependencies, loaded_modules, module_metadata)
  assert (num_existing_modules_loaded +
          len(mojom_files_to_parse) == len(loaded_modules))

  # Now we have fully translated modules for every input and every transitive
  # dependency. We can dump the modules to disk for other tools to use.
  logging.info('Serializing %d modules', len(mojom_files_to_parse))

  # Windows does not use fork() for multiprocessing, so we'd need to pass
  # loaded_module via IPC rather than via globals. Doing so is slower than not
  # using multiprocessing.
  _SerializeHelper.loaded_modules = loaded_modules
  _SerializeHelper.output_root_path = output_root_path
  # Doesn't seem to help past 4. Perhaps IO bound here?
  processes = 4 if _MULTIPROCESSING_USES_FORK else 0
  map_args = mojom_files_to_parse.items()
  for _ in _Shard(_SerializeHelper, map_args, processes=processes):
    pass
|
||||
|
||||
|
||||
def Run(command_line):
  """Command-line entry point for the mojom parser.

  Configures logging, parses command-line arguments, and invokes
  _ParseMojoms() over the resolved set of input mojom files.

  Args:
    command_line: The list of command-line arguments, excluding the program
        name (i.e. sys.argv[1:]).

  Raises:
    ValueError: If no input mojoms are given via --mojoms or
        --mojom-file-list.
  """
  # MOJOM_PARSER_DEBUG set to any non-'0' value enables verbose logging.
  debug_logging = os.environ.get('MOJOM_PARSER_DEBUG', '0') != '0'
  logging.basicConfig(level=logging.DEBUG if debug_logging else logging.WARNING,
                      format='%(levelname).1s %(relativeCreated)6d %(message)s')
  logging.info('Started (%s)', os.path.basename(sys.argv[0]))

  arg_parser = argparse.ArgumentParser(
      description="""
Parses one or more mojom files and produces corresponding module outputs fully
describing the definitions therein. The output is exhaustive, stable, and
sufficient for another tool to consume and emit e.g. usable language
bindings based on the original mojoms.""",
      epilog="""
Note that each transitive import dependency reachable from the input mojoms must
either also be listed as an input or must have its corresponding compiled module
already present in the provided output root.""")

  arg_parser.add_argument(
      '--input-root',
      default=[],
      action='append',
      metavar='ROOT',
      dest='input_root_paths',
      help='Adds ROOT to the set of root paths against which relative input '
      'paths should be resolved. Provided root paths are always searched '
      'in order from longest absolute path to shortest.')
  arg_parser.add_argument(
      '--output-root',
      action='store',
      required=True,
      dest='output_root_path',
      metavar='ROOT',
      help='Use ROOT as the root path in which the parser should emit compiled '
      'modules for each processed input mojom. The path of emitted module is '
      'based on the relative input path, rebased onto this root. Note that '
      'ROOT is also searched for existing modules of any transitive imports '
      'which were not included in the set of inputs.')
  arg_parser.add_argument(
      '--module-root',
      default=[],
      action='append',
      metavar='ROOT',
      dest='module_root_paths',
      help='Adds ROOT to the set of root paths to search for existing modules '
      'of non-transitive imports. Provided root paths are always searched in '
      'order from longest absolute path to shortest.')
  arg_parser.add_argument(
      '--mojoms',
      nargs='+',
      dest='mojom_files',
      default=[],
      metavar='MOJOM_FILE',
      help='Input mojom filename(s). Each filename must be either an absolute '
      'path which falls within one of the given input or output roots, or a '
      'relative path the parser will attempt to resolve using each of those '
      'roots in unspecified order.')
  arg_parser.add_argument(
      '--mojom-file-list',
      action='store',
      metavar='LIST_FILENAME',
      help='Input file whose contents are a list of mojoms to process. This '
      'may be provided in lieu of --mojoms to avoid hitting command line '
      'length limitations')
  arg_parser.add_argument(
      '--enable-feature',
      dest='enabled_features',
      default=[],
      action='append',
      metavar='FEATURE',
      help='Enables a named feature when parsing the given mojoms. Features '
      'are identified by arbitrary string values. Specifying this flag with a '
      'given FEATURE name will cause the parser to process any syntax elements '
      'tagged with an [EnableIf=FEATURE] or [EnableIfNot] attribute. If this '
      'flag is not provided for a given FEATURE, such tagged elements are '
      'discarded by the parser and will not be present in the compiled output.')
  arg_parser.add_argument(
      '--check-imports',
      dest='build_metadata_filename',
      action='store',
      metavar='METADATA_FILENAME',
      help='Instructs the parser to check imports against a set of allowed '
      'imports. Allowed imports are based on build metadata within '
      'METADATA_FILENAME. This is a JSON file with a `sources` key listing '
      'paths to the set of input mojom files being processed by this parser '
      'run, and a `deps` key listing paths to metadata files for any '
      'dependencies of these inputs. This feature can be used to implement '
      'build-time dependency checking for mojom imports, where each build '
      'metadata file corresponds to a build target in the dependency graph of '
      'a typical build system.')
  arg_parser.add_argument(
      '--add-module-metadata',
      dest='module_metadata',
      default=[],
      action='append',
      metavar='KEY=VALUE',
      help='Adds a metadata key-value pair to the output module. This can be '
      'used by build toolchains to augment parsed mojom modules with product-'
      'specific metadata for later extraction and use by custom bindings '
      'generators.')

  args, _ = arg_parser.parse_known_args(command_line)
  if args.mojom_file_list:
    with open(args.mojom_file_list) as f:
      args.mojom_files.extend(f.read().split())

  if not args.mojom_files:
    raise ValueError(
        'Must list at least one mojom file via --mojoms or --mojom-file-list')

  mojom_files = list(map(os.path.abspath, args.mojom_files))
  input_roots = list(map(os.path.abspath, args.input_root_paths))
  output_root = os.path.abspath(args.output_root_path)
  module_roots = list(map(os.path.abspath, args.module_root_paths))

  if args.build_metadata_filename:
    allowed_imports = _CollectAllowedImportsFromBuildMetadata(
        args.build_metadata_filename)
  else:
    allowed_imports = None

  # Split on the first '=' only, so metadata VALUEs may themselves contain
  # '=' characters without producing a malformed tuple.
  module_metadata = list(
      map(lambda kvp: tuple(kvp.split('=', 1)), args.module_metadata))
  _ParseMojoms(mojom_files, input_roots, output_root, module_roots,
               args.enabled_features, module_metadata, allowed_imports)
  logging.info('Finished')
|
||||
|
||||
|
||||
if __name__ == '__main__':
  Run(sys.argv[1:])
  # Exit without running GC, which can save multiple seconds due to the large
  # number of objects created. But flush is necessary as os._exit doesn't do
  # that.
  sys.stdout.flush()
  sys.stderr.flush()
  os._exit(0)
|
|
@ -0,0 +1,73 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import mojom_parser
|
||||
|
||||
from mojom.generate import module
|
||||
|
||||
|
||||
class MojomParserTestCase(unittest.TestCase):
  """Tests covering the behavior defined by the main mojom_parser.py script.
  This includes behavior around input and output path manipulation, dependency
  resolution, and module serialization and deserialization."""

  def __init__(self, method_name):
    super().__init__(method_name)
    # Per-test scratch directory; created in setUp, removed in tearDown.
    self._temp_dir = None

  def setUp(self):
    self._temp_dir = tempfile.mkdtemp()

  def tearDown(self):
    shutil.rmtree(self._temp_dir)
    self._temp_dir = None

  def GetPath(self, path):
    """Returns the absolute path for `path`, rooted in the temp directory."""
    assert not os.path.isabs(path)
    return os.path.join(self._temp_dir, path)

  def GetModulePath(self, path):
    """Returns the expected compiled-module output path for a mojom path."""
    assert not os.path.isabs(path)
    return os.path.join(self.GetPath('out'), path) + '-module'

  def WriteFile(self, path, contents):
    """Writes `contents` to `path` under the temp dir, creating parents."""
    full_path = self.GetPath(path)
    # exist_ok avoids the race between an existence check and creation.
    os.makedirs(os.path.dirname(full_path), exist_ok=True)
    with open(full_path, 'w') as f:
      f.write(contents)

  def LoadModule(self, mojom_path):
    """Loads and returns the compiled module for a previously parsed mojom."""
    with open(self.GetModulePath(mojom_path), 'rb') as f:
      return module.Module.Load(f)

  def ParseMojoms(self, mojoms, metadata=None):
    """Parse all input mojoms relative to the temp dir."""
    out_dir = self.GetPath('out')
    args = [
        '--input-root', self._temp_dir, '--input-root', out_dir,
        '--output-root', out_dir, '--mojoms'
    ] + list(map(lambda mojom: os.path.join(self._temp_dir, mojom), mojoms))
    if metadata:
      args.extend(['--check-imports', self.GetPath(metadata)])
    mojom_parser.Run(args)

  def ExtractTypes(self, mojom):
    """Parses a mojom snippet and returns a dict of its named definitions."""
    filename = 'test.mojom'
    self.WriteFile(filename, mojom)
    self.ParseMojoms([filename])
    m = self.LoadModule(filename)
    definitions = {}
    for kinds in (m.enums, m.structs, m.unions, m.interfaces, m.features):
      for kind in kinds:
        definitions[kind.mojom_name] = kind
    return definitions
|
|
@ -0,0 +1,186 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import json
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class MojomParserTest(MojomParserTestCase):
  """Tests covering the behavior defined by the main mojom_parser.py script.
  This includes behavior around input and output path manipulation, dependency
  resolution, and module serialization and deserialization."""

  # NOTE: assertRaisesRegex is used throughout; the old assertRaisesRegexp
  # alias was deprecated and is removed in Python 3.12.

  def testBasicParse(self):
    """Basic test to verify that we can parse a mojom file and get a module."""
    mojom = 'foo/bar.mojom'
    self.WriteFile(
        mojom, """\
        module test;
        enum TestEnum { kFoo };
        """)
    self.ParseMojoms([mojom])

    m = self.LoadModule(mojom)
    self.assertEqual('foo/bar.mojom', m.path)
    self.assertEqual('test', m.mojom_namespace)
    self.assertEqual(1, len(m.enums))

  def testBasicParseWithAbsolutePaths(self):
    """Verifies that we can parse a mojom file given an absolute path input."""
    mojom = 'foo/bar.mojom'
    self.WriteFile(
        mojom, """\
        module test;
        enum TestEnum { kFoo };
        """)
    self.ParseMojoms([self.GetPath(mojom)])

    m = self.LoadModule(mojom)
    self.assertEqual('foo/bar.mojom', m.path)
    self.assertEqual('test', m.mojom_namespace)
    self.assertEqual(1, len(m.enums))

  def testImport(self):
    """Verify imports within the same set of mojom inputs."""
    a = 'a.mojom'
    b = 'b.mojom'
    self.WriteFile(
        a, """\
        module a;
        import "b.mojom";
        struct Foo { b.Bar bar; };""")
    self.WriteFile(b, """\
        module b;
        struct Bar {};""")
    self.ParseMojoms([a, b])

    ma = self.LoadModule(a)
    mb = self.LoadModule(b)
    self.assertEqual('a.mojom', ma.path)
    self.assertEqual('b.mojom', mb.path)
    self.assertEqual(1, len(ma.imports))
    self.assertEqual(mb, ma.imports[0])

  def testPreProcessedImport(self):
    """Verify imports processed by a previous parser execution can be loaded
    properly when parsing a dependent mojom."""
    a = 'a.mojom'
    self.WriteFile(a, """\
        module a;
        struct Bar {};""")
    self.ParseMojoms([a])

    b = 'b.mojom'
    self.WriteFile(
        b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")
    self.ParseMojoms([b])

  def testMissingImport(self):
    """Verify that an import fails if the imported mojom does not exist."""
    a = 'a.mojom'
    self.WriteFile(
        a, """\
        module a;
        import "non-existent.mojom";
        struct Bar {};""")
    with self.assertRaisesRegex(ValueError, "does not exist"):
      self.ParseMojoms([a])

  def testUnparsedImport(self):
    """Verify that an import fails if the imported mojom is not in the set of
    mojoms provided to the parser on this execution AND there is no pre-existing
    parsed output module already on disk for it."""
    a = 'a.mojom'
    b = 'b.mojom'
    self.WriteFile(a, """\
        module a;
        struct Bar {};""")
    self.WriteFile(
        b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")

    # a.mojom has not been parsed yet, so its import will fail when processing
    # b.mojom here.
    with self.assertRaisesRegex(ValueError, "does not exist"):
      self.ParseMojoms([b])

  def testCheckImportsBasic(self):
    """Verify that the parser can handle --check-imports with a valid set of
    inputs, including support for transitive dependency resolution."""
    a = 'a.mojom'
    a_metadata = 'out/a.build_metadata'
    b = 'b.mojom'
    b_metadata = 'out/b.build_metadata'
    c = 'c.mojom'
    c_metadata = 'out/c.build_metadata'
    self.WriteFile(a_metadata,
                   json.dumps({
                       "sources": [self.GetPath(a)],
                       "deps": []
                   }))
    self.WriteFile(
        b_metadata,
        json.dumps({
            "sources": [self.GetPath(b)],
            "deps": [self.GetPath(a_metadata)]
        }))
    self.WriteFile(
        c_metadata,
        json.dumps({
            "sources": [self.GetPath(c)],
            "deps": [self.GetPath(b_metadata)]
        }))
    self.WriteFile(a, """\
        module a;
        struct Bar {};""")
    self.WriteFile(
        b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")
    self.WriteFile(
        c, """\
        module c;
        import "a.mojom";
        import "b.mojom";
        struct Baz { b.Foo foo; };""")
    self.ParseMojoms([a], metadata=a_metadata)
    self.ParseMojoms([b], metadata=b_metadata)
    self.ParseMojoms([c], metadata=c_metadata)

  def testCheckImportsMissing(self):
    """Verify that the parser rejects valid input mojoms when imports don't
    agree with build metadata given via --check-imports."""
    a = 'a.mojom'
    a_metadata = 'out/a.build_metadata'
    b = 'b.mojom'
    b_metadata = 'out/b.build_metadata'
    self.WriteFile(a_metadata,
                   json.dumps({
                       "sources": [self.GetPath(a)],
                       "deps": []
                   }))
    self.WriteFile(b_metadata,
                   json.dumps({
                       "sources": [self.GetPath(b)],
                       "deps": []
                   }))
    self.WriteFile(a, """\
        module a;
        struct Bar {};""")
    self.WriteFile(
        b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")

    self.ParseMojoms([a], metadata=a_metadata)
    with self.assertRaisesRegex(ValueError, "not allowed by build"):
      self.ParseMojoms([b], metadata=b_metadata)
|
|
@ -0,0 +1,127 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
from mojom.generate import module
|
||||
|
||||
|
||||
class StableAttributeTest(MojomParserTestCase):
  """Tests covering usage of the [Stable] attribute."""

  # NOTE: assertRaisesRegex is used throughout; the old assertRaisesRegexp
  # alias was deprecated and is removed in Python 3.12.

  def testStableAttributeTagging(self):
    """Verify that we recognize the [Stable] attribute on relevant definitions
    and the resulting parser outputs are tagged accordingly."""
    mojom = 'test.mojom'
    self.WriteFile(
        mojom, """\
        [Stable] enum TestEnum { kFoo };
        enum UnstableEnum { kBar };
        [Stable] struct TestStruct { TestEnum a; };
        struct UnstableStruct { UnstableEnum a; };
        [Stable] union TestUnion { TestEnum a; TestStruct b; };
        union UnstableUnion { UnstableEnum a; UnstableStruct b; };
        [Stable] interface TestInterface { Foo@0(TestUnion x) => (); };
        interface UnstableInterface { Foo(UnstableUnion x) => (); };
        """)
    self.ParseMojoms([mojom])

    m = self.LoadModule(mojom)
    self.assertEqual(2, len(m.enums))
    self.assertTrue(m.enums[0].stable)
    self.assertFalse(m.enums[1].stable)
    self.assertEqual(2, len(m.structs))
    self.assertTrue(m.structs[0].stable)
    self.assertFalse(m.structs[1].stable)
    self.assertEqual(2, len(m.unions))
    self.assertTrue(m.unions[0].stable)
    self.assertFalse(m.unions[1].stable)
    self.assertEqual(2, len(m.interfaces))
    self.assertTrue(m.interfaces[0].stable)
    self.assertFalse(m.interfaces[1].stable)

  def testStableStruct(self):
    """A [Stable] struct is valid if all its fields are also stable."""
    self.ExtractTypes('[Stable] struct S {};')
    self.ExtractTypes('[Stable] struct S { int32 x; bool b; };')
    self.ExtractTypes('[Stable] enum E { A }; [Stable] struct S { E e; };')
    self.ExtractTypes('[Stable] struct S {}; [Stable] struct T { S s; };')
    self.ExtractTypes(
        '[Stable] struct S {}; [Stable] struct T { array<S> ss; };')
    self.ExtractTypes(
        '[Stable] interface F {}; [Stable] struct T { pending_remote<F> f; };')

    with self.assertRaisesRegex(Exception, 'because it depends on E'):
      self.ExtractTypes('enum E { A }; [Stable] struct S { E e; };')
    with self.assertRaisesRegex(Exception, 'because it depends on X'):
      self.ExtractTypes('struct X {}; [Stable] struct S { X x; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] struct S { array<T> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] struct S { map<int32, T> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] struct S { map<T, int32> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on F'):
      self.ExtractTypes(
          'interface F {}; [Stable] struct S { pending_remote<F> f; };')
    with self.assertRaisesRegex(Exception, 'because it depends on F'):
      self.ExtractTypes(
          'interface F {}; [Stable] struct S { pending_receiver<F> f; };')

  def testStableUnion(self):
    """A [Stable] union is valid if all its fields' types are also stable."""
    self.ExtractTypes('[Stable] union U {};')
    self.ExtractTypes('[Stable] union U { int32 x; bool b; };')
    self.ExtractTypes('[Stable] enum E { A }; [Stable] union U { E e; };')
    self.ExtractTypes('[Stable] struct S {}; [Stable] union U { S s; };')
    self.ExtractTypes(
        '[Stable] struct S {}; [Stable] union U { array<S> ss; };')
    self.ExtractTypes(
        '[Stable] interface F {}; [Stable] union U { pending_remote<F> f; };')

    with self.assertRaisesRegex(Exception, 'because it depends on E'):
      self.ExtractTypes('enum E { A }; [Stable] union U { E e; };')
    with self.assertRaisesRegex(Exception, 'because it depends on X'):
      self.ExtractTypes('struct X {}; [Stable] union U { X x; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] union U { array<T> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] union U { map<int32, T> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] union U { map<T, int32> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on F'):
      self.ExtractTypes(
          'interface F {}; [Stable] union U { pending_remote<F> f; };')
    with self.assertRaisesRegex(Exception, 'because it depends on F'):
      self.ExtractTypes(
          'interface F {}; [Stable] union U { pending_receiver<F> f; };')

  def testStableInterface(self):
    """A [Stable] interface is valid if all its methods' parameter types are
    stable, including response parameters where applicable."""
    self.ExtractTypes('[Stable] interface F {};')
    self.ExtractTypes('[Stable] interface F { A@0(int32 x); };')
    self.ExtractTypes('[Stable] interface F { A@0(int32 x) => (bool b); };')
    self.ExtractTypes("""\
        [Stable] enum E { A, B, C };
        [Stable] struct S {};
        [Stable] interface F { A@0(E e, S s) => (bool b, array<S> s); };
        """)

    with self.assertRaisesRegex(Exception, 'because it depends on E'):
      self.ExtractTypes(
          'enum E { A, B, C }; [Stable] interface F { A@0(E e); };')
    with self.assertRaisesRegex(Exception, 'because it depends on E'):
      self.ExtractTypes(
          'enum E { A, B, C }; [Stable] interface F { A@0(int32 x) => (E e); };'
      )
    with self.assertRaisesRegex(Exception, 'because it depends on S'):
      self.ExtractTypes(
          'struct S {}; [Stable] interface F { A@0(int32 x) => (S s); };')
    with self.assertRaisesRegex(Exception, 'because it depends on S'):
      self.ExtractTypes(
          'struct S {}; [Stable] interface F { A@0(S s) => (bool b); };')

    with self.assertRaisesRegex(Exception, 'explicit method ordinals'):
      self.ExtractTypes('[Stable] interface F { A() => (); };')
|
44
utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py
Normal file
44
utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class UnionTest(MojomParserTestCase):
  """Tests union parsing behavior."""

  # NOTE: assertRaisesRegex replaces the assertRaisesRegexp alias (removed in
  # Python 3.12), and the regex patterns are raw strings so the \[ escapes do
  # not trigger SyntaxWarning on modern Python. Runtime pattern bytes are
  # unchanged.

  def testExtensibleMustHaveDefault(self):
    """Verifies that extensible unions must have a default field."""
    mojom = 'foo.mojom'
    self.WriteFile(mojom, 'module foo; [Extensible] union U { bool x; };')
    with self.assertRaisesRegex(Exception, r'must specify a \[Default\]'):
      self.ParseMojoms([mojom])

  def testExtensibleSingleDefault(self):
    """Verifies that extensible unions must not have multiple default fields."""
    mojom = 'foo.mojom'
    self.WriteFile(
        mojom, """\
        module foo;
        [Extensible] union U {
          [Default] bool x;
          [Default] bool y;
        };
        """)
    with self.assertRaisesRegex(Exception, r'Multiple \[Default\] fields'):
      self.ParseMojoms([mojom])

  def testExtensibleDefaultTypeValid(self):
    """Verifies that an extensible union's default field must be nullable or
    integral type."""
    mojom = 'foo.mojom'
    self.WriteFile(
        mojom, """\
        module foo;
        [Extensible] union U {
          [Default] handle<message_pipe> p;
        };
        """)
    with self.assertRaisesRegex(Exception, 'must be nullable or integral'):
      self.ParseMojoms([mojom])
|
|
@ -0,0 +1,458 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom.generate import module
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class VersionCompatibilityTest(MojomParserTestCase):
|
||||
"""Tests covering compatibility between two versions of the same mojom type
|
||||
definition. This coverage ensures that we can reliably detect unsafe changes
|
||||
to definitions that are expected to tolerate version skew in production
|
||||
environments."""
|
||||
|
||||
def _GetTypeCompatibilityMap(self, old_mojom, new_mojom):
|
||||
"""Helper to support the implementation of assertBackwardCompatible and
|
||||
assertNotBackwardCompatible."""
|
||||
|
||||
old = self.ExtractTypes(old_mojom)
|
||||
new = self.ExtractTypes(new_mojom)
|
||||
self.assertEqual(set(old.keys()), set(new.keys()),
|
||||
'Old and new test mojoms should use the same type names.')
|
||||
|
||||
checker = module.BackwardCompatibilityChecker()
|
||||
compatibility_map = {}
|
||||
for name in old:
|
||||
try:
|
||||
compatibility_map[name] = checker.IsBackwardCompatible(
|
||||
new[name], old[name])
|
||||
except Exception:
|
||||
compatibility_map[name] = False
|
||||
return compatibility_map
|
||||
|
||||
def assertBackwardCompatible(self, old_mojom, new_mojom):
|
||||
compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
|
||||
for name, compatible in compatibility_map.items():
|
||||
if not compatible:
|
||||
raise AssertionError(
|
||||
'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n'
|
||||
'The new definition of %s should pass a backward-compatibiity '
|
||||
'check, but it does not.' % (old_mojom, new_mojom, name))
|
||||
|
||||
def assertNotBackwardCompatible(self, old_mojom, new_mojom):
|
||||
compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
|
||||
if all(compatibility_map.values()):
|
||||
raise AssertionError(
|
||||
'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n'
|
||||
'The new mojom should fail a backward-compatibility check, but it '
|
||||
'does not.' % (old_mojom, new_mojom))
|
||||
|
||||
def testNewNonExtensibleEnumValue(self):
|
||||
"""Adding a value to a non-extensible enum breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
|
||||
'enum E { kFoo, kBar, kBaz };')
|
||||
|
||||
def testNewNonExtensibleEnumValueWithMinVersion(self):
|
||||
"""Adding a value to a non-extensible enum breaks backward-compatibility,
|
||||
even with a new [MinVersion] specified for the value."""
|
||||
self.assertNotBackwardCompatible(
|
||||
'enum E { kFoo, kBar };', 'enum E { kFoo, kBar, [MinVersion=1] kBaz };')
|
||||
|
||||
def testNewValueInExistingVersion(self):
|
||||
"""Adding a value to an existing version is not allowed, even if the old
|
||||
enum was marked [Extensible]. Note that it is irrelevant whether or not the
|
||||
new enum is marked [Extensible]."""
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kFoo, kBar };',
|
||||
'enum E { kFoo, kBar, kBaz };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kFoo, kBar };',
|
||||
'[Extensible] enum E { [Default] kFoo, kBar, kBaz };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kFoo, [MinVersion=1] kBar };',
|
||||
'enum E { kFoo, [MinVersion=1] kBar, [MinVersion=1] kBaz };')
|
||||
|
||||
def testEnumValueRemoval(self):
|
||||
"""Removal of an enum value is never valid even for [Extensible] enums."""
|
||||
self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
|
||||
'enum E { kFoo };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kFoo, kBar };',
|
||||
'[Extensible] enum E { [Default] kFoo };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
|
||||
'[Extensible] enum E { [Default] kA, };')
|
||||
self.assertNotBackwardCompatible(
|
||||
"""[Extensible] enum E {
|
||||
[Default] kA,
|
||||
[MinVersion=1] kB,
|
||||
[MinVersion=1] kZ };""",
|
||||
'[Extensible] enum E { [Default] kA, [MinVersion=1] kB };')
|
||||
|
||||
def testNewExtensibleEnumValueWithMinVersion(self):
|
||||
"""Adding a new and properly [MinVersion]'d value to an [Extensible] enum
|
||||
is a backward-compatible change. Note that it is irrelevant whether or not
|
||||
the new enum is marked [Extensible]."""
|
||||
self.assertBackwardCompatible('[Extensible] enum E { [Default] kA, kB };',
|
||||
'enum E { kA, kB, [MinVersion=1] kC };')
|
||||
self.assertBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA, kB };',
|
||||
'[Extensible] enum E { [Default] kA, kB, [MinVersion=1] kC };')
|
||||
self.assertBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
|
||||
"""[Extensible] enum E {
|
||||
[Default] kA,
|
||||
[MinVersion=1] kB,
|
||||
[MinVersion=2] kC };""")
|
||||
|
||||
def testRenameEnumValue(self):
|
||||
"""Renaming an enum value does not affect backward-compatibility. Only
|
||||
numeric value is relevant."""
|
||||
self.assertBackwardCompatible('enum E { kA, kB };', 'enum E { kX, kY };')
|
||||
|
||||
def testAddEnumValueAlias(self):
|
||||
"""Adding new enum fields does not affect backward-compatibility if it does
|
||||
not introduce any new numeric values."""
|
||||
self.assertBackwardCompatible(
|
||||
'enum E { kA, kB };', 'enum E { kA, kB, kC = kA, kD = 1, kE = kD };')
|
||||
|
||||
def testEnumIdentity(self):
|
||||
"""An unchanged enum is obviously backward-compatible."""
|
||||
self.assertBackwardCompatible('enum E { kA, kB, kC };',
|
||||
'enum E { kA, kB, kC };')
|
||||
|
||||
def testNewStructFieldUnversioned(self):
|
||||
"""Adding a new field to a struct without a new (i.e. higher than any
|
||||
existing version) [MinVersion] tag breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||
'struct S { string a; string b; };')
|
||||
|
||||
def testStructFieldRemoval(self):
|
||||
"""Removing a field from a struct breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; string b; };',
|
||||
'struct S { string a; };')
|
||||
|
||||
def testStructFieldTypeChange(self):
|
||||
"""Changing the type of an existing field always breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||
'struct S { array<int32> a; };')
|
||||
|
||||
def testStructFieldBecomingOptional(self):
|
||||
"""Changing a field from non-optional to optional breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||
'struct S { string? a; };')
|
||||
|
||||
def testStructFieldBecomingNonOptional(self):
|
||||
"""Changing a field from optional to non-optional breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string? a; };',
|
||||
'struct S { string a; };')
|
||||
|
||||
def testStructFieldOrderChange(self):
|
||||
"""Changing the order of fields breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; bool b; };',
|
||||
'struct S { bool b; string a; };')
|
||||
self.assertNotBackwardCompatible('struct S { string a@0; bool b@1; };',
|
||||
'struct S { string a@1; bool b@0; };')
|
||||
|
||||
def testStructFieldMinVersionChange(self):
|
||||
"""Changing the MinVersion of a field breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible(
|
||||
'struct S { string a; [MinVersion=1] string? b; };',
|
||||
'struct S { string a; [MinVersion=2] string? b; };')
|
||||
|
||||
def testStructFieldTypeChange(self):
|
||||
"""If a struct field's own type definition changes, the containing struct
|
||||
is backward-compatible if and only if the field type's change is
|
||||
backward-compatible."""
|
||||
self.assertBackwardCompatible(
|
||||
'struct S {}; struct T { S s; };',
|
||||
'struct S { [MinVersion=1] int32 x; }; struct T { S s; };')
|
||||
self.assertBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA }; struct S { E e; };',
|
||||
"""[Extensible] enum E {
|
||||
[Default] kA,
|
||||
[MinVersion=1] kB };
|
||||
struct S { E e; };""")
|
||||
self.assertNotBackwardCompatible(
|
||||
'struct S {}; struct T { S s; };',
|
||||
'struct S { int32 x; }; struct T { S s; };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA }; struct S { E e; };',
|
||||
'[Extensible] enum E { [Default] kA, kB }; struct S { E e; };')
|
||||
|
||||
def testNewStructFieldWithInvalidMinVersion(self):
|
||||
"""Adding a new field using an existing MinVersion breaks backward-
|
||||
compatibility."""
|
||||
self.assertNotBackwardCompatible(
|
||||
"""\
|
||||
struct S {
|
||||
string a;
|
||||
[MinVersion=1] string? b;
|
||||
};
|
||||
""", """\
|
||||
struct S {
|
||||
string a;
|
||||
[MinVersion=1] string? b;
|
||||
[MinVersion=1] string? c;
|
||||
};""")
|
||||
|
||||
def testNewStructFieldWithValidMinVersion(self):
|
||||
"""Adding a new field is safe if tagged with a MinVersion greater than any
|
||||
previously used MinVersion in the struct."""
|
||||
self.assertBackwardCompatible(
|
||||
'struct S { int32 a; };',
|
||||
'struct S { int32 a; [MinVersion=1] int32 b; };')
|
||||
self.assertBackwardCompatible(
|
||||
'struct S { int32 a; [MinVersion=1] int32 b; };',
|
||||
'struct S { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };')
|
||||
|
||||
def testNewStructFieldNullableReference(self):
|
||||
"""Adding a new nullable reference-typed field is fine if versioned
|
||||
properly."""
|
||||
self.assertBackwardCompatible(
|
||||
'struct S { int32 a; };',
|
||||
'struct S { int32 a; [MinVersion=1] string? b; };')
|
||||
|
||||
def testStructFieldRename(self):
|
||||
"""Renaming a field has no effect on backward-compatibility."""
|
||||
self.assertBackwardCompatible('struct S { int32 x; bool b; };',
|
||||
'struct S { int32 a; bool b; };')
|
||||
|
||||
def testStructFieldReorderWithExplicitOrdinals(self):
|
||||
"""Reordering fields has no effect on backward-compatibility when field
|
||||
ordinals are explicitly labeled and remain unchanged."""
|
||||
self.assertBackwardCompatible('struct S { bool b@1; int32 a@0; };',
|
||||
'struct S { int32 a@0; bool b@1; };')
|
||||
|
||||
def testNewUnionFieldUnversioned(self):
|
||||
"""Adding a new field to a union without a new (i.e. higher than any
|
||||
existing version) [MinVersion] tag breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('union U { string a; };',
|
||||
'union U { string a; string b; };')
|
||||
|
||||
def testUnionFieldRemoval(self):
|
||||
"""Removing a field from a union breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('union U { string a; string b; };',
|
||||
'union U { string a; };')
|
||||
|
||||
def testUnionFieldTypeChange(self):
|
||||
"""Changing the type of an existing field always breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('union U { string a; };',
|
||||
'union U { array<int32> a; };')
|
||||
|
||||
def testUnionFieldBecomingOptional(self):
|
||||
"""Changing a field from non-optional to optional breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('union U { string a; };',
|
||||
'union U { string? a; };')
|
||||
|
||||
def testFieldNestedTypeChanged(self):
|
||||
"""Changing the definition of a nested type within a field (such as an array
|
||||
element or interface endpoint type) should only break backward-compatibility
|
||||
if the changes to that type are not backward-compatible."""
|
||||
self.assertBackwardCompatible(
|
||||
"""\
|
||||
struct S { string a; };
|
||||
struct T { array<S> ss; };
|
||||
""", """\
|
||||
struct S {
|
||||
string a;
|
||||
[MinVersion=1] string? b;
|
||||
};
|
||||
struct T { array<S> ss; };
|
||||
""")
|
||||
self.assertBackwardCompatible(
|
||||
"""\
|
||||
interface F { Do(); };
|
||||
struct S { pending_receiver<F> r; };
|
||||
""", """\
|
||||
interface F {
|
||||
Do();
|
||||
[MinVersion=1] Say();
|
||||
};
|
||||
struct S { pending_receiver<F> r; };
|
||||
""")
|
||||
|
||||
def testRecursiveTypeChange(self):
|
||||
"""Recursive types do not break the compatibility checker."""
|
||||
self.assertBackwardCompatible(
|
||||
"""\
|
||||
struct S {
|
||||
string a;
|
||||
array<S> others;
|
||||
};""", """\
|
||||
struct S {
|
||||
string a;
|
||||
array<S> others;
|
||||
[MinVersion=1] string? b;
|
||||
};""")
|
||||
|
||||
def testUnionFieldBecomingNonOptional(self):
|
||||
"""Changing a field from optional to non-optional breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('union U { string? a; };',
|
||||
'union U { string a; };')
|
||||
|
||||
def testUnionFieldOrderChange(self):
|
||||
"""Changing the order of fields breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('union U { string a; bool b; };',
|
||||
'union U { bool b; string a; };')
|
||||
self.assertNotBackwardCompatible('union U { string a@0; bool b@1; };',
|
||||
'union U { string a@1; bool b@0; };')
|
||||
|
||||
def testUnionFieldMinVersionChange(self):
|
||||
"""Changing the MinVersion of a field breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible(
|
||||
'union U { string a; [MinVersion=1] string b; };',
|
||||
'union U { string a; [MinVersion=2] string b; };')
|
||||
|
||||
def testUnionFieldTypeChange(self):
|
||||
"""If a union field's own type definition changes, the containing union
|
||||
is backward-compatible if and only if the field type's change is
|
||||
backward-compatible."""
|
||||
self.assertBackwardCompatible(
|
||||
'struct S {}; union U { S s; };',
|
||||
'struct S { [MinVersion=1] int32 x; }; union U { S s; };')
|
||||
self.assertBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA }; union U { E e; };',
|
||||
"""[Extensible] enum E {
|
||||
[Default] kA,
|
||||
[MinVersion=1] kB };
|
||||
union U { E e; };""")
|
||||
self.assertNotBackwardCompatible(
|
||||
'struct S {}; union U { S s; };',
|
||||
'struct S { int32 x; }; union U { S s; };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA }; union U { E e; };',
|
||||
'[Extensible] enum E { [Default] kA, kB }; union U { E e; };')
|
||||
|
||||
def testNewUnionFieldWithInvalidMinVersion(self):
|
||||
"""Adding a new field using an existing MinVersion breaks backward-
|
||||
compatibility."""
|
||||
self.assertNotBackwardCompatible(
|
||||
"""\
|
||||
union U {
|
||||
string a;
|
||||
[MinVersion=1] string b;
|
||||
};
|
||||
""", """\
|
||||
union U {
|
||||
string a;
|
||||
[MinVersion=1] string b;
|
||||
[MinVersion=1] string c;
|
||||
};""")
|
||||
|
||||
def testNewUnionFieldWithValidMinVersion(self):
|
||||
"""Adding a new field is safe if tagged with a MinVersion greater than any
|
||||
previously used MinVersion in the union."""
|
||||
self.assertBackwardCompatible(
|
||||
'union U { int32 a; };',
|
||||
'union U { int32 a; [MinVersion=1] int32 b; };')
|
||||
self.assertBackwardCompatible(
|
||||
'union U { int32 a; [MinVersion=1] int32 b; };',
|
||||
'union U { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };')
|
||||
|
||||
def testUnionFieldRename(self):
|
||||
"""Renaming a field has no effect on backward-compatibility."""
|
||||
self.assertBackwardCompatible('union U { int32 x; bool b; };',
|
||||
'union U { int32 a; bool b; };')
|
||||
|
||||
def testUnionFieldReorderWithExplicitOrdinals(self):
|
||||
"""Reordering fields has no effect on backward-compatibility when field
|
||||
ordinals are explicitly labeled and remain unchanged."""
|
||||
self.assertBackwardCompatible('union U { bool b@1; int32 a@0; };',
|
||||
'union U { int32 a@0; bool b@1; };')
|
||||
|
||||
def testNewInterfaceMethodUnversioned(self):
|
||||
"""Adding a new method to an interface without a new (i.e. higher than any
|
||||
existing version) [MinVersion] tag breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('interface F { A(); };',
|
||||
'interface F { A(); B(); };')
|
||||
|
||||
def testInterfaceMethodRemoval(self):
|
||||
"""Removing a method from an interface breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('interface F { A(); B(); };',
|
||||
'interface F { A(); };')
|
||||
|
||||
def testInterfaceMethodParamsChanged(self):
|
||||
"""Changes to the parameter list are only backward-compatible if they meet
|
||||
backward-compatibility requirements of an equivalent struct definition."""
|
||||
self.assertNotBackwardCompatible('interface F { A(); };',
|
||||
'interface F { A(int32 x); };')
|
||||
self.assertNotBackwardCompatible('interface F { A(int32 x); };',
|
||||
'interface F { A(bool x); };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'interface F { A(int32 x, [MinVersion=1] string? s); };', """\
|
||||
interface F {
|
||||
A(int32 x, [MinVersion=1] string? s, [MinVersion=1] int32 y);
|
||||
};""")
|
||||
|
||||
self.assertBackwardCompatible('interface F { A(int32 x); };',
|
||||
'interface F { A(int32 a); };')
|
||||
self.assertBackwardCompatible(
|
||||
'interface F { A(int32 x); };',
|
||||
'interface F { A(int32 x, [MinVersion=1] string? s); };')
|
||||
|
||||
self.assertBackwardCompatible(
|
||||
'struct S {}; interface F { A(S s); };',
|
||||
'struct S { [MinVersion=1] int32 x; }; interface F { A(S s); };')
|
||||
self.assertBackwardCompatible(
|
||||
'struct S {}; struct T {}; interface F { A(S s); };',
|
||||
'struct S {}; struct T {}; interface F { A(T s); };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'struct S {}; struct T { int32 x; }; interface F { A(S s); };',
|
||||
'struct S {}; struct T { int32 x; }; interface F { A(T t); };')
|
||||
|
||||
def testInterfaceMethodReplyAdded(self):
|
||||
"""Adding a reply to a message breaks backward-compatibilty."""
|
||||
self.assertNotBackwardCompatible('interface F { A(); };',
|
||||
'interface F { A() => (); };')
|
||||
|
||||
def testInterfaceMethodReplyRemoved(self):
|
||||
"""Removing a reply from a message breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('interface F { A() => (); };',
|
||||
'interface F { A(); };')
|
||||
|
||||
def testInterfaceMethodReplyParamsChanged(self):
|
||||
"""Similar to request parameters, a change to reply parameters is considered
|
||||
backward-compatible if it meets the same backward-compatibility
|
||||
requirements imposed on equivalent struct changes."""
|
||||
self.assertNotBackwardCompatible('interface F { A() => (); };',
|
||||
'interface F { A() => (int32 x); };')
|
||||
self.assertNotBackwardCompatible('interface F { A() => (int32 x); };',
|
||||
'interface F { A() => (); };')
|
||||
self.assertNotBackwardCompatible('interface F { A() => (bool x); };',
|
||||
'interface F { A() => (int32 x); };')
|
||||
|
||||
self.assertBackwardCompatible('interface F { A() => (int32 a); };',
|
||||
'interface F { A() => (int32 x); };')
|
||||
self.assertBackwardCompatible(
|
||||
'interface F { A() => (int32 x); };',
|
||||
'interface F { A() => (int32 x, [MinVersion] string? s); };')
|
||||
|
||||
def testNewInterfaceMethodWithInvalidMinVersion(self):
|
||||
"""Adding a new method to an existing version is not backward-compatible."""
|
||||
self.assertNotBackwardCompatible(
|
||||
"""\
|
||||
interface F {
|
||||
A();
|
||||
[MinVersion=1] B();
|
||||
};
|
||||
""", """\
|
||||
interface F {
|
||||
A();
|
||||
[MinVersion=1] B();
|
||||
[MinVersion=1] C();
|
||||
};
|
||||
""")
|
||||
|
||||
def testNewInterfaceMethodWithValidMinVersion(self):
|
||||
"""Adding a new method is fine as long as its MinVersion exceeds that of any
|
||||
method on the old interface definition."""
|
||||
self.assertBackwardCompatible('interface F { A(); };',
|
||||
'interface F { A(); [MinVersion=1] B(); };')
|
30
utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py
Executable file
30
utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py
Executable file
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
_TOOLS_DIR = os.path.dirname(__file__)
|
||||
_MOJOM_DIR = os.path.join(_TOOLS_DIR, 'mojom')
|
||||
_BINDINGS_DIR = os.path.join(_TOOLS_DIR, 'bindings')
|
||||
_SRC_DIR = os.path.join(_TOOLS_DIR, os.path.pardir, os.path.pardir,
|
||||
os.path.pardir)
|
||||
|
||||
# Ensure that the mojom library is discoverable.
|
||||
sys.path.append(_MOJOM_DIR)
|
||||
sys.path.append(_BINDINGS_DIR)
|
||||
|
||||
# Help Python find typ in //third_party/catapult/third_party/typ/
|
||||
sys.path.append(
|
||||
os.path.join(_SRC_DIR, 'third_party', 'catapult', 'third_party', 'typ'))
|
||||
import typ
|
||||
|
||||
|
||||
def Main():
|
||||
return typ.main(top_level_dirs=[_MOJOM_DIR, _BINDINGS_DIR])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(Main())
|
20
utils/codegen/ipc/parser.py
Executable file
20
utils/codegen/ipc/parser.py
Executable file
|
@ -0,0 +1,20 @@
|
|||
#!/usr/bin/env python3
|
||||
# SPDX-License-Identifier: BSD-3-Clause
|
||||
# Copyright (C) 2020, Google Inc.
|
||||
#
|
||||
# Author: Paul Elder <paul.elder@ideasonboard.com>
|
||||
#
|
||||
# Run mojo parser with python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
# TODO set sys.pycache_prefix for >= python3.8
|
||||
sys.dont_write_bytecode = True
|
||||
|
||||
# Make sure that mojom_parser.py can import mojom
|
||||
sys.path.insert(0, f'{os.path.dirname(__file__)}/mojo/public/tools/mojom')
|
||||
|
||||
import mojo.public.tools.mojom.mojom_parser as parser
|
||||
|
||||
parser.Run(sys.argv[1:])
|
4
utils/codegen/ipc/tools/README
Normal file
4
utils/codegen/ipc/tools/README
Normal file
|
@ -0,0 +1,4 @@
|
|||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
Files in this directory are imported from 9be4263648d7 of Chromium. Do not
|
||||
modify them manually.
|
51
utils/codegen/ipc/tools/diagnosis/crbug_1001171.py
Normal file
51
utils/codegen/ipc/tools/diagnosis/crbug_1001171.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
# Copyright 2019 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Helper context wrapper for diagnosing crbug.com/1001171.
|
||||
|
||||
This module and all uses thereof can and should be removed once
|
||||
crbug.com/1001171 has been resolved.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def DumpStateOnLookupError():
|
||||
"""Prints potentially useful state info in the event of a LookupError."""
|
||||
try:
|
||||
yield
|
||||
except LookupError:
|
||||
print('LookupError diagnosis for crbug.com/1001171:')
|
||||
for path_index, path_entry in enumerate(sys.path):
|
||||
desc = 'unknown'
|
||||
if not os.path.exists(path_entry):
|
||||
desc = 'missing'
|
||||
elif os.path.islink(path_entry):
|
||||
desc = 'link -> %s' % os.path.realpath(path_entry)
|
||||
elif os.path.isfile(path_entry):
|
||||
desc = 'file'
|
||||
elif os.path.isdir(path_entry):
|
||||
desc = 'dir'
|
||||
print(' sys.path[%d]: %s (%s)' % (path_index, path_entry, desc))
|
||||
|
||||
real_path_entry = os.path.realpath(path_entry)
|
||||
if (path_entry.endswith(os.path.join('lib', 'python2.7'))
|
||||
and os.path.isdir(real_path_entry)):
|
||||
encodings_dir = os.path.realpath(
|
||||
os.path.join(real_path_entry, 'encodings'))
|
||||
if os.path.exists(encodings_dir):
|
||||
if os.path.isdir(encodings_dir):
|
||||
print(' %s contents: %s' % (encodings_dir,
|
||||
str(os.listdir(encodings_dir))))
|
||||
else:
|
||||
print(' %s exists but is not a directory' % encodings_dir)
|
||||
else:
|
||||
print(' %s missing' % encodings_dir)
|
||||
|
||||
raise
|
13
utils/codegen/meson.build
Normal file
13
utils/codegen/meson.build
Normal file
|
@ -0,0 +1,13 @@
|
|||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
## Code generation
|
||||
|
||||
py_modules += ['jinja2', 'yaml']
|
||||
|
||||
gen_controls = files('gen-controls.py')
|
||||
gen_formats = files('gen-formats.py')
|
||||
gen_header = files('gen-header.sh')
|
||||
gen_ipa_pub_key = files('gen-ipa-pub-key.py')
|
||||
gen_tracepoints = files('gen-tp-header.py')
|
||||
|
||||
subdir('ipc')
|
Loading…
Add table
Add a link
Reference in a new issue