meson: Move all code generation scripts to utils/codegen/
We have multiple code generation scripts in utils/, mixed with other miscellaneous utilities, as well as a larger code base based on mojom in utils/ipc/. To make code sharing easier between the generator scripts, without creating a mess in the utils/ directory, move all the code generation code to utils/codegen/. Signed-off-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Reviewed-by: Daniel Scally <dan.scally@ideasonboard.com> Reviewed-by: Paul Elder <paul.elder@ideasonboard.com>
This commit is contained in:
parent
d3bf27180e
commit
50c92cc7e2
91 changed files with 15 additions and 15 deletions
27
utils/codegen/ipc/mojo/public/LICENSE
Normal file
27
utils/codegen/ipc/mojo/public/LICENSE
Normal file
|
@@ -0,0 +1,27 @@
|
|||
// Copyright 2014 The Chromium Authors
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
6
utils/codegen/ipc/mojo/public/tools/.style.yapf
Normal file
6
utils/codegen/ipc/mojo/public/tools/.style.yapf
Normal file
|
@@ -0,0 +1,6 @@
|
|||
[style]
|
||||
based_on_style = pep8
|
||||
|
||||
# New directories should use a .style.yapf that does not include the following:
|
||||
column_limit = 80
|
||||
indent_width = 2
|
22
utils/codegen/ipc/mojo/public/tools/BUILD.gn
Normal file
22
utils/codegen/ipc/mojo/public/tools/BUILD.gn
Normal file
|
@@ -0,0 +1,22 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# The main target used to aggregate all unit tests for Python-based Mojo tools.
|
||||
# This is used to generate a complete isolate which can be pushed to bots to run
|
||||
# the tests.
|
||||
# Aggregates all Python unit tests for the Mojo tools into one target so a
# complete isolate can be generated and pushed to the bots (see header above).
group("mojo_python_unittests") {
  # Runner entry points needed on the bot at test time.
  data = [
    "run_all_python_unittests.py",
    "//testing/scripts/run_isolated_script_test.py",
  ]
  # Targets whose test files are pulled into the isolate.
  deps = [
    "//mojo/public/tools/bindings:tests",
    "//mojo/public/tools/mojom:tests",
    "//mojo/public/tools/mojom/mojom:tests",
  ]
  data_deps = [
    "//testing:test_scripts_shared",
    "//third_party/catapult/third_party/typ/",
  ]
}
|
131
utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn
Normal file
131
utils/codegen/ipc/mojo/public/tools/bindings/BUILD.gn
Normal file
|
@@ -0,0 +1,131 @@
|
|||
# Copyright 2016 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import("//mojo/public/tools/bindings/mojom.gni")
|
||||
import("//third_party/jinja2/jinja2.gni")
|
||||
|
||||
# Precompiles the Jinja templates used by every mojom bindings generator into
# per-language zip archives ($target_gen_dir/*_templates.zip) so that each
# bindings target does not have to re-parse the templates.
action("precompile_templates") {
  sources = mojom_generator_sources
  sources += [
    "$mojom_generator_root/generators/cpp_templates/cpp_macros.tmpl",
    "$mojom_generator_root/generators/cpp_templates/enum_macros.tmpl",
    "$mojom_generator_root/generators/cpp_templates/enum_serialization_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/feature_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/feature_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/interface_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/interface_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/interface_feature_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/interface_macros.tmpl",
    "$mojom_generator_root/generators/cpp_templates/interface_proxy_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/interface_request_validator_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/interface_response_validator_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/interface_stub_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-features.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-forward.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-import-headers.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-params-data.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-shared-internal.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-shared-message-ids.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-shared.cc.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-shared.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module-test-utils.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module.cc.tmpl",
    "$mojom_generator_root/generators/cpp_templates/module.h.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_data_view_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_data_view_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_macros.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_serialization_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_traits_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_traits_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/struct_unserialized_message_context.tmpl",
    "$mojom_generator_root/generators/cpp_templates/union_data_view_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/union_data_view_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/union_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/union_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/union_serialization_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/union_traits_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/union_traits_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/validation_macros.tmpl",
    "$mojom_generator_root/generators/cpp_templates/wrapper_class_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/wrapper_class_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/wrapper_class_template_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_declaration.tmpl",
    "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_definition.tmpl",
    "$mojom_generator_root/generators/cpp_templates/wrapper_union_class_template_definition.tmpl",
    "$mojom_generator_root/generators/java_templates/constant_definition.tmpl",
    "$mojom_generator_root/generators/java_templates/constants.java.tmpl",
    "$mojom_generator_root/generators/java_templates/data_types_definition.tmpl",
    "$mojom_generator_root/generators/java_templates/enum.java.tmpl",
    "$mojom_generator_root/generators/java_templates/enum_definition.tmpl",
    "$mojom_generator_root/generators/java_templates/header.java.tmpl",
    "$mojom_generator_root/generators/java_templates/interface.java.tmpl",
    "$mojom_generator_root/generators/java_templates/interface_definition.tmpl",
    "$mojom_generator_root/generators/java_templates/interface_internal.java.tmpl",
    "$mojom_generator_root/generators/java_templates/struct.java.tmpl",
    "$mojom_generator_root/generators/java_templates/union.java.tmpl",
    "$mojom_generator_root/generators/js_templates/enum_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/fuzzing.tmpl",
    "$mojom_generator_root/generators/js_templates/interface_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/enum_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/enum_definition_for_module.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/interface_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/interface_definition_for_module.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/module_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/mojom-lite.js.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/mojom.m.js.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/struct_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/struct_definition_for_module.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/union_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/lite/union_definition_for_module.tmpl",
    "$mojom_generator_root/generators/js_templates/module.amd.tmpl",
    "$mojom_generator_root/generators/js_templates/module_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/struct_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/union_definition.tmpl",
    "$mojom_generator_root/generators/js_templates/validation_macros.tmpl",
    "$mojom_generator_root/generators/mojolpm_templates/mojolpm.cc.tmpl",
    "$mojom_generator_root/generators/mojolpm_templates/mojolpm.h.tmpl",
    "$mojom_generator_root/generators/mojolpm_templates/mojolpm.proto.tmpl",
    "$mojom_generator_root/generators/mojolpm_templates/mojolpm_from_proto_macros.tmpl",
    "$mojom_generator_root/generators/mojolpm_templates/mojolpm_macros.tmpl",
    "$mojom_generator_root/generators/mojolpm_templates/mojolpm_to_proto_macros.tmpl",
    "$mojom_generator_root/generators/mojolpm_templates/mojolpm_traits_specialization_macros.tmpl",
    "$mojom_generator_root/generators/ts_templates/enum_definition.tmpl",
    "$mojom_generator_root/generators/ts_templates/interface_definition.tmpl",
    "$mojom_generator_root/generators/ts_templates/module_definition.tmpl",
    "$mojom_generator_root/generators/ts_templates/struct_definition.tmpl",
    "$mojom_generator_root/generators/ts_templates/union_definition.tmpl",
  ]
  script = mojom_generator_script

  # The generator imports jinja2 itself, so treat its sources as inputs.
  inputs = jinja2_sources
  outputs = [
    "$target_gen_dir/cpp_templates.zip",
    "$target_gen_dir/java_templates.zip",
    "$target_gen_dir/js_templates.zip",
    "$target_gen_dir/mojolpm_templates.zip",
    "$target_gen_dir/ts_templates.zip",
  ]
  # Invokes the bindings generator's "precompile" subcommand.
  args = [
    "-o",
    rebase_path(target_gen_dir, root_build_dir),
    "--use_bundled_pylibs",
    "precompile",
  ]
}
|
||||
|
||||
# Files needed at run time by the bindings generator's Python unit tests.
group("tests") {
  data = [
    mojom_generator_script,
    "checks/mojom_attributes_check_unittest.py",
    "checks/mojom_interface_feature_check_unittest.py",
    "checks/mojom_restrictions_checks_unittest.py",
    "mojom_bindings_generator_unittest.py",
    "//tools/diagnosis/crbug_1001171.py",
    "//third_party/markupsafe/",
  ]
  data += mojom_generator_sources
  data += jinja2_sources
}
|
1014
utils/codegen/ipc/mojo/public/tools/bindings/README.md
Normal file
1014
utils/codegen/ipc/mojo/public/tools/bindings/README.md
Normal file
File diff suppressed because it is too large
Load diff
|
@@ -0,0 +1,170 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Validate mojo attributes are allowed in Chrome before generation."""
|
||||
|
||||
import mojom.generate.check as check
|
||||
import mojom.generate.module as module
|
||||
|
||||
# Allowlists of mojom attributes, per construct they may decorate.

# Attributes allowed on every mojom construct.
_COMMON_ATTRIBUTES = {
    'EnableIf',
    'EnableIfNot',
}

# For struct, union & parameter lists.
_COMMON_FIELD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
    'MinVersion',
    'RenamedFrom',
}

# Note: `Default` goes on the default _value_, not on the enum.
# Note: [Stable] without [Extensible] is not allowed.
_ENUM_ATTRIBUTES = _COMMON_ATTRIBUTES | {
    'Extensible',
    'Native',
    'Stable',
    'RenamedFrom',
    'Uuid',
}

# TODO(crbug.com/1234883) MinVersion is not needed for EnumVal.
_ENUMVAL_ATTRIBUTES = _COMMON_ATTRIBUTES | {
    'Default',
    'MinVersion',
}

_INTERFACE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
    'RenamedFrom',
    'RequireContext',
    'RuntimeFeature',
    'ServiceSandbox',
    'Stable',
    'Uuid',
}

_METHOD_ATTRIBUTES = _COMMON_ATTRIBUTES | {
    'AllowedContext',
    'MinVersion',
    'NoInterrupt',
    'RuntimeFeature',
    'SupportsUrgent',
    'Sync',
    'UnlimitedSize',
}

_MODULE_ATTRIBUTES = _COMMON_ATTRIBUTES | {
    'JavaConstantsClassName',
    'JavaPackage',
}

_PARAMETER_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES

_STRUCT_ATTRIBUTES = _COMMON_ATTRIBUTES | {
    'CustomSerializer',
    'JavaClassName',
    'Native',
    'Stable',
    'RenamedFrom',
    'Uuid',
}

_STRUCT_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES

_UNION_ATTRIBUTES = _COMMON_ATTRIBUTES | {
    'Extensible',
    'Stable',
    'RenamedFrom',
    'Uuid',
}

_UNION_FIELD_ATTRIBUTES = _COMMON_FIELD_ATTRIBUTES | {
    'Default',
}

# Enums grandfathered in as [Stable] without [Extensible].
# TODO(https://crbug.com/1193875) empty this set and remove the allowlist.
_STABLE_ONLY_ALLOWLISTED_ENUMS = {
    'crosapi.mojom.OptionalBool',
    'crosapi.mojom.TriState',
}
|
||||
|
||||
|
||||
class Check(check.Check):
  """Validates that every attribute used in a mojom module is allowed on the
  construct it decorates, per the _*_ATTRIBUTES allowlists above."""

  def __init__(self, *args, **kwargs):
    super(Check, self).__init__(*args, **kwargs)

  def _Respell(self, allowed, attribute):
    # Best-effort hint for a case-insensitive misspelling of an allowed name;
    # returns "" when no near-match is found.
    for a in allowed:
      if a.lower() == attribute.lower():
        return f" - Did you mean: {a}?"
    return ""

  def _CheckAttributes(self, context, allowed, attributes):
    # Raises check.CheckException when any attribute on `context` is not in
    # `allowed`. `attributes` may be None/empty, which is always valid.
    if not attributes:
      return
    for attribute in attributes:
      if not attribute in allowed:
        # Is there a close misspelling?
        hint = self._Respell(allowed, attribute)
        raise check.CheckException(
            self.module,
            f"attribute {attribute} not allowed on {context}{hint}")

  def _CheckEnumAttributes(self, enum):
    if enum.attributes:
      self._CheckAttributes("enum", _ENUM_ATTRIBUTES, enum.attributes)
      # [Stable] requires [Extensible], except for grandfathered enums.
      if 'Stable' in enum.attributes and not 'Extensible' in enum.attributes:
        full_name = f"{self.module.mojom_namespace}.{enum.mojom_name}"
        if full_name not in _STABLE_ONLY_ALLOWLISTED_ENUMS:
          raise check.CheckException(
              self.module,
              f"[Extensible] required on [Stable] enum {full_name}")
    for enumval in enum.fields:
      self._CheckAttributes("enum value", _ENUMVAL_ATTRIBUTES,
                            enumval.attributes)

  def _CheckInterfaceAttributes(self, interface):
    self._CheckAttributes("interface", _INTERFACE_ATTRIBUTES,
                          interface.attributes)
    for method in interface.methods:
      self._CheckAttributes("method", _METHOD_ATTRIBUTES, method.attributes)
      for param in method.parameters:
        self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
                              param.attributes)
      if method.response_parameters:
        for param in method.response_parameters:
          self._CheckAttributes("parameter", _PARAMETER_ATTRIBUTES,
                                param.attributes)
    # Enums nested in the interface are checked like top-level ones.
    for enum in interface.enums:
      self._CheckEnumAttributes(enum)

  def _CheckModuleAttributes(self):
    self._CheckAttributes("module", _MODULE_ATTRIBUTES, self.module.attributes)

  def _CheckStructAttributes(self, struct):
    self._CheckAttributes("struct", _STRUCT_ATTRIBUTES, struct.attributes)
    for field in struct.fields:
      self._CheckAttributes("struct field", _STRUCT_FIELD_ATTRIBUTES,
                            field.attributes)
    for enum in struct.enums:
      self._CheckEnumAttributes(enum)

  def _CheckUnionAttributes(self, union):
    self._CheckAttributes("union", _UNION_ATTRIBUTES, union.attributes)
    for field in union.fields:
      self._CheckAttributes("union field", _UNION_FIELD_ATTRIBUTES,
                            field.attributes)

  def CheckModule(self):
    """Note that duplicate attributes are forbidden at the parse phase.
    We also do not need to look at the types of any parameters, as they will be
    checked where they are defined. Consts do not have attributes so can be
    skipped."""
    self._CheckModuleAttributes()
    for interface in self.module.interfaces:
      self._CheckInterfaceAttributes(interface)
    for enum in self.module.enums:
      self._CheckEnumAttributes(enum)
    for struct in self.module.structs:
      self._CheckStructAttributes(struct)
    for union in self.module.unions:
      self._CheckUnionAttributes(union)
|
|
@@ -0,0 +1,194 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
import mojom.generate.check as check
|
||||
from mojom_bindings_generator import LoadChecks, _Generate
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class FakeArgs:
  """Minimal stand-in for the argparse namespace handed to _Generate —
  provides just enough fields for the 'attributes' check to run."""

  def __init__(self, tester, files=None):
    """`tester` is a MojomParserTestCase used to resolve paths; every entry
    in `files` is resolved through it."""
    resolve = tester.GetPath
    self.checks_string = 'attributes'
    self.depth = resolve('')
    self.filelist = None
    self.filename = [resolve(name) for name in files]
    self.gen_directories = resolve('gen')
    self.generators_string = ''
    self.import_directories = []
    self.output_dir = resolve('out')
    self.scrambled_message_id_salt_paths = None
    self.typemaps = []
    self.variant = 'none'
|
||||
|
||||
|
||||
class MojoBindingsCheckTest(MojomParserTestCase):
  """End-to-end tests for the 'attributes' check: write a mojom file, parse
  it, run generation, and verify the check accepts or rejects it.

  Fix: this class previously defined `testWrongMethodRequireContext` twice;
  the second definition silently shadowed the first, so only one of the two
  scenarios ever ran. The second is renamed to `testMisspelledSyncOnMethod`
  to match what it actually tests.
  """

  def _ParseAndGenerate(self, mojoms):
    # Parse, then generate with only the 'attributes' check enabled
    # (see FakeArgs.checks_string).
    self.ParseMojoms(mojoms)
    args = FakeArgs(self, files=mojoms)
    _Generate(args, {})

  def _testValid(self, filename, content):
    # Expects parse + generation to succeed.
    self.WriteFile(filename, content)
    self._ParseAndGenerate([filename])

  def _testThrows(self, filename, content, regexp):
    # Expects generation to raise a CheckException matching `regexp`.
    mojoms = []
    self.WriteFile(filename, content)
    mojoms.append(filename)
    with self.assertRaisesRegexp(check.CheckException, regexp):
      self._ParseAndGenerate(mojoms)

  def testLoads(self):
    """Validate that the check is registered under the expected name."""
    check_modules = LoadChecks('attributes')
    self.assertTrue(check_modules['attributes'])

  def testNoAnnotations(self):
    # Undecorated mojom should be fine.
    self._testValid(
        "a.mojom", """
      module a;
      struct Bar { int32 a; };
      enum Hello { kValue };
      union Thingy { Bar b; Hello hi; };
      interface Foo {
        Foo(int32 a, Hello hi, Thingy t) => (Bar b);
      };
    """)

  def testValidAnnotations(self):
    # Obviously this is meaningless and won't generate, but it should pass
    # the attribute check's validation.
    self._testValid(
        "a.mojom", """
      [JavaConstantsClassName="FakeClass",JavaPackage="org.chromium.Fake"]
      module a;
      [Stable, Extensible]
      enum Hello { [Default] kValue, kValue2, [MinVersion=2] kValue3 };
      [Native]
      enum NativeEnum {};
      [Stable,Extensible]
      union Thingy { Bar b; [Default]int32 c; Hello hi; };

      [Stable,RenamedFrom="module.other.Foo",
       Uuid="4C178401-4B07-4C2E-9255-5401A943D0C7"]
      struct Structure { Hello hi; };

      [ServiceSandbox=Hello.kValue,RequireContext=Hello.kValue,Stable,
       Uuid="2F17D7DD-865A-4B1C-9394-9C94E035E82F"]
      interface Foo {
        [AllowedContext=Hello.kValue]
        Foo@0(int32 a) => (int32 b);
        [MinVersion=2,Sync,UnlimitedSize,NoInterrupt]
        Bar@1(int32 b, [MinVersion=2]Structure? s) => (bool c);
      };

      [RuntimeFeature=test.mojom.FeatureName]
      interface FooFeatureControlled {};

      interface FooMethodFeatureControlled {
        [RuntimeFeature=test.mojom.FeatureName]
        MethodWithFeature() => (bool c);
      };
    """)

  def testWrongModuleStable(self):
    contents = """
      // err: module cannot be Stable
      [Stable]
      module a;
      enum Hello { kValue, kValue2, kValue3 };
      enum NativeEnum {};
      struct Structure { Hello hi; };

      interface Foo {
        Foo(int32 a) => (int32 b);
        Bar(int32 b, Structure? s) => (bool c);
      };
    """
    self._testThrows('b.mojom', contents,
                     'attribute Stable not allowed on module')

  def testWrongEnumDefault(self):
    contents = """
      module a;
      // err: default should go on EnumValue not Enum.
      [Default=kValue]
      enum Hello { kValue, kValue2, kValue3 };
      enum NativeEnum {};
      struct Structure { Hello hi; };

      interface Foo {
        Foo(int32 a) => (int32 b);
        Bar(int32 b, Structure? s) => (bool c);
      };
    """
    self._testThrows('b.mojom', contents,
                     'attribute Default not allowed on enum')

  def testWrongStructMinVersion(self):
    contents = """
      module a;
      enum Hello { kValue, kValue2, kValue3 };
      enum NativeEnum {};
      // err: struct cannot have MinVersion.
      [MinVersion=2]
      struct Structure { Hello hi; };

      interface Foo {
        Foo(int32 a) => (int32 b);
        Bar(int32 b, Structure? s) => (bool c);
      };
    """
    self._testThrows('b.mojom', contents,
                     'attribute MinVersion not allowed on struct')

  def testWrongMethodRequireContext(self):
    contents = """
      module a;
      enum Hello { kValue, kValue2, kValue3 };
      enum NativeEnum {};
      struct Structure { Hello hi; };

      interface Foo {
        // err: RequireContext is for interfaces.
        [RequireContext=Hello.kValue]
        Foo(int32 a) => (int32 b);
        Bar(int32 b, Structure? s) => (bool c);
      };
    """
    self._testThrows('b.mojom', contents,
                     'RequireContext not allowed on method')

  def testMisspelledSyncOnMethod(self):
    # Renamed from a duplicate `testWrongMethodRequireContext` that shadowed
    # the method above.
    # crbug.com/1230122
    contents = """
      module a;
      interface Foo {
        // err: sync not Sync.
        [sync]
        Foo(int32 a) => (int32 b);
      };
    """
    self._testThrows('b.mojom', contents,
                     'attribute sync not allowed.*Did you mean: Sync')

  def testStableExtensibleEnum(self):
    # crbug.com/1193875
    contents = """
      module a;
      [Stable]
      enum Foo {
        kDefaultVal,
        kOtherVal = 2,
      };
    """
    self._testThrows('a.mojom', contents,
                     'Extensible.*?required.*?Stable.*?enum')
|
|
@@ -0,0 +1,34 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Ensure no duplicate type definitions before generation."""
|
||||
|
||||
import mojom.generate.check as check
|
||||
import mojom.generate.module as module
|
||||
|
||||
|
||||
class Check(check.Check):
  """Ensures that no enum/struct/union type is defined more than once across
  this module and everything it imports."""

  def __init__(self, *args, **kwargs):
    super(Check, self).__init__(*args, **kwargs)

  def CheckModule(self):
    # Maps fully-qualified kind name -> the module that defined it.
    kinds = dict()
    # Fix: the loop variable was named `module`, shadowing the
    # `mojom.generate.module` import at file scope; renamed to `imported`.
    for imported in self.module.imports:
      for kind in imported.enums + imported.structs + imported.unions:
        kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
        if kind_name in kinds:
          previous_module = kinds[kind_name]
          # The same type reached via two different import paths is fine;
          # two distinct modules defining it is not.
          if previous_module.path != imported.path:
            raise check.CheckException(
                self.module, f"multiple-definition for type {kind_name}" +
                f"(defined in both {previous_module} and {imported})")
        kinds[kind_name] = kind.module

    # A definition in this module must not collide with any import.
    for kind in self.module.enums + self.module.structs + self.module.unions:
      kind_name = f'{kind.module.mojom_namespace}.{kind.mojom_name}'
      if kind_name in kinds:
        previous_module = kinds[kind_name]
        raise check.CheckException(
            self.module, f"multiple-definition for type {kind_name}" +
            f"(previous definition in {previous_module})")
    return True
|
|
@@ -0,0 +1,62 @@
|
|||
# Copyright 2023 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Validate mojo runtime feature guarded interfaces are nullable."""
|
||||
|
||||
import mojom.generate.check as check
|
||||
import mojom.generate.module as module
|
||||
|
||||
|
||||
class Check(check.Check):
  """Flags remote/receiver fields or parameters whose interface is guarded by
  a RuntimeFeature but is not declared nullable."""

  def __init__(self, *args, **kwargs):
    super(Check, self).__init__(*args, **kwargs)

  # `param` is an Interface of some sort.
  def _CheckNonNullableFeatureGuardedInterface(self, kind):
    # Only need to validate interface if it has a RuntimeFeature
    if not kind.kind.runtime_feature:
      return
    # Nullable (optional) is ok as the interface expects they might not be sent.
    if kind.is_nullable:
      return
    interface = kind.kind.mojom_name
    raise check.CheckException(
        self.module,
        f"interface {interface} has a RuntimeFeature but is not nullable")

  # `param` can be a lot of things so check if it is a remote/receiver.
  # Array/Map must be recursed into.
  def _CheckFieldOrParam(self, kind):
    if module.IsAnyInterfaceKind(kind):
      self._CheckNonNullableFeatureGuardedInterface(kind)
    if module.IsArrayKind(kind):
      self._CheckFieldOrParam(kind.kind)
    if module.IsMapKind(kind):
      self._CheckFieldOrParam(kind.key_kind)
      self._CheckFieldOrParam(kind.value_kind)

  def _CheckInterfaceFeatures(self, interface):
    # Both request and response parameters of every method are validated.
    for method in interface.methods:
      for param in method.parameters:
        self._CheckFieldOrParam(param.kind)
      if method.response_parameters:
        for param in method.response_parameters:
          self._CheckFieldOrParam(param.kind)

  def _CheckStructFeatures(self, struct):
    for field in struct.fields:
      self._CheckFieldOrParam(field.kind)

  def _CheckUnionFeatures(self, union):
    for field in union.fields:
      self._CheckFieldOrParam(field.kind)

  def CheckModule(self):
    """Validate that any runtime feature guarded interfaces that might be passed
    over mojo are nullable."""
    for interface in self.module.interfaces:
      self._CheckInterfaceFeatures(interface)
    for struct in self.module.structs:
      self._CheckStructFeatures(struct)
    for union in self.module.unions:
      self._CheckUnionFeatures(union)
|
|
@@ -0,0 +1,173 @@
|
|||
# Copyright 2023 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
import mojom.generate.check as check
|
||||
from mojom_bindings_generator import LoadChecks, _Generate
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class FakeArgs:
  """Fakes args to _Generate - intention is to do just enough to run checks"""

  def __init__(self, tester, files=None):
    """ `tester` is MojomParserTestCase for paths.
    `files` will have tester path added."""
    # Enable only the 'features' check for these tests.
    self.checks_string = 'features'
    self.depth = tester.GetPath('')
    self.filelist = None
    self.filename = [tester.GetPath(x) for x in files]
    self.gen_directories = tester.GetPath('gen')
    self.generators_string = ''
    self.import_directories = []
    self.output_dir = tester.GetPath('out')
    self.scrambled_message_id_salt_paths = None
    self.typemaps = []
    self.variant = 'none'
|
||||
|
||||
|
||||
class MojoBindingsCheckTest(MojomParserTestCase):
  """End-to-end tests for the 'features' bindings check.

  The cases below show the check rejects passing a [RuntimeFeature]-guarded
  interface as a non-nullable handle, while nullable guarded handles and
  unguarded interfaces are accepted everywhere.
  """

  def _ParseAndGenerate(self, mojoms):
    # Parse the mojoms, then run generation with an empty generator map so
    # that only the registered checks execute.
    self.ParseMojoms(mojoms)
    args = FakeArgs(self, files=mojoms)
    _Generate(args, {})

  def assertValid(self, filename, content):
    # Writes `content` and expects parsing + checking to succeed.
    self.WriteFile(filename, content)
    self._ParseAndGenerate([filename])

  def assertThrows(self, filename, content, regexp):
    # Writes `content` and expects a CheckException matching `regexp`.
    mojoms = []
    self.WriteFile(filename, content)
    mojoms.append(filename)
    # NOTE(review): assertRaisesRegexp is a deprecated alias (removed in
    # Python 3.12); assertRaisesRegex is the modern spelling.
    with self.assertRaisesRegexp(check.CheckException, regexp):
      self._ParseAndGenerate(mojoms)

  def testLoads(self):
    """Validate that the check is registered under the expected name."""
    check_modules = LoadChecks('features')
    self.assertTrue(check_modules['features'])

  def testNullableOk(self):
    # Nullable guarded endpoints and non-nullable unguarded endpoints are
    # accepted in methods, structs, unions, arrays and maps.
    self.assertValid(
        "a.mojom", """
        module a;
        // Scaffolding.
        feature kFeature {
          const string name = "Hello";
          const bool enabled_state = false;
        };
        [RuntimeFeature=kFeature]
        interface Guarded {
        };

        // Unguarded interfaces should be ok everywhere.
        interface NotGuarded { };

        // Optional (nullable) interfaces should be ok everywhere:
        struct Bar {
          pending_remote<Guarded>? remote;
          pending_receiver<Guarded>? receiver;
        };
        union Thingy {
          pending_remote<Guarded>? remote;
          pending_receiver<Guarded>? receiver;
        };
        interface Foo {
          Foo(
            pending_remote<Guarded>? remote,
            pending_receiver<Guarded>? receiver,
            pending_associated_remote<Guarded>? a_remote,
            pending_associated_receiver<Guarded>? a_receiver,
            // Unguarded interfaces do not have to be nullable.
            pending_remote<NotGuarded> remote,
            pending_receiver<NotGuarded> receiver,
            pending_associated_remote<NotGuarded> a_remote,
            pending_associated_receiver<NotGuarded> a_receiver
          ) => (
            pending_remote<Guarded>? remote,
            pending_receiver<Guarded>? receiver
          );
          Bar(array<pending_remote<Guarded>?> remote)
              => (map<string, pending_receiver<Guarded>?> a);
        };
        """)

  def testMethodParamsMustBeNullable(self):
    # Shared fixture: one guarded interface.
    prelude = """
        module a;
        // Scaffolding.
        feature kFeature {
          const string name = "Hello";
          const bool enabled_state = false;
        };
        [RuntimeFeature=kFeature]
        interface Guarded { };
    """
    # Each case passes the guarded interface without '?' and must be rejected.
    self.assertThrows(
        'a.mojom', prelude + """
        interface Trial {
          Method(pending_remote<Guarded> a) => ();
        };
    """, 'interface Guarded has a RuntimeFeature')
    self.assertThrows(
        'a.mojom', prelude + """
        interface Trial {
          Method(bool foo) => (pending_receiver<Guarded> a);
        };
    """, 'interface Guarded has a RuntimeFeature')
    self.assertThrows(
        'a.mojom', prelude + """
        interface Trial {
          Method(pending_receiver<Guarded> a) => ();
        };
    """, 'interface Guarded has a RuntimeFeature')
    self.assertThrows(
        'a.mojom', prelude + """
        interface Trial {
          Method(pending_associated_remote<Guarded> a) => ();
        };
    """, 'interface Guarded has a RuntimeFeature')
    self.assertThrows(
        'a.mojom', prelude + """
        interface Trial {
          Method(pending_associated_receiver<Guarded> a) => ();
        };
    """, 'interface Guarded has a RuntimeFeature')
    self.assertThrows(
        'a.mojom', prelude + """
        interface Trial {
          Method(array<pending_associated_receiver<Guarded>> a) => ();
        };
    """, 'interface Guarded has a RuntimeFeature')
    self.assertThrows(
        'a.mojom', prelude + """
        interface Trial {
          Method(map<string, pending_associated_receiver<Guarded>> a) => ();
        };
    """, 'interface Guarded has a RuntimeFeature')

  def testStructUnionMembersMustBeNullable(self):
    # Shared fixture: one guarded interface.
    prelude = """
        module a;
        // Scaffolding.
        feature kFeature {
          const string name = "Hello";
          const bool enabled_state = false;
        };
        [RuntimeFeature=kFeature]
        interface Guarded { };
    """
    # Non-nullable guarded members of structs and unions must be rejected.
    self.assertThrows(
        'a.mojom', prelude + """
        struct Trial {
          pending_remote<Guarded> a;
        };
    """, 'interface Guarded has a RuntimeFeature')
    self.assertThrows(
        'a.mojom', prelude + """
        union Trial {
          pending_remote<Guarded> a;
        };
    """, 'interface Guarded has a RuntimeFeature')
|
|
@ -0,0 +1,102 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Validate RequireContext and AllowedContext annotations before generation."""
|
||||
|
||||
import mojom.generate.check as check
|
||||
import mojom.generate.module as module
|
||||
|
||||
|
||||
class Check(check.Check):
  """Validates RequireContext / AllowedContext annotations.

  An interface marked [RequireContext=<enum value>] may only be passed by a
  method carrying an [AllowedContext=<enum value>] annotation from the same
  enum, whose value is at least as privileged. RestrictContext enums list
  the most privileged field first (lowest numeric value), so a method value
  greater than the interface value is rejected.
  """

  def __init__(self, *args, **kwargs):
    # Memoized map from a struct/union kind spec to the set of
    # interface-bearing fields reachable from it; filled lazily by
    # _GatherReferencedInterfaces.
    self.kind_to_interfaces = {}
    super().__init__(*args, **kwargs)

  def _IsPassedInterface(self, candidate):
    # True when `candidate` carries a pending_* interface endpoint.
    # (Folded the original if/return True/return False into a direct
    # isinstance return — identical behavior.)
    return isinstance(
        candidate.kind,
        (module.PendingReceiver, module.PendingRemote,
         module.PendingAssociatedReceiver, module.PendingAssociatedRemote))

  def _CheckInterface(self, method, param):
    """Raises CheckException unless `method` may pass `param`'s interface."""
    # |param| is a pending_x<Interface> so need .kind.kind to get Interface.
    interface = param.kind.kind
    if interface.require_context:
      if method.allowed_context is None:
        raise check.CheckException(
            self.module, "method `{}` has parameter `{}` which passes interface"
            " `{}` that requires an AllowedContext annotation but none exists.".
            format(
                method.mojom_name,
                param.mojom_name,
                interface.mojom_name,
            ))
      # If a string was provided, or if an enum was not imported, this will
      # be a string and we cannot validate that it is in range.
      if not isinstance(method.allowed_context, module.EnumValue):
        raise check.CheckException(
            self.module,
            "method `{}` has AllowedContext={} which is not a valid enum value."
            .format(method.mojom_name, method.allowed_context))
      # EnumValue must be from the same enum to be compared.
      if interface.require_context.enum != method.allowed_context.enum:
        raise check.CheckException(
            self.module, "method `{}` has parameter `{}` which passes interface"
            " `{}` that requires AllowedContext={} but one of kind `{}` was "
            "provided.".format(
                method.mojom_name,
                param.mojom_name,
                interface.mojom_name,
                interface.require_context.enum,
                method.allowed_context.enum,
            ))
      # RestrictContext enums have most privileged field first (lowest value).
      interface_value = interface.require_context.field.numeric_value
      method_value = method.allowed_context.field.numeric_value
      if interface_value < method_value:
        raise check.CheckException(
            self.module, "RequireContext={} > AllowedContext={} for method "
            "`{}` which passes interface `{}`.".format(
                interface.require_context.GetSpec(),
                method.allowed_context.GetSpec(), method.mojom_name,
                interface.mojom_name))
    return True

  def _GatherReferencedInterfaces(self, field):
    """Returns the set of interface-bearing fields reachable from `field`."""
    key = field.kind.spec
    # structs/unions can nest themselves so we need to bookkeep.
    if key not in self.kind_to_interfaces:
      # Might reference ourselves so have to create the list first.
      self.kind_to_interfaces[key] = set()
      for param in field.kind.fields:
        if self._IsPassedInterface(param):
          self.kind_to_interfaces[key].add(param)
        elif isinstance(param.kind, (module.Struct, module.Union)):
          for iface in self._GatherReferencedInterfaces(param):
            self.kind_to_interfaces[key].add(iface)
    return self.kind_to_interfaces[key]

  def _CheckParams(self, method, params):
    # Note: we have to repeat _CheckParams for each method as each might have
    # different AllowedContext= attributes. We cannot memoize this function,
    # but can do so for gathering referenced interfaces as their RequireContext
    # attributes do not change.
    for param in params:
      if self._IsPassedInterface(param):
        self._CheckInterface(method, param)
      elif isinstance(param.kind, (module.Struct, module.Union)):
        for interface in self._GatherReferencedInterfaces(param):
          self._CheckInterface(method, interface)

  def _CheckMethod(self, method):
    # Both request and response parameters may pass interfaces.
    if method.parameters:
      self._CheckParams(method, method.parameters)
    if method.response_parameters:
      self._CheckParams(method, method.response_parameters)

  def CheckModule(self):
    """Entry point: validates every method of every interface in the module."""
    for interface in self.module.interfaces:
      for method in interface.methods:
        self._CheckMethod(method)
|
|
@ -0,0 +1,254 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
import mojom.generate.check as check
|
||||
from mojom_bindings_generator import LoadChecks, _Generate
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
# Mojoms that we will use in multiple tests.
basic_mojoms = {
    # A three-level privilege ladder; the Check under test treats the first
    # (lowest-valued) field, kHighest, as the most privileged.
    'level.mojom':
    """
  module level;
  enum Level {
    kHighest,
    kMiddle,
    kLowest,
  };
  """,
    # One [RequireContext]-guarded interface per privilege level, used as
    # the target of AllowedContext annotations in the tests below.
    'interfaces.mojom':
    """
  module interfaces;
  import "level.mojom";
  struct Foo {int32 bar;};
  [RequireContext=level.Level.kHighest]
  interface High {
    DoFoo(Foo foo);
  };
  [RequireContext=level.Level.kMiddle]
  interface Mid {
    DoFoo(Foo foo);
  };
  [RequireContext=level.Level.kLowest]
  interface Low {
    DoFoo(Foo foo);
  };
  """
}
|
||||
|
||||
|
||||
class FakeArgs:
  """Fakes args to _Generate - intention is to do just enough to run checks"""

  def __init__(self, tester, files=None):
    """ `tester` is MojomParserTestCase for paths.
    `files` will have tester path added."""
    # Run only the 'restrictions' check, with no generators.
    self.checks_string = 'restrictions'
    self.generators_string = ''
    # Tester-relative locations.
    self.depth = tester.GetPath('')
    self.gen_directories = tester.GetPath('gen')
    self.output_dir = tester.GetPath('out')
    # Inputs, resolved against the tester's root.
    self.filename = [tester.GetPath(name) for name in files]
    self.filelist = None
    # Everything else stays at its neutral default.
    self.import_directories = []
    self.scrambled_message_id_salt_paths = None
    self.typemaps = []
    self.variant = 'none'
|
||||
|
||||
|
||||
class MojoBindingsCheckTest(MojomParserTestCase):
  """End-to-end tests for the 'restrictions' bindings check.

  The check validates [AllowedContext] annotations on methods that pass
  interfaces annotated with [RequireContext] (see the shared fixtures in
  `basic_mojoms`).
  """

  def _WriteBasicMojoms(self):
    # Writes the shared fixture mojoms and returns their file names.
    for filename, contents in basic_mojoms.items():
      self.WriteFile(filename, contents)
    return list(basic_mojoms.keys())

  def _ParseAndGenerate(self, mojoms):
    # Parse, then run generation with an empty generator map so only the
    # registered checks execute.
    self.ParseMojoms(mojoms)
    args = FakeArgs(self, files=mojoms)
    _Generate(args, {})

  def testLoads(self):
    """Validate that the check is registered under the expected name."""
    check_modules = LoadChecks('restrictions')
    self.assertTrue(check_modules['restrictions'])

  def testValidAnnotations(self):
    # Every combination that should pass: matching AllowedContext at each
    # level, all four pending_* flavors, nested structs, and passing an
    # unrestricted interface that itself passes a restricted one.
    mojoms = self._WriteBasicMojoms()

    a = 'a.mojom'
    self.WriteFile(
        a, """
      module a;
      import "level.mojom";
      import "interfaces.mojom";

      interface PassesHigh {
        [AllowedContext=level.Level.kHighest]
        DoHigh(pending_receiver<interfaces.High> hi);
      };
      interface PassesMedium {
        [AllowedContext=level.Level.kMiddle]
        DoMedium(pending_receiver<interfaces.Mid> hi);
        [AllowedContext=level.Level.kMiddle]
        DoMediumRem(pending_remote<interfaces.Mid> hi);
        [AllowedContext=level.Level.kMiddle]
        DoMediumAssoc(pending_associated_receiver<interfaces.Mid> hi);
        [AllowedContext=level.Level.kMiddle]
        DoMediumAssocRem(pending_associated_remote<interfaces.Mid> hi);
      };
      interface PassesLow {
        [AllowedContext=level.Level.kLowest]
        DoLow(pending_receiver<interfaces.Low> hi);
      };

      struct One { pending_receiver<interfaces.High> hi; };
      struct Two { One one; };
      interface PassesNestedHigh {
        [AllowedContext=level.Level.kHighest]
        DoNestedHigh(Two two);
      };

      // Allowed as PassesHigh is not itself restricted.
      interface PassesPassesHigh {
        DoPass(pending_receiver<PassesHigh> hiho);
      };
      """)
    mojoms.append(a)
    self._ParseAndGenerate(mojoms)

  def _testThrows(self, filename, content, regexp):
    # Writes `content` alongside the fixtures, then expects checking to
    # raise a CheckException matching `regexp`.
    mojoms = self._WriteBasicMojoms()
    self.WriteFile(filename, content)
    mojoms.append(filename)
    # NOTE(review): assertRaisesRegexp is a deprecated alias (removed in
    # Python 3.12); assertRaisesRegex is the modern spelling.
    with self.assertRaisesRegexp(check.CheckException, regexp):
      self._ParseAndGenerate(mojoms)

  def testMissingAnnotation(self):
    contents = """
      module b;
      import "level.mojom";
      import "interfaces.mojom";

      interface PassesHigh {
        // err: missing annotation.
        DoHigh(pending_receiver<interfaces.High> hi);
      };
      """
    self._testThrows('b.mojom', contents, 'require.*?AllowedContext')

  def testAllowTooLow(self):
    contents = """
      module b;
      import "level.mojom";
      import "interfaces.mojom";

      interface PassesHigh {
        // err: level is worse than required.
        [AllowedContext=level.Level.kMiddle]
        DoHigh(pending_receiver<interfaces.High> hi);
      };
      """
    self._testThrows('b.mojom', contents,
                     'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')

  def testWrongEnumInAllow(self):
    contents = """
      module b;
      import "level.mojom";
      import "interfaces.mojom";

      enum Blah {
        kZero,
      };
      interface PassesHigh {
        // err: different enums.
        [AllowedContext=Blah.kZero]
        DoHigh(pending_receiver<interfaces.High> hi);
      };
      """
    self._testThrows('b.mojom', contents, 'but one of kind')

  def testNotAnEnumInAllow(self):
    contents = """
      module b;
      import "level.mojom";
      import "interfaces.mojom";

      interface PassesHigh {
        // err: not an enum.
        [AllowedContext=doopdedoo.mojom.kWhatever]
        DoHigh(pending_receiver<interfaces.High> hi);
      };
      """
    self._testThrows('b.mojom', contents, 'not a valid enum value')

  def testMissingAllowedForNestedStructs(self):
    contents = """
      module b;
      import "level.mojom";
      import "interfaces.mojom";

      struct One { pending_receiver<interfaces.High> hi; };
      struct Two { One one; };
      interface PassesNestedHigh {
        // err: missing annotation.
        DoNestedHigh(Two two);
      };
      """
    self._testThrows('b.mojom', contents, 'require.*?AllowedContext')

  def testMissingAllowedForNestedUnions(self):
    contents = """
      module b;
      import "level.mojom";
      import "interfaces.mojom";

      struct One { pending_receiver<interfaces.High> hi; };
      struct Two { One one; };
      union Three {One one; Two two; };
      interface PassesNestedHigh {
        // err: missing annotation.
        DoNestedHigh(Three three);
      };
      """
    self._testThrows('b.mojom', contents, 'require.*?AllowedContext')

  def testMultipleInterfacesThrows(self):
    contents = """
      module b;
      import "level.mojom";
      import "interfaces.mojom";

      struct One { pending_receiver<interfaces.High> hi; };
      interface PassesMultipleInterfaces {
        [AllowedContext=level.Level.kMiddle]
        DoMultiple(
          pending_remote<interfaces.Mid> mid,
          pending_receiver<interfaces.High> hi,
          One one
        );
      };
      """
    self._testThrows('b.mojom', contents,
                     'RequireContext=.*?kHighest > AllowedContext=.*?kMiddle')

  def testMultipleInterfacesAllowed(self):
    """Multiple interfaces can be passed, all satisfy the level."""
    mojoms = self._WriteBasicMojoms()

    b = "b.mojom"
    self.WriteFile(
        b, """
      module b;
      import "level.mojom";
      import "interfaces.mojom";

      struct One { pending_receiver<interfaces.High> hi; };
      interface PassesMultipleInterfaces {
        [AllowedContext=level.Level.kHighest]
        DoMultiple(
          pending_receiver<interfaces.High> hi,
          pending_remote<interfaces.Mid> mid,
          One one
        );
      };
      """)
    mojoms.append(b)
    self._ParseAndGenerate(mojoms)
|
55
utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py
Executable file
55
utils/codegen/ipc/mojo/public/tools/bindings/concatenate-files.py
Executable file
|
@ -0,0 +1,55 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2019 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
#
|
||||
# This utility concatenates several files into one. On Unix-like systems
|
||||
# it is equivalent to:
|
||||
# cat file1 file2 file3 ...files... > target
|
||||
#
|
||||
# The reason for writing a separate utility is that 'cat' is not available
|
||||
# on all supported build platforms, but Python is, and hence this provides
|
||||
# us with an easy and uniform way of doing this on all platforms.
|
||||
|
||||
# for py2/py3 compatibility
|
||||
from __future__ import print_function
|
||||
|
||||
import optparse
|
||||
import sys
|
||||
|
||||
|
||||
def Concatenate(filenames):
  """Concatenate files.

  Args:
    filenames: Array of file names.
        The last name is the target; all earlier ones are sources.

  Returns:
    True, if the operation was successful.
  """
  if not filenames:
    # Guard: the error message below indexes filenames[-1], which would
    # raise IndexError on an empty argument list.
    print("An error occurred: no target file name given.")
    return False
  if len(filenames) < 2:
    print("An error occurred generating %s:\nNothing to do." % filenames[-1])
    return False

  try:
    # Binary mode: bytes are copied verbatim, no newline translation.
    with open(filenames[-1], "wb") as target:
      for filename in filenames[:-1]:
        with open(filename, "rb") as current:
          target.write(current.read())
    return True
  except IOError as e:
    print("An error occurred when writing %s:\n%s" % (filenames[-1], e))
    return False
|
||||
|
||||
|
||||
def main():
  """Command-line entry point: cat all argv files into the last one."""
  parser = optparse.OptionParser()
  parser.set_usage("""Concatenate several files into one.
      Equivalent to: cat file1 ... > target.""")
  _options, args = parser.parse_args()
  # Exit 0 on success, 1 on any failure reported by Concatenate().
  sys.exit(0 if Concatenate(args) else 1)


if __name__ == "__main__":
  main()
|
|
@ -0,0 +1,75 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2018 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Simple utility which concatenates a set of files into a single output file
|
||||
while also stripping any goog.provide or goog.require lines. This allows us to
|
||||
provide a very primitive sort of "compilation" without any extra toolchain
|
||||
support and without having to modify otherwise compilable sources in the tree
|
||||
which use these directives.
|
||||
|
||||
goog.provide lines are replaced with an equivalent invocation of
|
||||
mojo.internal.exportModule, which accomplishes essentially the same thing in an
|
||||
uncompiled context. A singular exception is made for the 'mojo.internal' export,
|
||||
which is instead replaced with an inlined assignment to initialize the
|
||||
namespace.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import optparse
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
_MOJO_INTERNAL_MODULE_NAME = "mojo.internal"
|
||||
_MOJO_EXPORT_MODULE_SYMBOL = "mojo.internal.exportModule"
|
||||
|
||||
|
||||
def FilterLine(filename, line, output):
  """Writes `line` to `output`, translating Closure directives.

  goog.require lines are dropped entirely; goog.provide lines are rewritten
  to the runtime module-export equivalent (with the 'mojo.internal' root
  namespace bootstrapped inline). All other lines pass through unchanged.
  """
  if line.startswith("goog.require"):
    # Dependency declarations are meaningless in the concatenated bundle.
    return
  if not line.startswith("goog.provide"):
    output.write(line)
    return

  provided = re.match(r"goog.provide\('([^']+)'\);", line)
  if not provided:
    print("Invalid goog.provide line in %s:\n%s" % (filename, line))
    sys.exit(1)

  exported_name = provided.group(1)
  if exported_name == _MOJO_INTERNAL_MODULE_NAME:
    output.write("self.mojo = { internal: {} };")
  else:
    output.write("%s('%s');\n" % (_MOJO_EXPORT_MODULE_SYMBOL, exported_name))
|
||||
|
||||
def ConcatenateAndReplaceExports(filenames):
  """Concatenates all but the last path into the last, filtering each line.

  Returns:
    True on success; False if fewer than two names were given or on I/O
    error.
  """
  if len(filenames) < 2:
    print("At least two filenames (one input and the output) are required.")
    return False

  out_path = filenames[-1]
  try:
    with open(out_path, "w") as target:
      for source_path in filenames[:-1]:
        with open(source_path, "r") as source:
          for line in source.readlines():
            # FilterLine strips/rewrites Closure directives per line.
            FilterLine(source_path, line, target)
    return True
  except IOError as e:
    print("Error generating %s\n: %s" % (out_path, e))
    return False
|
||||
|
||||
def main():
  """Command-line entry point; exits 0 on success, 1 on failure."""
  parser = optparse.OptionParser()
  parser.set_usage("""file1 [file2...] outfile
    Concatenate several files into one, stripping Closure provide and
    require directives along the way.""")
  _, args = parser.parse_args()
  sys.exit(0 if ConcatenateAndReplaceExports(args) else 1)


if __name__ == "__main__":
  main()
|
|
@ -0,0 +1,48 @@
|
|||
# Copyright 2017 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Generates a list of all files in a directory.
|
||||
|
||||
This script takes in a directory and an output file name as input.
|
||||
It then reads the directory and creates a list of all file names
|
||||
in that directory. The list is written to the output file.
|
||||
There is also an option to pass in '-p' or '--pattern'
|
||||
which will check each file name against a regular expression
|
||||
pattern that is passed in. Only files which match the regex
|
||||
will be written to the list.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from optparse import OptionParser
|
||||
|
||||
sys.path.insert(
|
||||
0,
|
||||
os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||
|
||||
from mojom.generate.generator import WriteFile
|
||||
|
||||
|
||||
def main():
  """Writes the names of files in --directory matching --pattern to --output.

  One name per line; only names matching the --pattern regex (default '.',
  i.e. everything) are listed.
  """
  parser = OptionParser()
  parser.add_option('-d', '--directory', help='Read files from DIRECTORY')
  parser.add_option('-o', '--output', help='Write list to FILE')
  parser.add_option('-p',
                    '--pattern',
                    help='Only reads files that name matches PATTERN',
                    default=".")
  (options, _) = parser.parse_args()
  pattern = re.compile(options.pattern)
  files = [f for f in os.listdir(options.directory) if pattern.match(f)]

  # Fixed: '\n'.join(f for f in files) was a redundant generator; join the
  # list directly. A trailing newline follows the final entry.
  contents = '\n'.join(files) + '\n'
  WriteFile(contents, options.output)


if __name__ == '__main__':
  sys.exit(main())
|
135
utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py
Executable file
135
utils/codegen/ipc/mojo/public/tools/bindings/generate_type_mappings.py
Executable file
|
@ -0,0 +1,135 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2016 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Generates a JSON typemap from its command-line arguments and dependencies.
|
||||
|
||||
Each typemap should be specified in an command-line argument of the form
|
||||
key=value, with an argument of "--start-typemap" preceding each typemap.
|
||||
|
||||
For example,
|
||||
generate_type_mappings.py --output=foo.typemap --start-typemap \\
|
||||
public_headers=foo.h traits_headers=foo_traits.h \\
|
||||
type_mappings=mojom.Foo=FooImpl
|
||||
|
||||
generates a foo.typemap containing
|
||||
{
|
||||
"c++": {
|
||||
"mojom.Foo": {
|
||||
"typename": "FooImpl",
|
||||
"traits_headers": [
|
||||
"foo_traits.h"
|
||||
],
|
||||
"public_headers": [
|
||||
"foo.h"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Then,
|
||||
generate_type_mappings.py --dependency foo.typemap --output=bar.typemap \\
|
||||
--start-typemap public_headers=bar.h traits_headers=bar_traits.h \\
|
||||
type_mappings=mojom.Bar=BarImpl
|
||||
|
||||
generates a bar.typemap containing
|
||||
{
|
||||
"c++": {
|
||||
"mojom.Bar": {
|
||||
"typename": "BarImpl",
|
||||
"traits_headers": [
|
||||
"bar_traits.h"
|
||||
],
|
||||
"public_headers": [
|
||||
"bar.h"
|
||||
]
|
||||
},
|
||||
"mojom.Foo": {
|
||||
"typename": "FooImpl",
|
||||
"traits_headers": [
|
||||
"foo_traits.h"
|
||||
],
|
||||
"public_headers": [
|
||||
"foo.h"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
sys.path.insert(
|
||||
0,
|
||||
os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||
|
||||
from mojom.generate.generator import WriteFile
|
||||
|
||||
def ReadTypemap(path):
  """Returns the 'c++' section of the JSON typemap stored at `path`."""
  with open(path) as typemap_file:
    return json.load(typemap_file)['c++']
|
||||
|
||||
|
||||
def LoadCppTypemapConfig(path):
  """Parses a GN-emitted typemap config file into per-mojom-type entries.

  Returns:
    Dict mapping each mojom type name to a dict of its C++ mapping
    attributes, with documented defaults for every optional key.
  """
  configs = {}
  with open(path) as config_file:
    for config in json.load(config_file):
      # Headers are shared by every type entry of this config; note the
      # deliberate cross-wiring: 'traits_headers' from the config feed
      # 'public_headers', 'traits_private_headers' feed 'traits_headers'.
      public_headers = config.get('traits_headers', [])
      traits_headers = config.get('traits_private_headers', [])
      for entry in config['types']:
        configs[entry['mojom']] = {
            'typename': entry['cpp'],
            'forward_declaration': entry.get('forward_declaration', None),
            'public_headers': public_headers,
            'traits_headers': traits_headers,
            'copyable_pass_by_value': entry.get('copyable_pass_by_value',
                                                False),
            'default_constructible': entry.get('default_constructible', True),
            'force_serialize': entry.get('force_serialize', False),
            'hashable': entry.get('hashable', False),
            'move_only': entry.get('move_only', False),
            'nullable_is_same_type': entry.get('nullable_is_same_type', False),
            'non_copyable_non_movable': False,
        }
  return configs
|
||||
|
||||
def main():
  """Builds a merged JSON typemap from a config file plus dependencies."""
  parser = argparse.ArgumentParser(
      description=__doc__,
      formatter_class=argparse.RawDescriptionHelpFormatter)
  parser.add_argument(
      '--dependency',
      type=str,
      action='append',
      default=[],
      help=('A path to another JSON typemap to merge into the output. '
            'This may be repeated to merge multiple typemaps.'))
  parser.add_argument(
      '--cpp-typemap-config',
      type=str,
      action='store',
      dest='cpp_config_path',
      help=('A path to a single JSON-formatted typemap config as emitted by'
            'GN when processing a mojom_cpp_typemap build rule.'))
  parser.add_argument('--output',
                      type=str,
                      required=True,
                      help='The path to which to write the generated JSON.')
  params, _ = parser.parse_known_args()

  typemaps = {}
  if params.cpp_config_path:
    typemaps = LoadCppTypemapConfig(params.cpp_config_path)

  # Fail loudly before merging if any declared dependency is absent.
  missing = [path for path in params.dependency if not os.path.exists(path)]
  if missing:
    raise IOError('Missing dependencies: %s' % ', '.join(missing))
  for path in params.dependency:
    typemaps.update(ReadTypemap(path))

  WriteFile(json.dumps({'c++': typemaps}, indent=2), params.output)


if __name__ == '__main__':
  main()
|
47
utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py
Executable file
47
utils/codegen/ipc/mojo/public/tools/bindings/minify_with_terser.py
Executable file
|
@ -0,0 +1,47 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2023 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
#
|
||||
# This utility minifies JS files with terser.
|
||||
#
|
||||
# Instance of 'node' has no 'RunNode' member (no-member)
|
||||
# pylint: disable=no-member
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
_HERE_PATH = os.path.dirname(__file__)
|
||||
_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
|
||||
_CWD = os.getcwd()
|
||||
sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
|
||||
import node
|
||||
import node_modules
|
||||
|
||||
|
||||
def MinifyFile(input_file, output_file):
  """Runs terser over `input_file`, writing minified JS to `output_file`."""
  terser_invocation = [
      node_modules.PathToTerser(), input_file, '--mangle', '--compress',
      '--comments', 'false', '--output', output_file
  ]
  node.RunNode(terser_invocation)
|
||||
|
||||
|
||||
def main(argv):
  """Parses --input/--output from `argv` and minifies the input file."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--input', required=True)
  parser.add_argument('--output', required=True)
  args = parser.parse_args(argv)

  # Delete the output file if it already exists. It may be a sym link to the
  # input, because in non-optimized/pre-Terser builds the input file is copied
  # to the output location with gn copy().
  out_path = os.path.join(_CWD, args.output)
  if os.path.exists(out_path):
    os.remove(out_path)

  MinifyFile(os.path.join(_CWD, args.input), out_path)


if __name__ == '__main__':
  main(sys.argv[1:])
|
2118
utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni
Normal file
2118
utils/codegen/ipc/mojo/public/tools/bindings/mojom.gni
Normal file
File diff suppressed because it is too large
Load diff
424
utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
Executable file
424
utils/codegen/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
Executable file
|
@ -0,0 +1,424 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""The frontend for the Mojo bindings system."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
|
||||
import hashlib
|
||||
import importlib
|
||||
import json
|
||||
import os
|
||||
import pprint
|
||||
import re
|
||||
import struct
|
||||
import sys
|
||||
|
||||
# Disable lint check for finding modules:
|
||||
# pylint: disable=F0401
|
||||
|
||||
def _GetDirAbove(dirname):
|
||||
"""Returns the directory "above" this file containing |dirname| (which must
|
||||
also be "above" this file)."""
|
||||
path = os.path.abspath(__file__)
|
||||
while True:
|
||||
path, tail = os.path.split(path)
|
||||
assert tail
|
||||
if tail == dirname:
|
||||
return path
|
||||
|
||||
|
||||
sys.path.insert(
|
||||
0,
|
||||
os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||
|
||||
from mojom.error import Error
|
||||
import mojom.fileutil as fileutil
|
||||
from mojom.generate.module import Module
|
||||
from mojom.generate import template_expander
|
||||
from mojom.generate import translate
|
||||
from mojom.generate.generator import WriteFile
|
||||
|
||||
sys.path.append(
|
||||
os.path.join(_GetDirAbove("mojo"), "tools", "diagnosis"))
|
||||
import crbug_1001171
|
||||
|
||||
|
||||
_BUILTIN_GENERATORS = {
|
||||
"c++": "mojom_cpp_generator",
|
||||
"javascript": "mojom_js_generator",
|
||||
"java": "mojom_java_generator",
|
||||
"mojolpm": "mojom_mojolpm_generator",
|
||||
"typescript": "mojom_ts_generator",
|
||||
}
|
||||
|
||||
_BUILTIN_CHECKS = {
|
||||
"attributes": "mojom_attributes_check",
|
||||
"definitions": "mojom_definitions_check",
|
||||
"features": "mojom_interface_feature_check",
|
||||
"restrictions": "mojom_restrictions_check",
|
||||
}
|
||||
|
||||
|
||||
def LoadGenerators(generators_string):
  """Imports the generator module for each comma-separated language name.

  Returns a dict mapping each lowercased language name to its module;
  exits the process on an unrecognized name.
  """
  if not generators_string:
    return {}  # No generators.

  generators = {}
  for raw_name in generators_string.split(","):
    stripped = raw_name.strip()
    language = stripped.lower()
    if language not in _BUILTIN_GENERATORS:
      print("Unknown generator name %s" % stripped)
      sys.exit(1)
    generators[language] = importlib.import_module(
        "generators.%s" % _BUILTIN_GENERATORS[language])
  return generators
|
||||
|
||||
|
||||
def LoadChecks(checks_string):
  """Imports the check module for each comma-separated check name.

  Returns a dict mapping each lowercased check name to its module; exits
  the process on an unrecognized name.
  """
  if not checks_string:
    return {}  # No checks.

  checks = {}
  for raw_name in checks_string.split(","):
    stripped = raw_name.strip()
    check = stripped.lower()
    if check not in _BUILTIN_CHECKS:
      print("Unknown check name %s" % stripped)
      sys.exit(1)
    checks[check] = importlib.import_module("checks.%s" % _BUILTIN_CHECKS[check])
  return checks
|
||||
|
||||
|
||||
def MakeImportStackMessage(imported_filename_stack):
  """Make a (human-readable) message listing a chain of imports. (Returned
  string begins with a newline (if nonempty) and does not end with one.)"""
  # Pair each file with its importer (the element before it in the stack),
  # then list the pairs innermost-first.
  pairs = zip(imported_filename_stack[1:], imported_filename_stack)
  messages = ["\n  %s was imported by %s" % (imported, importer)
              for (imported, importer) in pairs]
  return ''.join(reversed(messages))
|
||||
|
||||
|
||||
class RelativePath:
  """A path anchored at either the source tree root or the output directory."""

  def __init__(self, path, source_root, output_dir):
    self.path = path
    # Anchor under the source root when possible, otherwise under the output
    # directory; anything else is not a valid input for the generator.
    if path.startswith(source_root):
      self.root = source_root
    elif path.startswith(output_dir):
      self.root = output_dir
    else:
      raise Exception("Invalid input path %s" % path)

  def relative_path(self):
    """Return self.path expressed relative to its anchoring root."""
    absolute_path = os.path.abspath(self.path)
    absolute_root = os.path.abspath(self.root)
    return os.path.relpath(absolute_path, absolute_root)
|
||||
|
||||
|
||||
def _GetModulePath(path, output_dir):
|
||||
return os.path.join(output_dir, path.relative_path() + '-module')
|
||||
|
||||
|
||||
def ScrambleMethodOrdinals(interfaces, salt):
  """Assign salted, pseudo-random ordinals to methods without explicit ones.

  Methods carrying an explicit_ordinal keep it untouched. For the rest, the
  ordinal is derived from sha256(salt + interface name + attempt counter),
  truncated to 31 bits, with collisions within an interface retried.
  """
  used_ordinals = set()
  for interface in interfaces:
    attempt = 0
    used_ordinals.clear()
    for method in interface.methods:
      if method.explicit_ordinal is not None:
        continue
      while True:
        attempt += 1
        if attempt == 1000000:
          raise Exception("Could not generate %d method ordinals for %s" %
                          (len(interface.methods), interface.mojom_name))
        # The scrambling doesn't have to be cryptographically strong; it just
        # needs to be non-trivial to guess without the secret salt, making it
        # harder for a compromised process to send fake Mojo messages.
        digest = hashlib.sha256(salt)
        digest.update(interface.mojom_name.encode('utf-8'))
        digest.update(str(attempt).encode('utf-8'))
        # First 4 digest bytes as a little-endian uint32, trimmed to 31 bits
        # so the value always fits a Java (signed) int.
        scrambled = struct.unpack('<L', digest.digest()[:4])[0] & 0x7fffffff
        if scrambled in used_ordinals:
          continue
        used_ordinals.add(scrambled)
        method.ordinal = scrambled
        method.ordinal_comment = (
            'The %s value is based on sha256(salt + "%s%d").' %
            (scrambled, interface.mojom_name, attempt))
        break
|
||||
|
||||
|
||||
def ReadFileContents(filename):
  """Return the raw bytes of |filename|."""
  with open(filename, 'rb') as file_object:
    return file_object.read()
|
||||
|
||||
|
||||
class MojomProcessor:
  """Takes parsed mojom modules and generates language bindings from them.

  Attributes:
    _processed_files: {Dict[str, mojom.generate.module.Module]} Mapping from
        relative mojom filename paths to the module AST for that mojom file.
  """
  def __init__(self, should_generate):
    # should_generate: predicate over a relative path deciding whether
    # bindings are emitted for that file.
    self._should_generate = should_generate
    self._processed_files = {}
    # Per-language typemap dictionaries, populated by LoadTypemaps().
    self._typemap = {}

  def LoadTypemaps(self, typemaps):
    """Merge each typemap JSON file in |typemaps| into self._typemap."""
    # Support some very simple single-line comments in typemap JSON.
    comment_expr = r"^\s*//.*$"
    def no_comments(line):
      return not re.match(comment_expr, line)
    for filename in typemaps:
      with open(filename) as f:
        # NOTE(review): this rebinds the |typemaps| parameter to the parsed
        # JSON of the current file; harmless because the outer loop iterates
        # an already-created iterator, but worth renaming some day.
        typemaps = json.loads("".join(filter(no_comments, f.readlines())))
        for language, typemap in typemaps.items():
          language_map = self._typemap.get(language, {})
          language_map.update(typemap)
          self._typemap[language] = language_map
    if 'c++' in self._typemap:
      # mojolpm shares the C++ typemap.
      self._typemap['mojolpm'] = self._typemap['c++']

  def _GenerateModule(self, args, remaining_args, check_modules,
                      generator_modules, rel_filename, imported_filename_stack):
    """Load (and, if selected, check and generate bindings for) one module.

    Returns the loaded mojom.generate.module.Module, memoized in
    self._processed_files.
    """
    # Return the already-generated module.
    if rel_filename.path in self._processed_files:
      return self._processed_files[rel_filename.path]

    if rel_filename.path in imported_filename_stack:
      print("%s: Error: Circular dependency" % rel_filename.path + \
          MakeImportStackMessage(imported_filename_stack + [rel_filename.path]))
      sys.exit(1)

    # The parse step has already serialized the module next to the output;
    # deserialize it rather than re-parsing the mojom.
    module_path = _GetModulePath(rel_filename, args.output_dir)
    with open(module_path, 'rb') as f:
      module = Module.Load(f)

    if args.scrambled_message_id_salt_paths:
      # Concatenate all salt file contents to form the scrambling salt.
      salt = b''.join(
          map(ReadFileContents, args.scrambled_message_id_salt_paths))
      ScrambleMethodOrdinals(module.interfaces, salt)

    if self._should_generate(rel_filename.path):
      # Run checks on module first.
      for check_module in check_modules.values():
        checker = check_module.Check(module)
        checker.CheckModule()
      # Then run generation.
      for language, generator_module in generator_modules.items():
        generator = generator_module.Generator(
            module, args.output_dir, typemap=self._typemap.get(language, {}),
            variant=args.variant, bytecode_path=args.bytecode_path,
            for_blink=args.for_blink,
            js_generate_struct_deserializers=\
                args.js_generate_struct_deserializers,
            export_attribute=args.export_attribute,
            export_header=args.export_header,
            generate_non_variant_code=args.generate_non_variant_code,
            support_lazy_serialization=args.support_lazy_serialization,
            disallow_native_types=args.disallow_native_types,
            disallow_interfaces=args.disallow_interfaces,
            generate_message_ids=args.generate_message_ids,
            generate_fuzzing=args.generate_fuzzing,
            enable_kythe_annotations=args.enable_kythe_annotations,
            extra_cpp_template_paths=args.extra_cpp_template_paths,
            generate_extra_cpp_only=args.generate_extra_cpp_only)
        # Forward only the extra command-line args carrying this generator's
        # declared prefix (e.g. "--java_...").
        filtered_args = []
        if hasattr(generator_module, 'GENERATOR_PREFIX'):
          prefix = '--' + generator_module.GENERATOR_PREFIX + '_'
          filtered_args = [arg for arg in remaining_args
                           if arg.startswith(prefix)]
        generator.GenerateFiles(filtered_args)

    # Save result.
    self._processed_files[rel_filename.path] = module
    return module
|
||||
|
||||
|
||||
def _Generate(args, remaining_args):
  """Entry point for the 'generate' subcommand; returns a process exit code."""
  if args.variant == "none":
    args.variant = None

  # Normalize -I entries into RelativePath objects; a "dir:depth" entry
  # carries an explicit source-root depth for that import directory.
  for idx, import_dir in enumerate(args.import_directories):
    tokens = import_dir.split(":")
    if len(tokens) >= 2:
      args.import_directories[idx] = RelativePath(tokens[0], tokens[1],
                                                  args.output_dir)
    else:
      args.import_directories[idx] = RelativePath(tokens[0], args.depth,
                                                  args.output_dir)
  generator_modules = LoadGenerators(args.generators_string)
  check_modules = LoadChecks(args.checks_string)

  fileutil.EnsureDirectoryExists(args.output_dir)

  # Only the files named on the command line (not transitive imports) get
  # bindings generated.
  processor = MojomProcessor(lambda filename: filename in args.filename)
  processor.LoadTypemaps(set(args.typemaps))

  if args.filelist:
    with open(args.filelist) as f:
      args.filename.extend(f.read().split())

  for filename in args.filename:
    processor._GenerateModule(
        args, remaining_args, check_modules, generator_modules,
        RelativePath(filename, args.depth, args.output_dir), [])

  return 0
|
||||
|
||||
|
||||
def _Precompile(args, _):
  """Entry point for the 'precompile' subcommand.

  Precompiles the Jinja templates of every built-in generator into
  args.output_dir; returns a process exit code.
  """
  generator_modules = LoadGenerators(",".join(_BUILTIN_GENERATORS.keys()))

  template_expander.PrecompileTemplates(generator_modules, args.output_dir)
  return 0
|
||||
|
||||
|
||||
def main():
  """Parse command-line arguments and dispatch to the selected subcommand.

  Two subcommands are registered: 'generate' (-> _Generate) and 'precompile'
  (-> _Precompile). Unknown arguments are collected and forwarded so that
  generator-specific flags (prefixed with a generator's GENERATOR_PREFIX)
  can be filtered later.
  """
  parser = argparse.ArgumentParser(
      description="Generate bindings from mojom files.")
  parser.add_argument("--use_bundled_pylibs", action="store_true",
                      help="use Python modules bundled in the SDK")
  parser.add_argument(
      "-o",
      "--output_dir",
      dest="output_dir",
      default=".",
      help="output directory for generated files")

  subparsers = parser.add_subparsers()

  generate_parser = subparsers.add_parser(
      "generate", description="Generate bindings from mojom files.")
  generate_parser.add_argument("filename", nargs="*",
                               help="mojom input file")
  generate_parser.add_argument("--filelist", help="mojom input file list")
  generate_parser.add_argument("-d", "--depth", dest="depth", default=".",
                               help="depth from source root")
  generate_parser.add_argument("-g",
                               "--generators",
                               dest="generators_string",
                               metavar="GENERATORS",
                               default="c++,javascript,java,mojolpm",
                               help="comma-separated list of generators")
  generate_parser.add_argument("-c",
                               "--checks",
                               dest="checks_string",
                               metavar="CHECKS",
                               default=",".join(_BUILTIN_CHECKS.keys()),
                               help="comma-separated list of checks")
  generate_parser.add_argument(
      "--gen_dir", dest="gen_directories", action="append", metavar="directory",
      default=[], help="add a directory to be searched for the syntax trees.")
  generate_parser.add_argument(
      "-I", dest="import_directories", action="append", metavar="directory",
      default=[],
      help="add a directory to be searched for import files. The depth from "
      "source root can be specified for each import by appending it after "
      "a colon")
  generate_parser.add_argument("--typemap", action="append", metavar="TYPEMAP",
                               default=[], dest="typemaps",
                               help="apply TYPEMAP to generated output")
  generate_parser.add_argument("--variant", dest="variant", default=None,
                               help="output a named variant of the bindings")
  generate_parser.add_argument(
      "--bytecode_path", required=True, help=(
          "the path from which to load template bytecode; to generate template "
          "bytecode, run %s precompile BYTECODE_PATH" % os.path.basename(
              sys.argv[0])))
  generate_parser.add_argument("--for_blink", action="store_true",
                               help="Use WTF types as generated types for mojo "
                               "string/array/map.")
  generate_parser.add_argument(
      "--js_generate_struct_deserializers", action="store_true",
      help="Generate javascript deserialize methods for structs in "
      "mojom-lite.js file")
  generate_parser.add_argument(
      "--export_attribute", default="",
      help="Optional attribute to specify on class declaration to export it "
      "for the component build.")
  generate_parser.add_argument(
      "--export_header", default="",
      help="Optional header to include in the generated headers to support the "
      "component build.")
  generate_parser.add_argument(
      "--generate_non_variant_code", action="store_true",
      help="Generate code that is shared by different variants.")
  generate_parser.add_argument(
      "--scrambled_message_id_salt_path",
      dest="scrambled_message_id_salt_paths",
      help="If non-empty, the path to a file whose contents should be used as"
      "a salt for generating scrambled message IDs. If this switch is specified"
      "more than once, the contents of all salt files are concatenated to form"
      "the salt value.", default=[], action="append")
  generate_parser.add_argument(
      "--support_lazy_serialization",
      help="If set, generated bindings will serialize lazily when possible.",
      action="store_true")
  generate_parser.add_argument(
      "--extra_cpp_template_paths",
      dest="extra_cpp_template_paths",
      action="append",
      metavar="path_to_template",
      default=[],
      help="Provide a path to a new template (.tmpl) that is used to generate "
      "additional C++ source/header files ")
  generate_parser.add_argument(
      "--generate_extra_cpp_only",
      help="If set and extra_cpp_template_paths provided, will only generate"
      "extra_cpp_template related C++ bindings",
      action="store_true")
  generate_parser.add_argument(
      "--disallow_native_types",
      help="Disallows the [Native] attribute to be specified on structs or "
      "enums within the mojom file.", action="store_true")
  generate_parser.add_argument(
      "--disallow_interfaces",
      help="Disallows interface definitions within the mojom file. It is an "
      "error to specify this flag when processing a mojom file which defines "
      "any interface.", action="store_true")
  generate_parser.add_argument(
      "--generate_message_ids",
      help="Generates only the message IDs header for C++ bindings. Note that "
      "this flag only matters if --generate_non_variant_code is also "
      "specified.", action="store_true")
  generate_parser.add_argument(
      "--generate_fuzzing",
      action="store_true",
      help="Generates additional bindings for fuzzing in JS.")
  generate_parser.add_argument(
      "--enable_kythe_annotations",
      action="store_true",
      help="Adds annotations for kythe metadata generation.")

  generate_parser.set_defaults(func=_Generate)

  precompile_parser = subparsers.add_parser("precompile",
      description="Precompile templates for the mojom bindings generator.")
  precompile_parser.set_defaults(func=_Precompile)

  # parse_known_args keeps generator-specific flags in remaining_args.
  args, remaining_args = parser.parse_known_args()
  return args.func(args, remaining_args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
  # DumpStateOnLookupError dumps diagnostic state if the flaky LookupError of
  # crbug.com/1001171 strikes during generation.
  with crbug_1001171.DumpStateOnLookupError():
    ret = main()
    # Exit without running GC, which can save multiple seconds due to the large
    # number of object created. But flush is necessary as os._exit doesn't do
    # that.
    sys.stdout.flush()
    sys.stderr.flush()
    os._exit(ret)
|
|
@ -0,0 +1,62 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from mojom_bindings_generator import MakeImportStackMessage
|
||||
from mojom_bindings_generator import ScrambleMethodOrdinals
|
||||
|
||||
|
||||
class FakeIface:
  """Minimal stand-in for a mojom Interface: just the fields the tests set."""

  def __init__(self):
    # Both populated by the test before use.
    self.mojom_name = self.methods = None
|
||||
|
||||
|
||||
class FakeMethod:
  """Minimal stand-in for a mojom Method used by ordinal-scrambling tests."""

  def __init__(self, explicit_ordinal=None):
    self.explicit_ordinal = explicit_ordinal
    # The effective ordinal starts out equal to the explicit one (or None)
    # and may be overwritten by ScrambleMethodOrdinals().
    self.ordinal = explicit_ordinal
    self.ordinal_comment = None
|
||||
|
||||
|
||||
class MojoBindingsGeneratorTest(unittest.TestCase):
  """Tests mojo_bindings_generator."""

  def testMakeImportStackMessage(self):
    """Tests MakeImportStackMessage()."""
    # A single-element stack has no import relationship to report.
    self.assertEqual(MakeImportStackMessage(["x"]), "")
    self.assertEqual(MakeImportStackMessage(["x", "y"]),
                     "\n  y was imported by x")
    # Innermost import is reported first.
    self.assertEqual(MakeImportStackMessage(["x", "y", "z"]),
                     "\n  z was imported by y\n  y was imported by x")

  def testScrambleMethodOrdinals(self):
    """Tests ScrambleMethodOrdinals()."""
    interface = FakeIface()
    interface.mojom_name = 'RendererConfiguration'
    interface.methods = [
        FakeMethod(),
        FakeMethod(),
        FakeMethod(),
        FakeMethod(explicit_ordinal=42)
    ]
    ScrambleMethodOrdinals([interface], "foo".encode('utf-8'))
    # These next three values are hard-coded. If the generation algorithm
    # changes from being based on sha256(seed + interface.name + str(i)) then
    # these numbers will obviously need to change too.
    #
    # Note that hashlib.sha256('fooRendererConfiguration1').digest()[:4] is
    # '\xa5\xbc\xf9\xca' and that hex(1257880741) = '0x4af9bca5'. The
    # difference in 0x4a vs 0xca is because we only take 31 bits.
    self.assertEqual(interface.methods[0].ordinal, 1257880741)
    self.assertEqual(interface.methods[1].ordinal, 631133653)
    self.assertEqual(interface.methods[2].ordinal, 549336076)

    # Explicit method ordinals should not be scrambled.
    self.assertEqual(interface.methods[3].ordinal, 42)
|
||||
|
||||
|
||||
# Allow running this test file directly.
if __name__ == "__main__":
  unittest.main()
|
58
utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py
Executable file
58
utils/codegen/ipc/mojo/public/tools/bindings/validate_typemap_config.py
Executable file
|
@ -0,0 +1,58 @@
|
|||
#!/usr/bin/env python
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
def CheckCppTypemapConfigs(target_name, config_filename, out_filename):
  """Validates a C++ typemap JSON config and writes an empty stamp file.

  Args:
    target_name: Name of the GN target being checked; used in error messages.
    config_filename: Path to the JSON typemap configuration to validate. The
        file must contain a JSON list of config objects.
    out_filename: Path of the (empty) stamp file written on success.

  Raises:
    ValueError: If a config contains an unsupported property, or maps no types.
    IOError: If a type entry contains an unsupported property. (Kept as
        IOError for compatibility with existing callers/tests.)
  """
  _SUPPORTED_CONFIG_KEYS = set([
      'types', 'traits_headers', 'traits_private_headers', 'traits_sources',
      'traits_deps', 'traits_public_deps'
  ])
  _SUPPORTED_TYPE_KEYS = set([
      'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize', 'hashable',
      'move_only', 'nullable_is_same_type', 'forward_declaration',
      'default_constructible'
  ])
  with open(config_filename, 'r') as f:
    for config in json.load(f):
      for key in config.keys():
        if key not in _SUPPORTED_CONFIG_KEYS:
          raise ValueError('Invalid typemap property "%s" when processing %s' %
                           (key, target_name))

      types = config.get('types')
      if not types:
        raise ValueError('Typemap for %s must specify at least one type to map'
                         % target_name)

      for entry in types:
        for key in entry.keys():
          if key not in _SUPPORTED_TYPE_KEYS:
            raise IOError(
                'Invalid type property "%s" in typemap for "%s" on target %s' %
                (key, entry.get('mojom', '(unknown)'), target_name))

  # Opening in 'w' mode already creates-or-truncates the stamp file; the
  # original explicit f.truncate(0) was redundant.
  with open(out_filename, 'w'):
    pass
|
||||
|
||||
|
||||
def main():
  """Command-line entry point.

  Expects exactly three positional arguments: target_name, config_filename
  and stamp_filename; exits with status 1 otherwise.
  """
  parser = argparse.ArgumentParser()
  _, positional = parser.parse_known_args()
  if len(positional) != 3:
    print('Usage: validate_typemap_config.py target_name config_filename '
          'stamp_filename')
    sys.exit(1)

  target_name, config_filename, stamp_filename = positional
  CheckCppTypemapConfigs(target_name, config_filename, stamp_filename)
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == '__main__':
  main()
|
18
utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn
Normal file
18
utils/codegen/ipc/mojo/public/tools/mojom/BUILD.gn
Normal file
|
@ -0,0 +1,18 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Bundles the mojom parser's Python unittests and the scripts they exercise
# as runtime data, so test targets can depend on this group and have the
# files available when the tests run.
group("tests") {
  data = [
    "check_stable_mojom_compatibility_unittest.py",
    "check_stable_mojom_compatibility.py",
    "const_unittest.py",
    "enum_unittest.py",
    "feature_unittest.py",
    "mojom_parser_test_case.py",
    "mojom_parser_unittest.py",
    "mojom_parser.py",
    "stable_attribute_unittest.py",
    "version_compatibility_unittest.py",
  ]
}
|
14
utils/codegen/ipc/mojo/public/tools/mojom/README.md
Normal file
14
utils/codegen/ipc/mojo/public/tools/mojom/README.md
Normal file
|
@ -0,0 +1,14 @@
|
|||
# The Mojom Parser
|
||||
|
||||
The Mojom format is an interface definition language (IDL) for describing
|
||||
interprocess communication (IPC) messages and data types for use with the
|
||||
low-level cross-platform
|
||||
[Mojo IPC library](https://chromium.googlesource.com/chromium/src/+/main/mojo/public/c/system/README.md).
|
||||
|
||||
This directory consists of a `mojom` Python module, its tests, and supporting
|
||||
command-line tools. The Python module implements the parser used by the
|
||||
command-line tools and exposes an API to help external bindings generators emit
|
||||
useful code from the parser's outputs.
|
||||
|
||||
TODO(https://crbug.com/1060464): Fill out this documentation once the library
|
||||
and tools have stabilized.
|
204
utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
Executable file
204
utils/codegen/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
Executable file
|
@ -0,0 +1,204 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Verifies backward-compatibility of mojom type changes.
|
||||
|
||||
Given a set of pre- and post-diff mojom file contents, and a root directory
|
||||
for a project, this tool verifies that any changes to [Stable] mojom types are
|
||||
backward-compatible with the previous version.
|
||||
|
||||
This can be used e.g. by a presubmit check to prevent developers from making
|
||||
breaking changes to stable mojoms."""
|
||||
|
||||
import argparse
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom.generate import module
|
||||
from mojom.generate import translate
|
||||
from mojom.parse import parser
|
||||
|
||||
# pylint: disable=raise-missing-from
|
||||
|
||||
|
||||
class ParseError(Exception):
  """Raised when a mojom file involved in the delta fails to parse."""
  pass
|
||||
|
||||
|
||||
def _ValidateDelta(root, delta):
  """Parses all modified mojoms (including all transitive mojom dependencies,
  even if unmodified) to perform backward-compatibility checks on any types
  marked with the [Stable] attribute.

  Note that unlike the normal build-time parser in mojom_parser.py, this does
  not produce or rely on cached module translations, but instead parses the full
  transitive closure of a mojom's input dependencies all at once.

  Args:
    root: Source-tree root under which unmodified mojoms are read from disk.
    delta: List of dicts with 'filename', 'old' and 'new' keys (contents
        before/after; None for additions/deletions).

  Raises:
    ParseError: If any involved mojom fails to parse.
    Exception: If a [Stable] type is deleted or changed incompatibly.
  """

  translate.is_running_backwards_compatibility_check_hack = True

  # First build a map of all files covered by the delta
  affected_files = set()
  old_files = {}
  new_files = {}
  for change in delta:
    # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
    filename = change['filename'].replace('\\', '/')
    affected_files.add(filename)
    if change['old']:
      old_files[filename] = change['old']
    if change['new']:
      new_files[filename] = change['new']

  # Parse and translate all mojoms relevant to the delta, including transitive
  # imports that weren't modified.
  unmodified_modules = {}

  def parseMojom(mojom, file_overrides, override_modules):
    # Recursively parse |mojom| and its imports; modified files come from
    # file_overrides, unmodified ones from disk under |root|.
    if mojom in unmodified_modules or mojom in override_modules:
      return

    contents = file_overrides.get(mojom)
    if contents:
      modules = override_modules
    else:
      modules = unmodified_modules
      with io.open(os.path.join(root, mojom), encoding='utf-8') as f:
        contents = f.read()

    try:
      ast = parser.Parse(contents, mojom)
    except Exception as e:
      raise ParseError('encountered exception {0} while parsing {1}'.format(
          e, mojom))

    # Files which are generated at compile time can't be checked by this script
    # (at the moment) since they may not exist in the output directory.
    generated_files_to_skip = {
        ('third_party/blink/public/mojom/runtime_feature_state/'
         'runtime_feature.mojom'),
        ('third_party/blink/public/mojom/origin_trial_feature/'
         'origin_trial_feature.mojom'),
    }

    ast.import_list.items = [
        x for x in ast.import_list.items
        if x.import_filename not in generated_files_to_skip
    ]

    for imp in ast.import_list:
      if (not file_overrides.get(imp.import_filename)
          and not os.path.exists(os.path.join(root, imp.import_filename))):
        # Speculatively construct a path prefix to locate the import_filename
        mojom_path = os.path.dirname(os.path.normpath(mojom)).split(os.sep)
        test_prefix = ''
        for path_component in mojom_path:
          test_prefix = os.path.join(test_prefix, path_component)
          test_import_filename = os.path.join(test_prefix, imp.import_filename)
          if os.path.exists(os.path.join(root, test_import_filename)):
            imp.import_filename = test_import_filename
            break
      parseMojom(imp.import_filename, file_overrides, override_modules)

    # Now that the transitive set of dependencies has been imported and parsed
    # above, translate each mojom AST into a Module so that all types are fully
    # defined and can be inspected.
    all_modules = {}
    all_modules.update(unmodified_modules)
    all_modules.update(override_modules)
    modules[mojom] = translate.OrderedModule(ast, mojom, all_modules)

  old_modules = {}
  for mojom in old_files:
    parseMojom(mojom, old_files, old_modules)
  new_modules = {}
  for mojom in new_files:
    parseMojom(mojom, new_files, new_modules)

  # At this point we have a complete set of translated Modules from both the
  # pre- and post-diff mojom contents. Now we can analyze backward-compatibility
  # of the deltas.
  #
  # Note that for backward-compatibility checks we only care about types which
  # were marked [Stable] before the diff. Types newly marked as [Stable] are not
  # checked.
  def collectTypes(modules):
    # Flatten all top-level kinds of every module into one qualified-name map.
    types = {}
    for m in modules.values():
      for kinds in (m.enums, m.structs, m.unions, m.interfaces):
        for kind in kinds:
          types[kind.qualified_name] = kind
    return types

  old_types = collectTypes(old_modules)
  new_types = collectTypes(new_modules)

  # Collect any renamed types so they can be compared accordingly.
  renamed_types = {}
  for name, kind in new_types.items():
    old_name = kind.attributes and kind.attributes.get('RenamedFrom')
    if old_name:
      renamed_types[old_name] = name

  for qualified_name, kind in old_types.items():
    if not kind.stable:
      continue

    new_name = renamed_types.get(qualified_name, qualified_name)
    if new_name not in new_types:
      raise Exception(
          'Stable type %s appears to be deleted by this change. If it was '
          'renamed, please add a [RenamedFrom] attribute to the new type. This '
          'can be deleted by a subsequent change.' % qualified_name)

    checker = module.BackwardCompatibilityChecker()
    try:
      # NOTE(review): the Exception raised here is itself caught by the
      # except clause below and re-raised wrapped with the checker's error
      # detail — this nesting appears intentional (the outer message embeds
      # %s from the inner one), but it makes the final message repeat itself;
      # confirm before restructuring.
      if not checker.IsBackwardCompatible(new_types[new_name], kind):
        raise Exception(
            'Stable type %s appears to have changed in a way which '
            'breaks backward-compatibility. Please fix!\n\nIf you '
            'believe this assessment to be incorrect, please file a '
            'Chromium bug against the "Internals>Mojo>Bindings" '
            'component.' % qualified_name)
    except Exception as e:
      raise Exception(
          'Stable type %s appears to have changed in a way which '
          'breaks backward-compatibility: \n\n%s.\nPlease fix!\n\nIf you '
          'believe this assessment to be incorrect, please file a '
          'Chromium bug against the "Internals>Mojo>Bindings" '
          'component.' % (qualified_name, e))
|
||||
|
||||
|
||||
def Run(command_line, delta=None):
  """Runs the tool with the given command_line. Normally this will read the
  change description from stdin as a JSON-encoded list, but tests may pass a
  delta directly for convenience."""
  arg_parser = argparse.ArgumentParser(
      description='Verifies backward-compatibility of mojom type changes.',
      epilog="""
This tool reads a change description from stdin and verifies that all modified
[Stable] mojom types will retain backward-compatibility. The change description
must be a JSON-encoded list of objects, each with a "filename" key (path to a
changed mojom file, relative to ROOT); an "old" key whose value is a string of
the full file contents before the change, or null if the file is being added;
and a "new" key whose value is a string of the full file contents after the
change, or null if the file is being deleted.""")
  arg_parser.add_argument(
      '--src-root',
      required=True,
      action='store',
      metavar='ROOT',
      help='The root of the source tree in which the checked mojoms live.')

  args, _ = arg_parser.parse_known_args(command_line)
  # Fall back to reading the delta from stdin when the caller didn't pass one.
  if not delta:
    delta = json.load(sys.stdin)
  _ValidateDelta(args.src_root, delta)
|
||||
|
||||
|
||||
# Script entry point; forwards command-line arguments to Run().
if __name__ == '__main__':
  Run(sys.argv[1:])
|
|
@ -0,0 +1,339 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import check_stable_mojom_compatibility
|
||||
|
||||
from mojom.generate import module
|
||||
|
||||
|
||||
class Change:
  """Describes one mojom file delta (addition, deletion, or modification)."""

  def __init__(self, filename, old=None, new=None):
    # old is None for a newly added file; new is None for a deleted file;
    # otherwise the file's contents changed from old to new.
    self.filename = filename
    self.old = old
    self.new = new
|
||||
|
||||
|
||||
class UnchangedFile(Change):
  """A Change whose old and new contents are identical, i.e. no actual delta;
  used to place unmodified dependencies in the fake source tree."""
  def __init__(self, filename, contents):
    super().__init__(filename, old=contents, new=contents)
|
||||
|
||||
|
||||
class CheckStableMojomCompatibilityTest(unittest.TestCase):
|
||||
"""Tests covering the behavior of the compatibility checking tool. Note that
|
||||
details of different compatibility checks and relevant failure modes are NOT
|
||||
covered by these tests. Those are instead covered by unittests in
|
||||
version_compatibility_unittest.py. Additionally, the tests which ensure a
|
||||
given set of [Stable] mojom definitions are indeed plausibly stable (i.e. they
|
||||
have no unstable dependencies) are covered by stable_attribute_unittest.py.
|
||||
|
||||
These tests cover higher-level concerns of the compatibility checking tool,
|
||||
like file or symbol, renames, changes spread over multiple files, etc."""
|
||||
|
||||
def verifyBackwardCompatibility(self, changes):
|
||||
"""Helper for implementing assertBackwardCompatible and
|
||||
assertNotBackwardCompatible"""
|
||||
|
||||
temp_dir = tempfile.mkdtemp()
|
||||
for change in changes:
|
||||
if change.old:
|
||||
# Populate the old file on disk in our temporary fake source root
|
||||
file_path = os.path.join(temp_dir, change.filename)
|
||||
dir_path = os.path.dirname(file_path)
|
||||
if not os.path.exists(dir_path):
|
||||
os.makedirs(dir_path)
|
||||
with open(file_path, 'w') as f:
|
||||
f.write(change.old)
|
||||
|
||||
delta = []
|
||||
for change in changes:
|
||||
if change.old != change.new:
|
||||
delta.append({
|
||||
'filename': change.filename,
|
||||
'old': change.old,
|
||||
'new': change.new
|
||||
})
|
||||
|
||||
try:
|
||||
check_stable_mojom_compatibility.Run(['--src-root', temp_dir],
|
||||
delta=delta)
|
||||
finally:
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
def assertBackwardCompatible(self, changes):
|
||||
self.verifyBackwardCompatibility(changes)
|
||||
|
||||
def assertNotBackwardCompatible(self, changes):
|
||||
try:
|
||||
self.verifyBackwardCompatibility(changes)
|
||||
except Exception:
|
||||
return
|
||||
|
||||
raise Exception('Change unexpectedly passed a backward-compatibility check')
|
||||
|
||||
def testBasicCompatibility(self):
|
||||
"""Minimal smoke test to verify acceptance of a simple valid change."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new='[Stable] struct S { [MinVersion=1] int32 x; };')
|
||||
])
|
||||
|
||||
def testBasicIncompatibility(self):
|
||||
"""Minimal smoke test to verify rejection of a simple invalid change."""
|
||||
self.assertNotBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new='[Stable] struct S { int32 x; };')
|
||||
])
|
||||
|
||||
def testIgnoreIfNotStable(self):
|
||||
"""We don't care about types not marked [Stable]"""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='struct S {};',
|
||||
new='struct S { int32 x; };')
|
||||
])
|
||||
|
||||
def testRename(self):
|
||||
"""We can do checks for renamed types."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new='[Stable, RenamedFrom="S"] struct T {};')
|
||||
])
|
||||
self.assertNotBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new='[Stable, RenamedFrom="S"] struct T { int32 x; };')
|
||||
])
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Stable] struct S {};',
|
||||
new="""\
|
||||
[Stable, RenamedFrom="S"]
|
||||
struct T { [MinVersion=1] int32 x; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testNewlyStable(self):
|
||||
"""We don't care about types newly marked as [Stable]."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='struct S {};',
|
||||
new='[Stable] struct S { int32 x; };')
|
||||
])
|
||||
|
||||
def testFileRename(self):
|
||||
"""Make sure we can still do compatibility checks after a file rename."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
|
||||
Change('bar/bar.mojom',
|
||||
old=None,
|
||||
new='[Stable] struct S { [MinVersion=1] int32 x; };')
|
||||
])
|
||||
self.assertNotBackwardCompatible([
|
||||
Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
|
||||
Change('bar/bar.mojom', old=None, new='[Stable] struct S { int32 x; };')
|
||||
])
|
||||
|
||||
def testWithImport(self):
|
||||
"""Ensure that cross-module dependencies do not break the compatibility
|
||||
checking tool."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old="""\
|
||||
module foo;
|
||||
[Stable] struct S {};
|
||||
""",
|
||||
new="""\
|
||||
module foo;
|
||||
[Stable] struct S { [MinVersion=2] int32 x; };
|
||||
"""),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; [MinVersion=1] int32 y; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testWithMovedDefinition(self):
|
||||
"""If a definition moves from one file to another, we should still be able
|
||||
to check compatibility accurately."""
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old="""\
|
||||
module foo;
|
||||
[Stable] struct S {};
|
||||
""",
|
||||
new="""\
|
||||
module foo;
|
||||
"""),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable, RenamedFrom="foo.S"] struct S {
|
||||
[MinVersion=2] int32 x;
|
||||
};
|
||||
[Stable] struct T { S s; [MinVersion=1] int32 y; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertNotBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old="""\
|
||||
module foo;
|
||||
[Stable] struct S {};
|
||||
""",
|
||||
new="""\
|
||||
module foo;
|
||||
"""),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable, RenamedFrom="foo.S"] struct S { int32 x; };
|
||||
[Stable] struct T { S s; [MinVersion=1] int32 y; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testWithUnmodifiedImport(self):
|
||||
"""Unchanged files in the filesystem are still parsed by the compatibility
|
||||
checking tool if they're imported by a changed file."""
|
||||
self.assertBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; [MinVersion=1] int32 x; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertNotBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; int32 x; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testWithPartialImport(self):
|
||||
"""The compatibility checking tool correctly parses imports with partial
|
||||
paths."""
|
||||
self.assertBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('foo/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('foo/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertNotBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""")
|
||||
])
|
||||
|
||||
self.assertNotBackwardCompatible([
|
||||
UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
|
||||
Change('bar/bar.mojom',
|
||||
old="""\
|
||||
module bar;
|
||||
import "foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""",
|
||||
new="""\
|
||||
module bar;
|
||||
import "foo/foo.mojom";
|
||||
[Stable] struct T { foo.S s; };
|
||||
""")
|
||||
])
|
||||
|
||||
def testNewEnumDefault(self):
|
||||
# Should be backwards compatible since it does not affect the wire format.
|
||||
# This specific case also checks that the backwards compatibility checker
|
||||
# does not throw an error due to the older version of the enum not
|
||||
# specifying [Default].
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Extensible] enum E { One };',
|
||||
new='[Extensible] enum E { [Default] One };')
|
||||
])
|
||||
self.assertBackwardCompatible([
|
||||
Change('foo/foo.mojom',
|
||||
old='[Extensible] enum E { [Default] One, Two, };',
|
||||
new='[Extensible] enum E { One, [Default] Two, };')
|
||||
])
|
90
utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py
Normal file
90
utils/codegen/ipc/mojo/public/tools/mojom/const_unittest.py
Normal file
|
@ -0,0 +1,90 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
from mojom.generate import module as mojom
|
||||
|
||||
|
||||
class ConstTest(MojomParserTestCase):
|
||||
"""Tests constant parsing behavior."""
|
||||
|
||||
def testLiteralInt(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const int32 k = 42;')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(1, len(a.constants))
|
||||
self.assertEqual('k', a.constants[0].mojom_name)
|
||||
self.assertEqual('42', a.constants[0].value)
|
||||
|
||||
def testLiteralFloat(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const float k = 42.5;')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(1, len(a.constants))
|
||||
self.assertEqual('k', a.constants[0].mojom_name)
|
||||
self.assertEqual('42.5', a.constants[0].value)
|
||||
|
||||
def testLiteralString(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const string k = "woot";')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(1, len(a.constants))
|
||||
self.assertEqual('k', a.constants[0].mojom_name)
|
||||
self.assertEqual('"woot"', a.constants[0].value)
|
||||
|
||||
def testEnumConstant(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'module a; enum E { kA = 41, kB };')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(
|
||||
b_mojom, """\
|
||||
import "a.mojom";
|
||||
const a.E kE1 = a.E.kB;
|
||||
|
||||
// We also allow value names to be unqualified, implying scope from the
|
||||
// constant's type.
|
||||
const a.E kE2 = kB;
|
||||
""")
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
b = self.LoadModule(b_mojom)
|
||||
self.assertEqual(1, len(a.enums))
|
||||
self.assertEqual('E', a.enums[0].mojom_name)
|
||||
self.assertEqual(2, len(b.constants))
|
||||
self.assertEqual('kE1', b.constants[0].mojom_name)
|
||||
self.assertEqual(a.enums[0], b.constants[0].kind)
|
||||
self.assertEqual(a.enums[0].fields[1], b.constants[0].value.field)
|
||||
self.assertEqual(42, b.constants[0].value.field.numeric_value)
|
||||
self.assertEqual('kE2', b.constants[1].mojom_name)
|
||||
self.assertEqual(a.enums[0].fields[1], b.constants[1].value.field)
|
||||
self.assertEqual(42, b.constants[1].value.field.numeric_value)
|
||||
|
||||
def testConstantReference(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const int32 kA = 42; const int32 kB = kA;')
|
||||
self.ParseMojoms([a_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
self.assertEqual(2, len(a.constants))
|
||||
self.assertEqual('kA', a.constants[0].mojom_name)
|
||||
self.assertEqual('42', a.constants[0].value)
|
||||
self.assertEqual('kB', a.constants[1].mojom_name)
|
||||
self.assertEqual('42', a.constants[1].value)
|
||||
|
||||
def testImportedConstantReference(self):
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'const int32 kA = 42;')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(b_mojom, 'import "a.mojom"; const int32 kB = kA;')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
a = self.LoadModule(a_mojom)
|
||||
b = self.LoadModule(b_mojom)
|
||||
self.assertEqual(1, len(a.constants))
|
||||
self.assertEqual(1, len(b.constants))
|
||||
self.assertEqual('kA', a.constants[0].mojom_name)
|
||||
self.assertEqual('42', a.constants[0].value)
|
||||
self.assertEqual('kB', b.constants[0].mojom_name)
|
||||
self.assertEqual('42', b.constants[0].value)
|
120
utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py
Normal file
120
utils/codegen/ipc/mojo/public/tools/mojom/enum_unittest.py
Normal file
|
@ -0,0 +1,120 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class EnumTest(MojomParserTestCase):
|
||||
"""Tests enum parsing behavior."""
|
||||
|
||||
def testExplicitValues(self):
|
||||
"""Verifies basic parsing of assigned integral values."""
|
||||
types = self.ExtractTypes('enum E { kFoo=0, kBar=2, kBaz };')
|
||||
self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
|
||||
self.assertEqual(0, types['E'].fields[0].numeric_value)
|
||||
self.assertEqual('kBar', types['E'].fields[1].mojom_name)
|
||||
self.assertEqual(2, types['E'].fields[1].numeric_value)
|
||||
self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
|
||||
self.assertEqual(3, types['E'].fields[2].numeric_value)
|
||||
|
||||
def testImplicitValues(self):
|
||||
"""Verifies basic automatic assignment of integral values at parse time."""
|
||||
types = self.ExtractTypes('enum E { kFoo, kBar, kBaz };')
|
||||
self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
|
||||
self.assertEqual(0, types['E'].fields[0].numeric_value)
|
||||
self.assertEqual('kBar', types['E'].fields[1].mojom_name)
|
||||
self.assertEqual(1, types['E'].fields[1].numeric_value)
|
||||
self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
|
||||
self.assertEqual(2, types['E'].fields[2].numeric_value)
|
||||
|
||||
def testSameEnumReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of another
|
||||
field within the same enum."""
|
||||
types = self.ExtractTypes('enum E { kA, kB, kFirst=kA };')
|
||||
self.assertEqual('kA', types['E'].fields[0].mojom_name)
|
||||
self.assertEqual(0, types['E'].fields[0].numeric_value)
|
||||
self.assertEqual('kB', types['E'].fields[1].mojom_name)
|
||||
self.assertEqual(1, types['E'].fields[1].numeric_value)
|
||||
self.assertEqual('kFirst', types['E'].fields[2].mojom_name)
|
||||
self.assertEqual(0, types['E'].fields[2].numeric_value)
|
||||
|
||||
def testSameModuleOtherEnumReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of a field
|
||||
in another enum within the same module."""
|
||||
types = self.ExtractTypes('enum E { kA, kB }; enum F { kA = E.kB };')
|
||||
self.assertEqual(1, types['F'].fields[0].numeric_value)
|
||||
|
||||
def testImportedEnumReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of a field
|
||||
in another enum within a different module."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'module a; enum E { kFoo=42, kBar };')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(b_mojom,
|
||||
'module b; import "a.mojom"; enum F { kFoo = a.E.kBar };')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
b = self.LoadModule(b_mojom)
|
||||
|
||||
self.assertEqual('F', b.enums[0].mojom_name)
|
||||
self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
|
||||
self.assertEqual(43, b.enums[0].fields[0].numeric_value)
|
||||
|
||||
def testConstantReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of an
|
||||
integral constant within the same module."""
|
||||
types = self.ExtractTypes('const int32 kFoo = 42; enum E { kA = kFoo };')
|
||||
self.assertEqual(42, types['E'].fields[0].numeric_value)
|
||||
|
||||
def testInvalidConstantReference(self):
  """Verifies that enum values cannot be assigned from the value of
  non-integral constants."""
  # assertRaisesRegexp is a deprecated alias that was removed in
  # Python 3.12; use assertRaisesRegex instead.
  with self.assertRaisesRegex(ValueError, 'not an integer'):
    self.ExtractTypes('const float kFoo = 1.0; enum E { kA = kFoo };')
  with self.assertRaisesRegex(ValueError, 'not an integer'):
    self.ExtractTypes('const double kFoo = 1.0; enum E { kA = kFoo };')
  with self.assertRaisesRegex(ValueError, 'not an integer'):
    self.ExtractTypes('const string kFoo = "lol"; enum E { kA = kFoo };')
|
||||
|
||||
def testImportedConstantReference(self):
|
||||
"""Verifies that an enum value can be assigned from the value of an integral
|
||||
constant within an imported module."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'module a; const int32 kFoo = 37;')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(b_mojom,
|
||||
'module b; import "a.mojom"; enum F { kFoo = a.kFoo };')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
b = self.LoadModule(b_mojom)
|
||||
|
||||
self.assertEqual('F', b.enums[0].mojom_name)
|
||||
self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
|
||||
self.assertEqual(37, b.enums[0].fields[0].numeric_value)
|
||||
|
||||
def testEnumAttributesAreEnums(self):
|
||||
"""Verifies that enum values in attributes are really enum types."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(a_mojom, 'module a; enum E { kFoo, kBar };')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(
|
||||
b_mojom, 'module b;'
|
||||
'import "a.mojom";'
|
||||
'[MooCow=a.E.kFoo]'
|
||||
'interface Foo { Foo(); };')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
b = self.LoadModule(b_mojom)
|
||||
self.assertEqual(b.interfaces[0].attributes['MooCow'].mojom_name, 'kFoo')
|
||||
|
||||
def testConstantAttributes(self):
  """Verifies that constants as attributes are translated to the constant."""
  a_mojom = 'a.mojom'
  self.WriteFile(
      a_mojom, 'module a;'
      'enum E { kFoo, kBar };'
      'const E kB = E.kFoo;'
      '[Attr=kB] interface Hello { Foo(); };')
  self.ParseMojoms([a_mojom])
  a = self.LoadModule(a_mojom)
  self.assertEqual(a.interfaces[0].attributes['Attr'].mojom_name, 'kB')
  # assertEquals is a deprecated alias that was removed in Python 3.12;
  # use assertEqual for consistency with the rest of this test.
  self.assertEqual(a.interfaces[0].attributes['Attr'].value.mojom_name,
                   'kFoo')
|
|
@ -0,0 +1,84 @@
|
|||
# Copyright 2023 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class FeatureTest(MojomParserTestCase):
|
||||
"""Tests feature parsing behavior."""
|
||||
def testFeatureOff(self):
|
||||
"""Verifies basic parsing of feature types."""
|
||||
types = self.ExtractTypes("""
|
||||
// e.g. BASE_DECLARE_FEATURE(kFeature);
|
||||
[AttributeOne=ValueOne]
|
||||
feature kFeature {
|
||||
// BASE_FEATURE(kFeature,"MyFeature",
|
||||
// base::FEATURE_DISABLED_BY_DEFAULT);
|
||||
const string name = "MyFeature";
|
||||
const bool default_state = false;
|
||||
};
|
||||
""")
|
||||
self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
|
||||
self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
|
||||
self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
|
||||
self.assertEqual('false', types['kFeature'].constants[1].value)
|
||||
|
||||
def testFeatureOn(self):
|
||||
"""Verifies basic parsing of feature types."""
|
||||
types = self.ExtractTypes("""
|
||||
// e.g. BASE_DECLARE_FEATURE(kFeature);
|
||||
feature kFeature {
|
||||
// BASE_FEATURE(kFeature,"MyFeature",
|
||||
// base::FEATURE_ENABLED_BY_DEFAULT);
|
||||
const string name = "MyFeature";
|
||||
const bool default_state = true;
|
||||
};
|
||||
""")
|
||||
self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
|
||||
self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
|
||||
self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
|
||||
self.assertEqual('true', types['kFeature'].constants[1].value)
|
||||
|
||||
def testFeatureWeakKeyword(self):
|
||||
"""Verifies that `feature` is a weak keyword."""
|
||||
types = self.ExtractTypes("""
|
||||
// e.g. BASE_DECLARE_FEATURE(kFeature);
|
||||
[AttributeOne=ValueOne]
|
||||
feature kFeature {
|
||||
// BASE_FEATURE(kFeature,"MyFeature",
|
||||
// base::FEATURE_DISABLED_BY_DEFAULT);
|
||||
const string name = "MyFeature";
|
||||
const bool default_state = false;
|
||||
};
|
||||
struct MyStruct {
|
||||
bool feature = true;
|
||||
};
|
||||
interface InterfaceName {
|
||||
Method(string feature) => (int32 feature);
|
||||
};
|
||||
""")
|
||||
self.assertEqual('name', types['kFeature'].constants[0].mojom_name)
|
||||
self.assertEqual('"MyFeature"', types['kFeature'].constants[0].value)
|
||||
self.assertEqual('default_state', types['kFeature'].constants[1].mojom_name)
|
||||
self.assertEqual('false', types['kFeature'].constants[1].value)
|
||||
|
||||
def testFeatureAttributesAreFeatures(self):
|
||||
"""Verifies that feature values in attributes are really feature types."""
|
||||
a_mojom = 'a.mojom'
|
||||
self.WriteFile(
|
||||
a_mojom, 'module a;'
|
||||
'feature F { const string name = "f";'
|
||||
'const bool default_state = false; };')
|
||||
b_mojom = 'b.mojom'
|
||||
self.WriteFile(
|
||||
b_mojom, 'module b;'
|
||||
'import "a.mojom";'
|
||||
'feature G'
|
||||
'{const string name = "g"; const bool default_state = false;};'
|
||||
'[Attri=a.F] interface Foo { Foo(); };'
|
||||
'[Boink=G] interface Bar {};')
|
||||
self.ParseMojoms([a_mojom, b_mojom])
|
||||
b = self.LoadModule(b_mojom)
|
||||
self.assertEqual(b.interfaces[0].attributes['Attri'].mojom_name, 'F')
|
||||
self.assertEqual(b.interfaces[1].attributes['Boink'].mojom_name, 'G')
|
43
utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
Normal file
43
utils/codegen/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
Normal file
|
@ -0,0 +1,43 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
group("mojom") {
|
||||
data = [
|
||||
"__init__.py",
|
||||
"error.py",
|
||||
"fileutil.py",
|
||||
"generate/__init__.py",
|
||||
"generate/check.py",
|
||||
"generate/generator.py",
|
||||
"generate/module.py",
|
||||
"generate/pack.py",
|
||||
"generate/template_expander.py",
|
||||
"generate/translate.py",
|
||||
"parse/__init__.py",
|
||||
"parse/ast.py",
|
||||
"parse/conditional_features.py",
|
||||
"parse/lexer.py",
|
||||
"parse/parser.py",
|
||||
|
||||
# Third-party module dependencies
|
||||
"//third_party/jinja2/",
|
||||
"//third_party/ply/",
|
||||
]
|
||||
}
|
||||
|
||||
group("tests") {
|
||||
data = [
|
||||
"fileutil_unittest.py",
|
||||
"generate/generator_unittest.py",
|
||||
"generate/module_unittest.py",
|
||||
"generate/pack_unittest.py",
|
||||
"generate/translate_unittest.py",
|
||||
"parse/ast_unittest.py",
|
||||
"parse/conditional_features_unittest.py",
|
||||
"parse/lexer_unittest.py",
|
||||
"parse/parser_unittest.py",
|
||||
]
|
||||
|
||||
public_deps = [ ":mojom" ]
|
||||
}
|
28
utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py
Normal file
28
utils/codegen/ipc/mojo/public/tools/mojom/mojom/error.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
|
||||
class Error(Exception):
  """Base class for Mojo IDL bindings parser/generator errors."""

  def __init__(self, filename, message, lineno=None, addenda=None, **kwargs):
    """|filename| is the (primary) file which caused the error, |message| is
    the error message, |lineno| is the 1-based line number (or |None| if not
    applicable/available), and |addenda| is a list of additional lines to
    append to the final error message."""
    super().__init__(**kwargs)
    self.filename = filename
    self.message = message
    self.lineno = lineno
    self.addenda = addenda

  def __str__(self):
    location = (f"{self.filename}:{self.lineno}"
                if self.lineno else self.filename)
    base = f"{location}: Error: {self.message}"
    if not self.addenda:
      return base
    return "\n".join([base] + self.addenda)

  def __repr__(self):
    return str(self)
|
44
utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py
Normal file
44
utils/codegen/ipc/mojo/public/tools/mojom/mojom/fileutil.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
# Copyright 2015 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import errno
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
|
||||
def _GetDirAbove(dirname):
|
||||
"""Returns the directory "above" this file containing |dirname| (which must
|
||||
also be "above" this file)."""
|
||||
path = os.path.abspath(__file__)
|
||||
while True:
|
||||
path, tail = os.path.split(path)
|
||||
if not tail:
|
||||
return None
|
||||
if tail == dirname:
|
||||
return path
|
||||
|
||||
|
||||
def EnsureDirectoryExists(path, always_try_to_create=False):
  """A wrapper for os.makedirs that does not error if the directory already
  exists. A different process could be racing to create this directory."""
  if os.path.exists(path) and not always_try_to_create:
    return
  try:
    os.makedirs(path)
  except OSError as e:
    # Another process may have won the race to create this directory;
    # only re-raise genuine failures.
    if e.errno != errno.EEXIST:
      raise
|
||||
|
||||
|
||||
def AddLocalRepoThirdPartyDirToModulePath():
  """Helper function to find the top-level directory of this script's
  repository assuming the script falls somewhere within a 'mojo' directory,
  and insert the top-level 'third_party' directory early in the module
  search path. Used to ensure that third-party dependencies provided within
  the repository itself (e.g. Chromium sources include snapshots of jinja2
  and ply) are preferred over locally installed system library packages."""
  toplevel = _GetDirAbove('mojo')
  if toplevel:
    # Insert at position 1 so the script's own directory keeps priority.
    sys.path.insert(1, os.path.join(toplevel, 'third_party'))
|
|
@ -0,0 +1,37 @@
|
|||
# Copyright 2015 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os.path
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from mojom import fileutil
|
||||
|
||||
class FileUtilTest(unittest.TestCase):
|
||||
def testEnsureDirectoryExists(self):
|
||||
"""Test that EnsureDirectoryExists functions correctly."""
|
||||
|
||||
temp_dir = tempfile.mkdtemp()
|
||||
try:
|
||||
self.assertTrue(os.path.exists(temp_dir))
|
||||
|
||||
# Directory does not exist, yet.
|
||||
full = os.path.join(temp_dir, "foo", "bar")
|
||||
self.assertFalse(os.path.exists(full))
|
||||
|
||||
# Create the directory.
|
||||
fileutil.EnsureDirectoryExists(full)
|
||||
self.assertTrue(os.path.exists(full))
|
||||
|
||||
# Trying to create it again does not cause an error.
|
||||
fileutil.EnsureDirectoryExists(full)
|
||||
self.assertTrue(os.path.exists(full))
|
||||
|
||||
# Bypass check for directory existence to tickle error handling that
|
||||
# occurs in response to a race.
|
||||
fileutil.EnsureDirectoryExists(full, always_try_to_create=True)
|
||||
self.assertTrue(os.path.exists(full))
|
||||
finally:
|
||||
shutil.rmtree(temp_dir)
|
|
@ -0,0 +1,26 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Code shared by the various pre-generation mojom checkers."""
|
||||
|
||||
|
||||
class CheckException(Exception):
  """Raised when a module fails one of the pre-generation mojom checks."""

  def __init__(self, module, message):
    # |module| is the mojom module that failed the check; |message|
    # describes the failure.
    self.module = module
    self.message = message
    super().__init__(self.message)

  def __str__(self):
    return "Failed mojo pre-generation check for {}:\n{}".format(
        self.module.path, self.message)
|
||||
|
||||
|
||||
class Check:
  """Base class for pre-generation mojom checkers.

  Subclasses implement CheckModule to validate |module| before code
  generation runs.
  """

  def __init__(self, module):
    self.module = module

  def CheckModule(self):
    """Subclass should return True if its Checks pass, and throw an
    exception otherwise. CheckModule will be called immediately before
    mojom.generate.Generator.GenerateFiles()"""
    raise NotImplementedError("Subclasses must override/implement this method")
|
|
@ -0,0 +1,328 @@
|
|||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Code shared by the various language-specific code generators."""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
from functools import partial
|
||||
import os.path
|
||||
import re
|
||||
|
||||
from mojom import fileutil
|
||||
from mojom.generate import module as mojom
|
||||
from mojom.generate import pack
|
||||
|
||||
|
||||
def ExpectedArraySize(kind):
  """Return the declared length of |kind| if it is an array kind, else None."""
  return kind.length if mojom.IsArrayKind(kind) else None
|
||||
|
||||
|
||||
def SplitCamelCase(identifier):
  """Splits a camel-cased |identifier| and returns a list of lower-cased
  strings.
  """
  # Insert a break after an uppercase letter that ends a word, i.e. one
  # followed by an upper then a lower: URLLoaderFactory -> URL_LoaderFactory.
  broken = re.sub('([A-Z][0-9]*)(?=[A-Z][0-9]*[a-z])', r'\1_', identifier)
  # Insert a break after a lowercase letter followed by an upper:
  # URLLoaderFactory -> URLLoader_Factory.
  broken = re.sub('([a-z][0-9]*)(?=[A-Z])', r'\1_', broken)
  return [word.lower() for word in broken.split('_')]
|
||||
|
||||
|
||||
def ToCamel(identifier, lower_initial=False, digits_split=False, delimiter='_'):
  """Splits |identifier| using |delimiter|, makes the first character of each
  word uppercased (but makes the first character of the first word lowercased
  if |lower_initial| is set to True), and joins the words. Please note that
  for each word, all the characters except the first one are untouched.
  """
  pieces = []
  upper_next = True
  for ch in identifier:
    if ch == delimiter:
      # Delimiters are consumed and start a new word.
      upper_next = True
    elif digits_split and ch.isdigit():
      # Digits are kept as-is and also terminate the current word.
      upper_next = True
      pieces.append(ch)
    elif upper_next:
      upper_next = False
      pieces.append(ch.upper())
    else:
      pieces.append(ch)
  result = ''.join(pieces)

  if lower_initial and result:
    result = result[0].lower() + result[1:]

  return result
|
||||
|
||||
|
||||
def _ToSnakeCase(identifier, upper=False):
  """Splits camel-cased |identifier| into lower case words, removes the first
  word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory",
  returns "URL_LOADER_FACTORY" if upper, otherwise "url_loader_factory".
  """
  words = SplitCamelCase(identifier)
  # Drop a leading constant-style "k" prefix (but never empty the list).
  if len(words) > 1 and words[0] == 'k':
    words = words[1:]

  # Variables cannot start with a digit.
  if words[0][0].isdigit():
    words[0] = '_' + words[0]

  if upper:
    words = [word.upper() for word in words]

  return '_'.join(words)
|
||||
|
||||
|
||||
def ToUpperSnakeCase(identifier):
  """Converts camel-cased |identifier| to upper snake case, dropping a
  leading "k" word: "URLLoaderFactory" -> "URL_LOADER_FACTORY"."""
  return _ToSnakeCase(identifier, upper=True)


def ToLowerSnakeCase(identifier):
  """Converts camel-cased |identifier| to lower snake case, dropping a
  leading "k" word: "URLLoaderFactory" -> "url_loader_factory"."""
  return _ToSnakeCase(identifier, upper=False)
|
||||
|
||||
|
||||
class Stylizer:
  """Stylizers specify naming rules to map mojom names to names in generated
  code. For example, if you would like method_name in mojom to be mapped to
  MethodName in the generated code, you need to define a subclass of Stylizer
  and override StylizeMethod to do the conversion.

  The base class applies the identity mapping for every kind of name.
  """

  def StylizeConstant(self, mojom_name):
    return mojom_name

  def StylizeField(self, mojom_name):
    return mojom_name

  def StylizeStruct(self, mojom_name):
    return mojom_name

  def StylizeUnion(self, mojom_name):
    return mojom_name

  def StylizeParameter(self, mojom_name):
    return mojom_name

  def StylizeMethod(self, mojom_name):
    return mojom_name

  def StylizeInterface(self, mojom_name):
    return mojom_name

  def StylizeEnumField(self, mojom_name):
    return mojom_name

  def StylizeEnum(self, mojom_name):
    return mojom_name

  def StylizeFeature(self, mojom_name):
    return mojom_name

  def StylizeModule(self, mojom_namespace):
    return mojom_namespace
|
||||
|
||||
|
||||
def WriteFile(contents, full_path):
  """Writes |contents| (str or bytes) to |full_path|.

  The write is skipped entirely when the file already holds identical bytes,
  leaving its timestamp untouched. The containing directory is created when
  missing.
  """
  data = contents if isinstance(contents, bytes) else contents.encode('utf8')

  # Skip the write when the on-disk content already matches.
  if os.path.isfile(full_path):
    with open(full_path, 'rb') as existing:
      if existing.read() == data:
        return

  # Make sure the containing directory exists before dumping the data.
  fileutil.EnsureDirectoryExists(os.path.dirname(full_path))

  with open(full_path, 'wb') as out:
    out.write(data)
|
||||
|
||||
|
||||
def AddComputedData(module):
  """Adds computed data to the given module. The data is computed once and
  used repeatedly in the generation process."""

  def _AddStructComputedData(exported, struct):
    # Attaches the packed wire layout, byte layout and version table to
    # |struct|; generators read these attributes directly.
    struct.packed = pack.PackedStruct(struct)
    struct.bytes = pack.GetByteLayout(struct.packed)
    struct.versions = pack.GetVersionInfo(struct.packed)
    struct.exported = exported

  def _AddInterfaceComputedData(interface):
    # Computes the interface version (the max min_version seen across methods
    # and their parameter structs) and synthesizes param/response structs.
    interface.version = 0
    for method in interface.methods:
      # this field is never scrambled
      method.sequential_ordinal = method.ordinal

      if method.min_version is not None:
        interface.version = max(interface.version, method.min_version)

      method.param_struct = _GetStructFromMethod(method)
      if interface.stable:
        # Stable interfaces propagate [Stable] to the synthesized structs and
        # require every method to pin its ordinal explicitly.
        method.param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
        if method.explicit_ordinal is None:
          raise Exception(
              'Stable interfaces must declare explicit method ordinals. The '
              'method %s on stable interface %s does not declare an explicit '
              'ordinal.' % (method.mojom_name, interface.qualified_name))
      interface.version = max(interface.version,
                              method.param_struct.versions[-1].version)

      if method.response_parameters is not None:
        method.response_param_struct = _GetResponseStructFromMethod(method)
        if interface.stable:
          method.response_param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
        interface.version = max(
            interface.version,
            method.response_param_struct.versions[-1].version)
      else:
        method.response_param_struct = None

  def _GetStructFromMethod(method):
    """Converts a method's parameters into the fields of a struct."""
    params_class = "%s_%s_Params" % (method.interface.mojom_name,
                                     method.mojom_name)
    struct = mojom.Struct(params_class,
                          module=method.interface.module,
                          attributes={})
    for param in method.parameters:
      struct.AddField(
          param.mojom_name,
          param.kind,
          param.ordinal,
          attributes=param.attributes)
    # exported=False: these synthesized structs are internal to the bindings.
    _AddStructComputedData(False, struct)
    return struct

  def _GetResponseStructFromMethod(method):
    """Converts a method's response_parameters into the fields of a struct."""
    params_class = "%s_%s_ResponseParams" % (method.interface.mojom_name,
                                             method.mojom_name)
    struct = mojom.Struct(params_class,
                          module=method.interface.module,
                          attributes={})
    for param in method.response_parameters:
      struct.AddField(
          param.mojom_name,
          param.kind,
          param.ordinal,
          attributes=param.attributes)
    # exported=False: these synthesized structs are internal to the bindings.
    _AddStructComputedData(False, struct)
    return struct

  for struct in module.structs:
    _AddStructComputedData(True, struct)
  for interface in module.interfaces:
    _AddInterfaceComputedData(interface)
|
||||
|
||||
|
||||
class Generator:
  """Base class for mojom code generators.

  A language backend subclasses Generator and implements GenerateFiles();
  the remaining methods provide shared output plumbing (writing files,
  prepending the auto-generated banner, stripping empty .cc files).
  """

  # Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
  # files to stdout.
  def __init__(self,
               module,
               output_dir=None,
               typemap=None,
               variant=None,
               bytecode_path=None,
               for_blink=False,
               js_generate_struct_deserializers=False,
               export_attribute=None,
               export_header=None,
               generate_non_variant_code=False,
               support_lazy_serialization=False,
               disallow_native_types=False,
               disallow_interfaces=False,
               generate_message_ids=False,
               generate_fuzzing=False,
               enable_kythe_annotations=False,
               extra_cpp_template_paths=None,
               generate_extra_cpp_only=False):
    self.module = module
    self.output_dir = output_dir
    self.typemap = typemap or {}
    self.variant = variant
    self.bytecode_path = bytecode_path
    self.for_blink = for_blink
    self.js_generate_struct_deserializers = js_generate_struct_deserializers
    self.export_attribute = export_attribute
    self.export_header = export_header
    self.generate_non_variant_code = generate_non_variant_code
    self.support_lazy_serialization = support_lazy_serialization
    self.disallow_native_types = disallow_native_types
    self.disallow_interfaces = disallow_interfaces
    self.generate_message_ids = generate_message_ids
    self.generate_fuzzing = generate_fuzzing
    self.enable_kythe_annotations = enable_kythe_annotations
    self.extra_cpp_template_paths = extra_cpp_template_paths
    self.generate_extra_cpp_only = generate_extra_cpp_only

  def Write(self, contents, filename):
    """Writes |contents| to |filename| under output_dir, or echoes them to
    stdout when no output_dir was given."""
    if self.output_dir is None:
      print(contents)
      return
    full_path = os.path.join(self.output_dir, filename)
    WriteFile(contents, full_path)

  def OptimizeEmpty(self, contents):
    """Strips #include lines from .cc contents that contain no actual code.

    There are many generated .cc files that hold nothing but comments,
    preprocessor lines and empty namespaces; dropping their includes saves
    collective compile time. Contents with any real code are returned
    unmodified.
    """
    lines = contents.splitlines()

    for line in lines:
      # Preprocessor directives and comments are not "code".
      if line.startswith('#') or line.startswith('//'):
        continue
      # Neither are namespace open/close braces.
      if re.match(r'namespace .* {', line) or re.match(r'}.*//.*namespace',
                                                       line):
        continue
      if line.strip():
        # There is some actual code - return the unmodified contents.
        return contents

    # If we reach here then we have a .cc file with no actual code. The
    # includes are therefore unneeded and can be removed.
    new_lines = [line for line in lines if not line.startswith('#include')]
    if len(new_lines) < len(lines):
      new_lines.append('')
      new_lines.append('// Includes removed due to no code being generated.')
    return '\n'.join(new_lines)

  def WriteWithComment(self, contents, filename):
    """Prepends the do-not-edit banner to |contents| and writes the file."""
    generator_name = "mojom_bindings_generator.py"
    comment = r"// %s is auto generated by %s, do not edit" % (filename,
                                                               generator_name)
    # Fixed: dropped a stray C-style statement terminator (';') here.
    contents = comment + '\n' + '\n' + contents
    if filename.endswith('.cc'):
      contents = self.OptimizeEmpty(contents)
    self.Write(contents, filename)

  def GenerateFiles(self, args):
    raise NotImplementedError("Subclasses must override/implement this method")

  def GetJinjaParameters(self):
    """Returns default constructor parameters for the jinja environment."""
    return {}

  def GetGlobals(self):
    """Returns global mappings for the template generation."""
    return {}
|
|
@ -0,0 +1,71 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import importlib.util
|
||||
import os.path
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
def _GetDirAbove(dirname):
  """Walks up from this file's absolute path until a path component equal to
  |dirname| is found, and returns the directory containing that component.

  |dirname| must actually appear above this file; the assert fires at the
  filesystem root otherwise.
  """
  current = os.path.abspath(__file__)
  while True:
    current, component = os.path.split(current)
    # An empty component means we hit the root without finding |dirname|.
    assert component
    if component == dirname:
      return current
|
||||
|
||||
|
||||
# Make the "mojom" package importable when this test is run straight from the
# source tree: fall back to adding the pylib/ directory to sys.path.
# NOTE(review): importlib.util.find_spec() returns None for a missing
# top-level package instead of raising ImportError, so this except branch may
# never trigger for that case — confirm against how the test is invoked.
try:
  importlib.util.find_spec("mojom")
except ImportError:
  sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
from mojom.generate import generator
|
||||
|
||||
class StringManipulationTest(unittest.TestCase):
  """generator contains some string utilities, this tests only those.

  Fixed: assertEquals() is a long-deprecated alias that was removed in
  Python 3.12; all assertions now use assertEqual().
  """

  def testSplitCamelCase(self):
    self.assertEqual(["camel", "case"], generator.SplitCamelCase("CamelCase"))
    self.assertEqual(["url", "loader", "factory"],
                     generator.SplitCamelCase('URLLoaderFactory'))
    self.assertEqual(["get99", "entries"],
                     generator.SplitCamelCase('Get99Entries'))
    self.assertEqual(["get99entries"],
                     generator.SplitCamelCase('Get99entries'))

  def testToCamel(self):
    self.assertEqual("CamelCase", generator.ToCamel("camel_case"))
    self.assertEqual("CAMELCASE", generator.ToCamel("CAMEL_CASE"))
    self.assertEqual("camelCase",
                     generator.ToCamel("camel_case", lower_initial=True))
    self.assertEqual("CamelCase", generator.ToCamel(
        "camel case", delimiter=' '))
    self.assertEqual("CaMelCaSe", generator.ToCamel("caMel_caSe"))
    self.assertEqual("L2Tp", generator.ToCamel("l2tp", digits_split=True))
    self.assertEqual("l2tp", generator.ToCamel("l2tp", lower_initial=True))

  def testToSnakeCase(self):
    self.assertEqual("snake_case", generator.ToLowerSnakeCase("SnakeCase"))
    self.assertEqual("snake_case", generator.ToLowerSnakeCase("snakeCase"))
    self.assertEqual("snake_case", generator.ToLowerSnakeCase("SnakeCASE"))
    self.assertEqual("snake_d3d11_case",
                     generator.ToLowerSnakeCase("SnakeD3D11Case"))
    self.assertEqual("snake_d3d11_case",
                     generator.ToLowerSnakeCase("SnakeD3d11Case"))
    self.assertEqual("snake_d3d11_case",
                     generator.ToLowerSnakeCase("snakeD3d11Case"))
    self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCase"))
    self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("snakeCase"))
    self.assertEqual("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCASE"))
    self.assertEqual("SNAKE_D3D11_CASE",
                     generator.ToUpperSnakeCase("SnakeD3D11Case"))
    self.assertEqual("SNAKE_D3D11_CASE",
                     generator.ToUpperSnakeCase("SnakeD3d11Case"))
    self.assertEqual("SNAKE_D3D11_CASE",
                     generator.ToUpperSnakeCase("snakeD3d11Case"))
|
||||
|
||||
# Allow running this test file directly (it is also discoverable by a runner).
if __name__ == "__main__":
  unittest.main()
|
2059
utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py
Normal file
2059
utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/module.py
Normal file
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,31 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from mojom.generate import module as mojom
|
||||
|
||||
|
||||
class ModuleTest(unittest.TestCase):
  """Tests type-construction validation in mojom.generate.module.

  Fixed: assertEquals() is a long-deprecated alias that was removed in
  Python 3.12; assertions now use assertEqual(), and the exception message
  is obtained with the idiomatic str() rather than calling __str__().
  """

  def testNonInterfaceAsInterfaceRequest(self):
    """Tests that a non-interface cannot be used for interface requests."""
    module = mojom.Module('test_module', 'test_namespace')
    struct = mojom.Struct('TestStruct', module=module)
    with self.assertRaises(Exception) as e:
      mojom.InterfaceRequest(struct)
    self.assertEqual(
        str(e.exception),
        'Interface request requires \'x:TestStruct\' to be an interface.')

  def testNonInterfaceAsAssociatedInterface(self):
    """Tests that a non-interface type cannot be used for associated interfaces.
    """
    module = mojom.Module('test_module', 'test_namespace')
    struct = mojom.Struct('TestStruct', module=module)
    with self.assertRaises(Exception) as e:
      mojom.AssociatedInterface(struct)
    self.assertEqual(
        str(e.exception),
        'Associated interface requires \'x:TestStruct\' to be an interface.')
|
367
utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
Normal file
367
utils/codegen/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
Normal file
|
@ -0,0 +1,367 @@
|
|||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import copy
|
||||
from mojom.generate import module as mojom
|
||||
|
||||
# This module provides a mechanism for determining the packed order and offsets
|
||||
# of a mojom.Struct.
|
||||
#
|
||||
# ps = pack.PackedStruct(struct)
|
||||
# ps.packed_fields will access a list of PackedField objects, each of which
|
||||
# will have an offset, a size and a bit (for mojom.BOOLs).
|
||||
|
||||
# Size of struct header in bytes: num_bytes [4B] + version [4B].
# Every VersionInfo.num_bytes below includes this header on top of the payload.
HEADER_SIZE = 8
|
||||
|
||||
|
||||
class PackedField:
  """A single field as it appears in the packed (wire) layout of a struct.

  Wraps a mojom field together with its computed wire size, alignment and —
  once PackedStruct has run — its byte offset, bit position (for bools) and
  min_version.
  """

  # Wire size in bytes for each fixed-size mojom kind. Reference/aggregate
  # kinds (arrays, maps, structs, interfaces, ...) are handled separately in
  # GetSizeForKind().
  kind_to_size = {
      mojom.BOOL: 1,
      mojom.INT8: 1,
      mojom.UINT8: 1,
      mojom.INT16: 2,
      mojom.UINT16: 2,
      mojom.INT32: 4,
      mojom.UINT32: 4,
      mojom.FLOAT: 4,
      mojom.HANDLE: 4,
      mojom.MSGPIPE: 4,
      mojom.SHAREDBUFFER: 4,
      mojom.PLATFORMHANDLE: 4,
      mojom.DCPIPE: 4,
      mojom.DPPIPE: 4,
      mojom.NULLABLE_HANDLE: 4,
      mojom.NULLABLE_MSGPIPE: 4,
      mojom.NULLABLE_SHAREDBUFFER: 4,
      mojom.NULLABLE_PLATFORMHANDLE: 4,
      mojom.NULLABLE_DCPIPE: 4,
      mojom.NULLABLE_DPPIPE: 4,
      mojom.INT64: 8,
      mojom.UINT64: 8,
      mojom.DOUBLE: 8,
      mojom.STRING: 8,
      mojom.NULLABLE_STRING: 8
  }

  @classmethod
  def GetSizeForKind(cls, kind):
    # Returns the wire size in bytes of |kind|; raises for kinds that were
    # never defined (typically a missing mojom import).
    if isinstance(kind, (mojom.Array, mojom.Map, mojom.Struct, mojom.Interface,
                         mojom.AssociatedInterface, mojom.PendingRemote,
                         mojom.PendingAssociatedRemote)):
      return 8
    if isinstance(kind, mojom.Union):
      return 16
    if isinstance(kind, (mojom.InterfaceRequest, mojom.PendingReceiver)):
      # Interface requests are carried as a message pipe on the wire.
      kind = mojom.MSGPIPE
    if isinstance(
        kind,
        (mojom.AssociatedInterfaceRequest, mojom.PendingAssociatedReceiver)):
      return 4
    if isinstance(kind, mojom.Enum):
      # TODO(mpcomplete): what about big enums?
      return cls.kind_to_size[mojom.INT32]
    if not kind in cls.kind_to_size:
      raise Exception("Undefined type: %s. Did you forget to import the file "
                      "containing the definition?" % kind.spec)
    return cls.kind_to_size[kind]

  @classmethod
  def GetAlignmentForKind(cls, kind):
    # Returns the alignment requirement in bytes; for most kinds it equals
    # the size, with interfaces (4) and unions (8) as exceptions.
    if isinstance(kind, (mojom.Interface, mojom.AssociatedInterface,
                         mojom.PendingRemote, mojom.PendingAssociatedRemote)):
      return 4
    if isinstance(kind, mojom.Union):
      return 8
    return cls.GetSizeForKind(kind)

  def __init__(self,
               field,
               index,
               ordinal,
               original_field=None,
               sub_ordinal=None,
               linked_value_packed_field=None):
    """
    Args:
      field: the original field.
      index: the position of the original field in the struct.
      ordinal: the ordinal of the field for serialization.
      original_field: See below.
      sub_ordinal: See below.
      linked_value_packed_field: See below.

    original_field, sub_ordinal, and linked_value_packed_field are used to
    support nullable ValueKind fields. For legacy reasons, nullable ValueKind
    fields actually generate two PackedFields. This allows:

    - backwards compatibility prior to Mojo support for nullable ValueKinds.
    - correct packing of fields for the aforementioned backwards compatibility.

    When translating Fields to PackedFields, the original field is turned into
    two PackedFields: the first PackedField always has type mojom.BOOL, while
    the second PackedField has the non-nullable version of the field's kind.

    When constructing these PackedFields, original_field references the field
    as defined in the mojom; the name as defined in the mojom will be used for
    all layers above the wire/data layer.

    sub_ordinal is used to sort the two PackedFields correctly with respect to
    each other: the first mojom.BOOL field always has sub_ordinal 0, while the
    second field always has sub_ordinal 1.

    Finally, linked_value_packed_field is used by the serialization and
    deserialization helpers, which generally just iterate over a PackedStruct's
    PackedField's in ordinal order. This allows the helpers to easily reference
    any related PackedFields rather than having to lookup related PackedFields
    by index while iterating.
    """
    self.field = field
    self.index = index
    self.ordinal = ordinal
    self.original_field = original_field
    self.sub_ordinal = sub_ordinal
    self.linked_value_packed_field = linked_value_packed_field
    self.size = self.GetSizeForKind(self.field.kind)
    self.alignment = self.GetAlignmentForKind(self.field.kind)
    # The following three are filled in later by PackedStruct.__init__().
    self.offset = None
    self.bit = None
    self.min_version = None
|
||||
|
||||
|
||||
def GetPad(offset, alignment):
  """Returns how many padding bytes must follow |offset| so that
  |offset + pad| lands on a multiple of |alignment|."""
  remainder = offset % alignment
  return 0 if remainder == 0 else alignment - remainder
|
||||
|
||||
|
||||
def GetFieldOffset(field, last_field):
  """Returns a 2-tuple (offset, bit) placing |field| right after |last_field|.

  Consecutive BOOL fields share a byte until all 8 bits are used; any other
  field starts at the next offset satisfying its alignment.
  """
  shares_bool_byte = (field.field.kind == mojom.BOOL
                      and last_field.field.kind == mojom.BOOL
                      and last_field.bit < 7)
  if shares_bool_byte:
    return (last_field.offset, last_field.bit + 1)

  unaligned = last_field.offset + last_field.size
  return (unaligned + GetPad(unaligned, field.alignment), 0)
|
||||
|
||||
|
||||
def GetPayloadSizeUpToField(field):
  """Returns the payload size (struct header excluded) assuming |field| is the
  last packed field, padded out to 8-byte alignment.

  A falsy |field| (e.g. None for an empty struct) yields 0.
  """
  if not field:
    return 0
  end = field.offset + field.size
  return end + GetPad(end, 8)
|
||||
|
||||
|
||||
def IsNullableValueKindPackedField(field):
  """Returns True when |field| is one of the two PackedFields synthesized for
  a nullable ValueKind mojom field.

  Such fields carry a non-None sub_ordinal (0 for the presence flag, 1 for the
  value) and often need special handling in the bindings for wire
  compatibility.
  """
  assert isinstance(field, PackedField)
  return field.sub_ordinal is not None
|
||||
|
||||
|
||||
def IsPrimaryNullableValueKindPackedField(field):
  """Returns True when |field| is the "primary" PackedField of a nullable
  ValueKind mojom field.

  The primary field is the bool presence flag; it links to the PackedField
  holding the actual value via linked_value_packed_field. Bindings code can
  use this predicate to map the synthesized pair back to the single field
  that consumers see.
  """
  assert isinstance(field, PackedField)
  return field.linked_value_packed_field is not None
|
||||
|
||||
|
||||
class PackedStruct:
  """Computes the packed wire layout (offsets, bool bits, min_versions) for
  all fields of a mojom.Struct, filling holes left by alignment padding."""

  def __init__(self, struct):
    self.struct = struct
    # |packed_fields| contains all the fields, in increasing offset order.
    self.packed_fields = []
    # |packed_fields_in_ordinal_order| refers to the same fields as
    # |packed_fields|, but in ordinal order.
    self.packed_fields_in_ordinal_order = []

    # No fields.
    if (len(struct.fields) == 0):
      return

    # Start by sorting by ordinal.
    src_fields = self.packed_fields_in_ordinal_order
    ordinal = 0
    for index, field in enumerate(struct.fields):
      # An explicit ordinal resets the running counter; otherwise fields get
      # consecutive ordinals in declaration order.
      if field.ordinal is not None:
        ordinal = field.ordinal
      # Nullable value types are a bit weird: they generate two PackedFields
      # despite being a single ValueKind. This is for wire compatibility to
      # ease the transition from legacy mojom syntax where nullable value types
      # were not supported.
      if isinstance(field.kind, mojom.ValueKind) and field.kind.is_nullable:
        # The suffixes intentionally use Unicode codepoints which are considered
        # valid C++/Java/JavaScript identifiers, yet are unlikely to be used in
        # actual user code.
        has_value_field = copy.copy(field)
        has_value_field.name = f'{field.mojom_name}_$flag'
        has_value_field.kind = mojom.BOOL

        value_field = copy.copy(field)
        value_field.name = f'{field.mojom_name}_$value'
        value_field.kind = field.kind.MakeUnnullableKind()

        value_packed_field = PackedField(value_field,
                                         index,
                                         ordinal,
                                         original_field=field,
                                         sub_ordinal=1,
                                         linked_value_packed_field=None)
        has_value_packed_field = PackedField(
            has_value_field,
            index,
            ordinal,
            original_field=field,
            sub_ordinal=0,
            linked_value_packed_field=value_packed_field)
        src_fields.append(has_value_packed_field)
        src_fields.append(value_packed_field)
      else:
        src_fields.append(PackedField(field, index, ordinal))
      ordinal += 1
    # sub_ordinal only differs within a flag/value pair (which shares an
    # ordinal), so the tuple key keeps each pair in flag-then-value order.
    src_fields.sort(key=lambda field: (field.ordinal, field.sub_ordinal))

    # Set |min_version| for each field.
    next_min_version = 0
    for packed_field in src_fields:
      if packed_field.field.min_version is None:
        assert next_min_version == 0
      else:
        assert packed_field.field.min_version >= next_min_version
        next_min_version = packed_field.field.min_version
      packed_field.min_version = next_min_version

      if (packed_field.min_version != 0
          and mojom.IsReferenceKind(packed_field.field.kind)
          and not packed_field.field.kind.is_nullable):
        raise Exception(
            "Non-nullable reference fields are only allowed in version 0 of a "
            "struct. %s.%s is defined with [MinVersion=%d]." %
            (self.struct.name, packed_field.field.name,
             packed_field.min_version))

    # The first field always starts the payload at offset 0, bit 0.
    src_field = src_fields[0]
    src_field.offset = 0
    src_field.bit = 0
    dst_fields = self.packed_fields
    dst_fields.append(src_field)

    # Then find first slot that each field will fit.
    for src_field in src_fields[1:]:
      last_field = dst_fields[0]
      for i in range(1, len(dst_fields)):
        next_field = dst_fields[i]
        offset, bit = GetFieldOffset(src_field, last_field)
        if offset + src_field.size <= next_field.offset:
          # Found hole.
          src_field.offset = offset
          src_field.bit = bit
          dst_fields.insert(i, src_field)
          break
        last_field = next_field
      if src_field.offset is None:
        # Add to end
        src_field.offset, src_field.bit = GetFieldOffset(src_field, last_field)
        dst_fields.append(src_field)
|
||||
|
||||
|
||||
class ByteInfo:
  """Describes one byte of a struct's packed payload: either padding, or the
  byte where one or more (for bools) packed fields begin."""

  def __init__(self):
    # True when no field occupies this byte.
    self.is_padding = False
    # PackedFields whose storage starts at this byte.
    self.packed_fields = []
|
||||
|
||||
|
||||
def GetByteLayout(packed_struct):
  # Returns a list of ByteInfo, one entry per byte of the struct payload
  # (header excluded), marking each byte as padding or as the start of one
  # or more packed fields.
  total_payload_size = GetPayloadSizeUpToField(
      packed_struct.packed_fields[-1] if packed_struct.packed_fields else None)
  byte_info = [ByteInfo() for i in range(total_payload_size)]

  limit_of_previous_field = 0
  for packed_field in packed_struct.packed_fields:
    # Bytes between the previous field's end and this field's start are pad.
    for i in range(limit_of_previous_field, packed_field.offset):
      byte_info[i].is_padding = True
    byte_info[packed_field.offset].packed_fields.append(packed_field)
    limit_of_previous_field = packed_field.offset + packed_field.size

  # Trailing bytes up to the 8-byte-aligned payload size are padding too.
  for i in range(limit_of_previous_field, len(byte_info)):
    byte_info[i].is_padding = True

  for byte in byte_info:
    # A given byte cannot both be padding and have a fields packed into it.
    assert not (byte.is_padding and byte.packed_fields)

  return byte_info
|
||||
|
||||
|
||||
class VersionInfo:
  """Aggregated facts about one version of a struct: the version number, the
  mojom-level field count, the packed field count (nullable value kinds pack
  as two fields), and the total byte size including the struct header."""

  def __init__(self, version, num_fields, num_packed_fields, num_bytes):
    self.version = version
    self.num_fields = num_fields
    self.num_packed_fields = num_packed_fields
    self.num_bytes = num_bytes
|
||||
|
||||
|
||||
def GetVersionInfo(packed_struct):
  """Get version information for a struct.

  Args:
    packed_struct: A PackedStruct instance.

  Returns:
    A non-empty list of VersionInfo instances, sorted by version in increasing
    order.
    Note: The version numbers may not be consecutive.
  """
  versions = []
  last_version = 0
  last_num_fields = 0
  last_num_packed_fields = 0
  last_payload_size = 0

  for packed_field in packed_struct.packed_fields_in_ordinal_order:
    # A jump in min_version closes out the previous version's snapshot.
    if packed_field.min_version != last_version:
      versions.append(
          VersionInfo(last_version, last_num_fields, last_num_packed_fields,
                      last_payload_size + HEADER_SIZE))
      last_version = packed_field.min_version

    # Nullable numeric fields (e.g. `int32?`) expand to two packed fields, so to
    # avoid double-counting, only increment if the field is:
    # - not used for representing a nullable value kind field, or
    # - the primary field representing the nullable value kind field.
    last_num_fields += 1 if (
        not IsNullableValueKindPackedField(packed_field)
        or IsPrimaryNullableValueKindPackedField(packed_field)) else 0

    last_num_packed_fields += 1

    # The fields are iterated in ordinal order here. However, the size of a
    # version is determined by the last field of that version in pack order,
    # instead of ordinal order. Therefore, we need to calculate the max value.
    last_payload_size = max(GetPayloadSizeUpToField(packed_field),
                            last_payload_size)

  # The final (possibly only) version must contribute at least one new packed
  # field relative to the previous snapshot.
  assert len(
      versions) == 0 or last_num_packed_fields != versions[-1].num_packed_fields
  versions.append(
      VersionInfo(last_version, last_num_fields, last_num_packed_fields,
                  last_payload_size + HEADER_SIZE))
  return versions
|
|
@ -0,0 +1,253 @@
|
|||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from mojom.generate import module as mojom
|
||||
from mojom.generate import pack
|
||||
|
||||
|
||||
class PackTest(unittest.TestCase):
|
||||
def testOrdinalOrder(self):
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('testfield1', mojom.INT32, 2)
|
||||
struct.AddField('testfield2', mojom.INT32, 1)
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
self.assertEqual(2, len(ps.packed_fields))
|
||||
self.assertEqual('testfield2', ps.packed_fields[0].field.mojom_name)
|
||||
self.assertEqual('testfield1', ps.packed_fields[1].field.mojom_name)
|
||||
|
||||
def testZeroFields(self):
|
||||
struct = mojom.Struct('test')
|
||||
ps = pack.PackedStruct(struct)
|
||||
self.assertEqual(0, len(ps.packed_fields))
|
||||
|
||||
def testOneField(self):
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('testfield1', mojom.INT8)
|
||||
ps = pack.PackedStruct(struct)
|
||||
self.assertEqual(1, len(ps.packed_fields))
|
||||
|
||||
def _CheckPackSequence(self, kinds, fields, offsets):
|
||||
"""Checks the pack order and offsets of a sequence of mojom.Kinds.
|
||||
|
||||
Args:
|
||||
kinds: A sequence of mojom.Kinds that specify the fields that are to be
|
||||
created.
|
||||
fields: The expected order of the resulting fields, with the integer "1"
|
||||
first.
|
||||
offsets: The expected order of offsets, with the integer "0" first.
|
||||
"""
|
||||
struct = mojom.Struct('test')
|
||||
index = 1
|
||||
for kind in kinds:
|
||||
struct.AddField('%d' % index, kind)
|
||||
index += 1
|
||||
ps = pack.PackedStruct(struct)
|
||||
num_fields = len(ps.packed_fields)
|
||||
self.assertEqual(len(kinds), num_fields)
|
||||
for i in range(num_fields):
|
||||
self.assertEqual('%d' % fields[i], ps.packed_fields[i].field.mojom_name)
|
||||
self.assertEqual(offsets[i], ps.packed_fields[i].offset)
|
||||
|
||||
def testPaddingPackedInOrder(self):
|
||||
return self._CheckPackSequence((mojom.INT8, mojom.UINT8, mojom.INT32),
|
||||
(1, 2, 3), (0, 1, 4))
|
||||
|
||||
def testPaddingPackedOutOfOrder(self):
|
||||
return self._CheckPackSequence((mojom.INT8, mojom.INT32, mojom.UINT8),
|
||||
(1, 3, 2), (0, 1, 4))
|
||||
|
||||
def testPaddingPackedOverflow(self):
|
||||
kinds = (mojom.INT8, mojom.INT32, mojom.INT16, mojom.INT8, mojom.INT8)
|
||||
# 2 bytes should be packed together first, followed by short, then by int.
|
||||
fields = (1, 4, 3, 2, 5)
|
||||
offsets = (0, 1, 2, 4, 8)
|
||||
return self._CheckPackSequence(kinds, fields, offsets)
|
||||
|
||||
def testNullableTypes(self):
|
||||
kinds = (mojom.STRING.MakeNullableKind(), mojom.HANDLE.MakeNullableKind(),
|
||||
mojom.Struct('test_struct').MakeNullableKind(),
|
||||
mojom.DCPIPE.MakeNullableKind(), mojom.Array().MakeNullableKind(),
|
||||
mojom.DPPIPE.MakeNullableKind(),
|
||||
mojom.Array(length=5).MakeNullableKind(),
|
||||
mojom.MSGPIPE.MakeNullableKind(),
|
||||
mojom.Interface('test_interface').MakeNullableKind(),
|
||||
mojom.SHAREDBUFFER.MakeNullableKind(),
|
||||
mojom.InterfaceRequest().MakeNullableKind())
|
||||
fields = (1, 2, 4, 3, 5, 6, 8, 7, 9, 10, 11)
|
||||
offsets = (0, 8, 12, 16, 24, 32, 36, 40, 48, 56, 60)
|
||||
return self._CheckPackSequence(kinds, fields, offsets)
|
||||
|
||||
def testAllTypes(self):
|
||||
return self._CheckPackSequence(
|
||||
(mojom.BOOL, mojom.INT8, mojom.STRING, mojom.UINT8, mojom.INT16,
|
||||
mojom.DOUBLE, mojom.UINT16, mojom.INT32, mojom.UINT32, mojom.INT64,
|
||||
mojom.FLOAT, mojom.STRING, mojom.HANDLE, mojom.UINT64,
|
||||
mojom.Struct('test'), mojom.Array(), mojom.STRING.MakeNullableKind()),
|
||||
(1, 2, 4, 5, 7, 3, 6, 8, 9, 10, 11, 13, 12, 14, 15, 16, 17, 18),
|
||||
(0, 1, 2, 4, 6, 8, 16, 24, 28, 32, 40, 44, 48, 56, 64, 72, 80, 88))
|
||||
|
||||
def testPaddingPackedOutOfOrderByOrdinal(self):
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('testfield1', mojom.INT8)
|
||||
struct.AddField('testfield3', mojom.UINT8, 3)
|
||||
struct.AddField('testfield2', mojom.INT32, 2)
|
||||
ps = pack.PackedStruct(struct)
|
||||
self.assertEqual(3, len(ps.packed_fields))
|
||||
|
||||
# Second byte should be packed in behind first, altering order.
|
||||
self.assertEqual('testfield1', ps.packed_fields[0].field.mojom_name)
|
||||
self.assertEqual('testfield3', ps.packed_fields[1].field.mojom_name)
|
||||
self.assertEqual('testfield2', ps.packed_fields[2].field.mojom_name)
|
||||
|
||||
# Second byte should be packed with first.
|
||||
self.assertEqual(0, ps.packed_fields[0].offset)
|
||||
self.assertEqual(1, ps.packed_fields[1].offset)
|
||||
self.assertEqual(4, ps.packed_fields[2].offset)
|
||||
|
||||
def testBools(self):
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('bit0', mojom.BOOL)
|
||||
struct.AddField('bit1', mojom.BOOL)
|
||||
struct.AddField('int', mojom.INT32)
|
||||
struct.AddField('bit2', mojom.BOOL)
|
||||
struct.AddField('bit3', mojom.BOOL)
|
||||
struct.AddField('bit4', mojom.BOOL)
|
||||
struct.AddField('bit5', mojom.BOOL)
|
||||
struct.AddField('bit6', mojom.BOOL)
|
||||
struct.AddField('bit7', mojom.BOOL)
|
||||
struct.AddField('bit8', mojom.BOOL)
|
||||
ps = pack.PackedStruct(struct)
|
||||
self.assertEqual(10, len(ps.packed_fields))
|
||||
|
||||
# First 8 bits packed together.
|
||||
for i in range(8):
|
||||
pf = ps.packed_fields[i]
|
||||
self.assertEqual(0, pf.offset)
|
||||
self.assertEqual("bit%d" % i, pf.field.mojom_name)
|
||||
self.assertEqual(i, pf.bit)
|
||||
|
||||
# Ninth bit goes into second byte.
|
||||
self.assertEqual("bit8", ps.packed_fields[8].field.mojom_name)
|
||||
self.assertEqual(1, ps.packed_fields[8].offset)
|
||||
self.assertEqual(0, ps.packed_fields[8].bit)
|
||||
|
||||
# int comes last.
|
||||
self.assertEqual("int", ps.packed_fields[9].field.mojom_name)
|
||||
self.assertEqual(4, ps.packed_fields[9].offset)
|
||||
|
||||
def testMinVersion(self):
|
||||
"""Tests that |min_version| is properly set for packed fields."""
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('field_2', mojom.BOOL, 2)
|
||||
struct.AddField('field_0', mojom.INT32, 0)
|
||||
struct.AddField('field_1', mojom.INT64, 1)
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
self.assertEqual('field_0', ps.packed_fields[0].field.mojom_name)
|
||||
self.assertEqual('field_2', ps.packed_fields[1].field.mojom_name)
|
||||
self.assertEqual('field_1', ps.packed_fields[2].field.mojom_name)
|
||||
|
||||
self.assertEqual(0, ps.packed_fields[0].min_version)
|
||||
self.assertEqual(0, ps.packed_fields[1].min_version)
|
||||
self.assertEqual(0, ps.packed_fields[2].min_version)
|
||||
|
||||
struct.fields[0].attributes = {'MinVersion': 1}
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
self.assertEqual(0, ps.packed_fields[0].min_version)
|
||||
self.assertEqual(1, ps.packed_fields[1].min_version)
|
||||
self.assertEqual(0, ps.packed_fields[2].min_version)
|
||||
|
||||
def testGetVersionInfoEmptyStruct(self):
|
||||
"""Tests that pack.GetVersionInfo() never returns an empty list, even for
|
||||
empty structs.
|
||||
"""
|
||||
struct = mojom.Struct('test')
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
versions = pack.GetVersionInfo(ps)
|
||||
self.assertEqual(1, len(versions))
|
||||
self.assertEqual(0, versions[0].version)
|
||||
self.assertEqual(0, versions[0].num_fields)
|
||||
self.assertEqual(8, versions[0].num_bytes)
|
||||
|
||||
def testGetVersionInfoComplexOrder(self):
|
||||
"""Tests pack.GetVersionInfo() using a struct whose definition order,
|
||||
ordinal order and pack order for fields are all different.
|
||||
"""
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField(
|
||||
'field_3', mojom.BOOL, ordinal=3, attributes={'MinVersion': 3})
|
||||
struct.AddField('field_0', mojom.INT32, ordinal=0)
|
||||
struct.AddField(
|
||||
'field_1', mojom.INT64, ordinal=1, attributes={'MinVersion': 2})
|
||||
struct.AddField(
|
||||
'field_2', mojom.INT64, ordinal=2, attributes={'MinVersion': 3})
|
||||
ps = pack.PackedStruct(struct)
|
||||
|
||||
versions = pack.GetVersionInfo(ps)
|
||||
self.assertEqual(3, len(versions))
|
||||
|
||||
self.assertEqual(0, versions[0].version)
|
||||
self.assertEqual(1, versions[0].num_fields)
|
||||
self.assertEqual(16, versions[0].num_bytes)
|
||||
|
||||
self.assertEqual(2, versions[1].version)
|
||||
self.assertEqual(2, versions[1].num_fields)
|
||||
self.assertEqual(24, versions[1].num_bytes)
|
||||
|
||||
self.assertEqual(3, versions[2].version)
|
||||
self.assertEqual(4, versions[2].num_fields)
|
||||
self.assertEqual(32, versions[2].num_bytes)
|
||||
|
||||
def testGetVersionInfoPackedStruct(self):
|
||||
"""Tests that pack.GetVersionInfo() correctly sets version, num_fields,
|
||||
and num_packed_fields for a packed struct.
|
||||
"""
|
||||
struct = mojom.Struct('test')
|
||||
struct.AddField('field_0', mojom.BOOL, ordinal=0)
|
||||
struct.AddField('field_1',
|
||||
mojom.NULLABLE_BOOL,
|
||||
ordinal=1,
|
||||
attributes={'MinVersion': 1})
|
||||
struct.AddField('field_2',
|
||||
mojom.NULLABLE_BOOL,
|
||||
ordinal=2,
|
||||
attributes={'MinVersion': 2})
|
||||
ps = pack.PackedStruct(struct)
|
||||
versions = pack.GetVersionInfo(ps)
|
||||
|
||||
self.assertEqual(3, len(versions))
|
||||
self.assertEqual(0, versions[0].version)
|
||||
self.assertEqual(1, versions[1].version)
|
||||
self.assertEqual(2, versions[2].version)
|
||||
self.assertEqual(1, versions[0].num_fields)
|
||||
self.assertEqual(2, versions[1].num_fields)
|
||||
self.assertEqual(3, versions[2].num_fields)
|
||||
self.assertEqual(1, versions[0].num_packed_fields)
|
||||
self.assertEqual(3, versions[1].num_packed_fields)
|
||||
self.assertEqual(5, versions[2].num_packed_fields)
|
||||
|
||||
def testInterfaceAlignment(self):
|
||||
"""Tests that interfaces are aligned on 4-byte boundaries, although the size
|
||||
of an interface is 8 bytes.
|
||||
"""
|
||||
kinds = (mojom.INT32, mojom.Interface('test_interface'))
|
||||
fields = (1, 2)
|
||||
offsets = (0, 4)
|
||||
self._CheckPackSequence(kinds, fields, offsets)
|
||||
|
||||
def testAssociatedInterfaceAlignment(self):
|
||||
"""Tests that associated interfaces are aligned on 4-byte boundaries,
|
||||
although the size of an associated interface is 8 bytes.
|
||||
"""
|
||||
kinds = (mojom.INT32,
|
||||
mojom.AssociatedInterface(mojom.Interface('test_interface')))
|
||||
fields = (1, 2)
|
||||
offsets = (0, 4)
|
||||
self._CheckPackSequence(kinds, fields, offsets)
|
|
@ -0,0 +1,82 @@
|
|||
# Copyright 2013 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# Based on third_party/WebKit/Source/build/scripts/template_expander.py.
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom import fileutil
|
||||
|
||||
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||
import jinja2
|
||||
|
||||
|
||||
def ApplyTemplate(mojo_generator, path_to_template, params, **kwargs):
  """Renders |path_to_template| with |params| using the generator's
  precompiled template bytecode.

  Extra keyword arguments are merged over the generator's own Jinja
  parameters and passed to the jinja2.Environment constructor.
  """
  bytecode_file = os.path.join(mojo_generator.bytecode_path,
                               "%s.zip" % mojo_generator.GetTemplatePrefix())
  env_kwargs = dict(mojo_generator.GetJinjaParameters())
  env_kwargs.update(kwargs)

  env = jinja2.Environment(loader=jinja2.ModuleLoader(bytecode_file),
                           keep_trailing_newline=True,
                           **env_kwargs)
  env.globals.update(mojo_generator.GetGlobals())
  env.filters.update(mojo_generator.GetFilters())
  return env.get_template(path_to_template).render(params)
|
||||
|
||||
|
||||
def UseJinja(path_to_template, **kwargs):
  """Decorator: the wrapped generator method returns template parameters,
  which are rendered through |path_to_template| via ApplyTemplate().

  The wrapped callable keeps the original's __name__; args[0] is expected
  to be the generator instance (|self|).
  """
  def _Decorate(generator):
    def _Render(*args, **call_kwargs):
      parameters = generator(*args, **call_kwargs)
      return ApplyTemplate(args[0], path_to_template, parameters, **kwargs)

    _Render.__name__ = generator.__name__
    return _Render

  return _Decorate
|
||||
|
||||
|
||||
def ApplyImportedTemplate(mojo_generator, path_to_template, filename, params,
                          **kwargs):
  """Renders |filename| found under the |path_to_template| directory.

  Unlike ApplyTemplate(), templates are loaded from the filesystem rather
  than from precompiled bytecode.
  """
  env_kwargs = dict(mojo_generator.GetJinjaParameters())
  env_kwargs.update(kwargs)

  env = jinja2.Environment(
      loader=jinja2.FileSystemLoader(searchpath=path_to_template),
      keep_trailing_newline=True,
      **env_kwargs)
  env.globals.update(mojo_generator.GetGlobals())
  env.filters.update(mojo_generator.GetFilters())
  return env.get_template(filename).render(params)
|
||||
|
||||
|
||||
def UseJinjaForImportedTemplate(func):
  """Decorator: renders the wrapped method's returned parameters through a
  filesystem template.

  The wrapped method is expected to be called as
  (self, path_to_template, filename, ...); its return value becomes the
  template parameters for ApplyImportedTemplate().
  """
  def wrapper(*args, **kwargs):
    parameters = func(*args, **kwargs)
    generator, template_dir, template_name = args[0], args[1], args[2]
    return ApplyImportedTemplate(generator, template_dir, template_name,
                                 parameters)

  wrapper.__name__ = func.__name__
  return wrapper
|
||||
|
||||
|
||||
def PrecompileTemplates(generator_modules, output_dir):
  """Compiles every generator module's *.tmpl templates into a zip of Jinja
  bytecode under |output_dir| (one "<prefix>.zip" per generator)."""
  for module in generator_modules.values():
    generator = module.Generator(None)
    template_dir = os.path.join(os.path.dirname(module.__file__),
                                generator.GetTemplatePrefix())
    env = jinja2.Environment(loader=jinja2.FileSystemLoader([template_dir]))
    env.filters.update(generator.GetFilters())
    target = os.path.join(output_dir,
                          "%s.zip" % generator.GetTemplatePrefix())
    env.compile_templates(target,
                          extensions=["tmpl"],
                          zip="stored",
                          ignore_errors=False)
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,141 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from mojom.generate import module as mojom
|
||||
from mojom.generate import translate
|
||||
from mojom.parse import ast
|
||||
|
||||
class TranslateTest(unittest.TestCase):
  """Tests |translate| helpers and |translate.OrderedModule()|."""
  # NOTE: the docstring previously claimed this tested |parser.Parse()|,
  # which was a copy-paste error.  Also, the deprecated assertEquals alias
  # (removed in Python 3.12) has been replaced with assertEqual throughout.

  def testSimpleArray(self):
    """Tests a simple int32[]."""
    # pylint: disable=W0212
    self.assertEqual(translate._MapKind("int32[]"), "a:i32")

  def testAssociativeArray(self):
    """Tests a simple uint8{string}."""
    # pylint: disable=W0212
    self.assertEqual(translate._MapKind("uint8{string}"), "m[s][u8]")

  def testLeftToRightAssociativeArray(self):
    """Makes sure that parsing is done from right to left on the internal kinds
    in the presence of an associative array."""
    # pylint: disable=W0212
    self.assertEqual(translate._MapKind("uint8[]{string}"), "m[s][a:u8]")

  def testTranslateSimpleUnions(self):
    """Makes sure that a simple union is translated correctly."""
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union(
            "SomeUnion", None,
            ast.UnionBody([
                ast.UnionField("a", None, None, "int32"),
                ast.UnionField("b", None, None, "string")
            ]))
    ])

    translation = translate.OrderedModule(tree, "mojom_tree", [])
    self.assertEqual(1, len(translation.unions))

    union = translation.unions[0]
    self.assertTrue(isinstance(union, mojom.Union))
    self.assertEqual("SomeUnion", union.mojom_name)
    self.assertEqual(2, len(union.fields))
    self.assertEqual("a", union.fields[0].mojom_name)
    self.assertEqual(mojom.INT32.spec, union.fields[0].kind.spec)
    self.assertEqual("b", union.fields[1].mojom_name)
    self.assertEqual(mojom.STRING.spec, union.fields[1].kind.spec)

  def testMapKindRaisesWithDuplicate(self):
    """Verifies _MapTreeForType() raises when passed two values with the same
    name."""
    methods = [
        ast.Method('dup', None, None, ast.ParameterList(), None),
        ast.Method('dup', None, None, ast.ParameterList(), None)
    ]
    with self.assertRaises(Exception):
      translate._ElemsOfType(methods, ast.Method, 'scope')

  def testAssociatedKinds(self):
    """Tests type spec translation of associated interfaces and requests."""
    # pylint: disable=W0212
    self.assertEqual(
        translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
    self.assertEqual(translate._MapKind("rca<SomeInterface>?"),
                     "?rca:x:SomeInterface")

  def testSelfRecursiveUnions(self):
    """Verifies _UnionField() raises when a union is self-recursive."""
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union("SomeUnion", None,
                  ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion")]))
    ])
    with self.assertRaises(Exception):
      translate.OrderedModule(tree, "mojom_tree", [])

    # A nullable self-reference must be rejected as well.
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union(
            "SomeUnion", None,
            ast.UnionBody([ast.UnionField("a", None, None, "SomeUnion?")]))
    ])
    with self.assertRaises(Exception):
      translate.OrderedModule(tree, "mojom_tree", [])

  def testDuplicateAttributesException(self):
    """Verifies that repeating an attribute key raises during translation."""
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union(
            "FakeUnion",
            ast.AttributeList([
                ast.Attribute("key1", "value"),
                ast.Attribute("key1", "value")
            ]),
            ast.UnionBody([
                ast.UnionField("a", None, None, "int32"),
                ast.UnionField("b", None, None, "string")
            ]))
    ])
    with self.assertRaises(Exception):
      translate.OrderedModule(tree, "mojom_tree", [])

  def testEnumWithReservedValues(self):
    """Verifies that assigning reserved values to enumerators fails."""
    # -128 is reserved for the empty representation in WTF::HashTraits.
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Enum(
            "MyEnum", None,
            ast.EnumValueList([
                ast.EnumValue('kReserved', None, '-128'),
            ]))
    ])
    with self.assertRaises(Exception) as context:
      translate.OrderedModule(tree, "mojom_tree", [])
    self.assertIn("reserved for WTF::HashTrait", str(context.exception))

    # -127 is reserved for the deleted representation in WTF::HashTraits.
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Enum(
            "MyEnum", None,
            ast.EnumValueList([
                ast.EnumValue('kReserved', None, '-127'),
            ]))
    ])
    with self.assertRaises(Exception) as context:
      translate.OrderedModule(tree, "mojom_tree", [])
    self.assertIn("reserved for WTF::HashTrait", str(context.exception))

    # Implicitly assigning a reserved value should also fail.
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Enum(
            "MyEnum", None,
            ast.EnumValueList([
                ast.EnumValue('kNotReserved', None, '-129'),
                ast.EnumValue('kImplicitlyReserved', None, None),
            ]))
    ])
    with self.assertRaises(Exception) as context:
      translate.OrderedModule(tree, "mojom_tree", [])
    self.assertIn("reserved for WTF::HashTrait", str(context.exception))
|
462
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
Normal file
462
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
Normal file
|
@ -0,0 +1,462 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Node classes for the AST for a Mojo IDL file."""
|
||||
|
||||
# Note: For convenience of testing, you probably want to define __eq__() methods
|
||||
# for all node types; it's okay to be slightly lax (e.g., not compare filename
|
||||
# and lineno). You may also define __repr__() to help with analyzing test
|
||||
# failures, especially for more complex types.
|
||||
|
||||
import os.path
|
||||
|
||||
|
||||
# Instance of 'NodeListBase' has no '_list_item_type' member (no-member)
|
||||
# pylint: disable=no-member
|
||||
|
||||
|
||||
class NodeBase:
  """Base class for nodes in the AST."""

  def __init__(self, filename=None, lineno=None):
    self.filename = filename
    self.lineno = lineno

  def __eq__(self, other):
    # Equality is by exact type only; source position is deliberately
    # ignored so tests can compare nodes by value.  Strict type comparison
    # (not isinstance) is intentional: a subclass must never compare equal
    # to its base.
    # pylint: disable=unidiomatic-typecheck
    return type(self) == type(other)

  def __ne__(self, other):
    # Derived from __eq__ so subclasses only need to override the latter.
    return not self == other
|
||||
|
||||
|
||||
# TODO(vtl): Some of this is complicated enough that it should be tested.
|
||||
class NodeListBase(NodeBase):
  """A list of nodes that all share one type.

  Subclasses define _list_item_type as the class (or tuple of classes) the
  list may contain.
  """

  def __init__(self, item_or_items=None, **kwargs):
    super().__init__(**kwargs)
    self.items = []
    # Accept nothing, a single item, or a list of items; Append() enforces
    # the _list_item_type check in every case.
    if isinstance(item_or_items, list):
      for item in item_or_items:
        self.Append(item)
    elif item_or_items is not None:
      self.Append(item_or_items)

  # Only iteration is supported directly; everything else goes through
  # |items|.  __len__/__nonzero__ are intentionally absent so that
  # bool(NodeListBase()) stays true.
  def __iter__(self):
    return iter(self.items)

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.items == other.items)

  def __repr__(self):
    # Gives slightly more sensible output when a test comparison fails.
    contents = ", ".join(repr(elem) for elem in self.items)
    return self.__class__.__name__ + "([" + contents + "])"

  def Insert(self, item):
    """Inserts item at the front of the list."""
    assert isinstance(item, self._list_item_type)
    self.items.insert(0, item)
    self._UpdateFilenameAndLineno()

  def Append(self, item):
    """Appends item to the end of the list."""
    assert isinstance(item, self._list_item_type)
    self.items.append(item)
    self._UpdateFilenameAndLineno()

  def _UpdateFilenameAndLineno(self):
    # The list reports the source position of its first element.
    if self.items:
      self.filename = self.items[0].filename
      self.lineno = self.items[0].lineno
|
||||
|
||||
|
||||
class Definition(NodeBase):
  """Base class for any globally named definition (enums, enum values,
  consts, structs, struct fields, interfaces...).  Parameter definitions are
  not included.  Meant to be subclassed."""

  def __init__(self, mojom_name, **kwargs):
    assert isinstance(mojom_name, str)
    super().__init__(**kwargs)
    self.mojom_name = mojom_name
|
||||
|
||||
|
||||
################################################################################
|
||||
|
||||
|
||||
class Attribute(NodeBase):
  """Represents a single key/value attribute."""

  def __init__(self, key, value, **kwargs):
    assert isinstance(key, str)
    super().__init__(**kwargs)
    self.key = key
    self.value = value

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.key == other.key
            and self.value == other.value)
|
||||
|
||||
|
||||
class AttributeList(NodeListBase):
  """Represents a list of attributes."""

  _list_item_type = Attribute
|
||||
|
||||
|
||||
class Const(Definition):
  """Represents a const definition."""

  def __init__(self, mojom_name, attribute_list, typename, value, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    # The typename is currently passed through as a string.
    assert isinstance(typename, str)
    # The value is either a literal (currently passed through as a string)
    # or a "wrapped identifier" tuple.
    assert isinstance(value, (tuple, str))
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.typename = typename
    self.value = value

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.typename == other.typename
            and self.value == other.value)
|
||||
|
||||
|
||||
class Enum(Definition):
  """Represents an enum definition."""

  def __init__(self, mojom_name, attribute_list, enum_value_list, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert enum_value_list is None or isinstance(enum_value_list,
                                                 EnumValueList)
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.enum_value_list = enum_value_list

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.enum_value_list == other.enum_value_list)
|
||||
|
||||
|
||||
class EnumValue(Definition):
  """Represents a definition of a single enum value."""

  def __init__(self, mojom_name, attribute_list, value, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    # The optional value is either an int (currently carried as a string) or
    # a "wrapped identifier" tuple.
    assert value is None or isinstance(value, (tuple, str))
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.value = value

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.value == other.value)
|
||||
|
||||
|
||||
class EnumValueList(NodeListBase):
  """Represents a list of enum value definitions (i.e., the "body" of an enum
  definition)."""

  _list_item_type = EnumValue
|
||||
|
||||
|
||||
class Feature(Definition):
  """Represents a runtime feature definition."""

  def __init__(self, mojom_name, attribute_list, body, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert body is None or isinstance(body, FeatureBody)
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.body = body

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.body == other.body)

  def __repr__(self):
    return "Feature(mojom_name = %s, attribute_list = %s, body = %s)" % (
        self.mojom_name, self.attribute_list, self.body)
|
||||
|
||||
|
||||
# This needs to be declared after `FeatureConst` and `FeatureField`.
|
||||
class FeatureBody(NodeListBase):
  """Represents the body of (i.e., list of definitions inside) a feature."""

  # Features are compile-time helpers, so all fields are initializers/consts
  # for the underlying platform feature type.
  # NOTE: the previous "(Const)" spelling looked like a one-element tuple but
  # was just a parenthesized name; the redundant parentheses are removed.
  _list_item_type = Const
|
||||
|
||||
|
||||
class Import(NodeBase):
  """Represents an import statement."""

  def __init__(self, attribute_list, import_filename, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert isinstance(import_filename, str)
    super().__init__(**kwargs)
    self.attribute_list = attribute_list
    # Normalize to forward slashes so paths compare equal across platforms.
    # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
    self.import_filename = os.path.normpath(import_filename).replace('\\', '/')

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.import_filename == other.import_filename)
|
||||
|
||||
|
||||
class ImportList(NodeListBase):
  """Represents a list (i.e., sequence) of import statements."""

  _list_item_type = Import
|
||||
|
||||
|
||||
class Interface(Definition):
  """Represents an interface definition."""

  def __init__(self, mojom_name, attribute_list, body, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert isinstance(body, InterfaceBody)
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.body = body

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.body == other.body)
|
||||
|
||||
|
||||
class Method(Definition):
  """Represents a method definition, with optional response parameters."""

  def __init__(self, mojom_name, attribute_list, ordinal, parameter_list,
               response_parameter_list, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert isinstance(parameter_list, ParameterList)
    # A None response parameter list means the method has no response.
    assert (response_parameter_list is None
            or isinstance(response_parameter_list, ParameterList))
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.parameter_list = parameter_list
    self.response_parameter_list = response_parameter_list

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.ordinal == other.ordinal
            and self.parameter_list == other.parameter_list
            and self.response_parameter_list == other.response_parameter_list)
|
||||
|
||||
|
||||
# This needs to be declared after |Method|.
|
||||
# This needs to be declared after |Method|.
class InterfaceBody(NodeListBase):
  """Represents the body of (i.e., list of definitions inside) an interface."""

  _list_item_type = (Const, Enum, Method)
|
||||
|
||||
|
||||
class Module(NodeBase):
  """Represents a module statement."""

  def __init__(self, mojom_namespace, attribute_list, **kwargs):
    # |mojom_namespace| is either None or a "wrapped identifier" tuple.
    assert mojom_namespace is None or isinstance(mojom_namespace, tuple)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    super().__init__(**kwargs)
    self.mojom_namespace = mojom_namespace
    self.attribute_list = attribute_list

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.mojom_namespace == other.mojom_namespace
            and self.attribute_list == other.attribute_list)
|
||||
|
||||
|
||||
class Mojom(NodeBase):
  """Represents an entire .mojom file. (This is the root node.)"""

  def __init__(self, module, import_list, definition_list, **kwargs):
    assert module is None or isinstance(module, Module)
    assert isinstance(import_list, ImportList)
    assert isinstance(definition_list, list)
    super().__init__(**kwargs)
    self.module = module
    self.import_list = import_list
    self.definition_list = definition_list

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.module == other.module
            and self.import_list == other.import_list
            and self.definition_list == other.definition_list)

  def __repr__(self):
    return "%s(%r, %r, %r)" % (self.__class__.__name__, self.module,
                               self.import_list, self.definition_list)
|
||||
|
||||
|
||||
class Ordinal(NodeBase):
  """Represents an ordinal value labeling, e.g., a struct field."""

  def __init__(self, value, **kwargs):
    assert isinstance(value, int)
    super().__init__(**kwargs)
    self.value = value

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.value == other.value)
|
||||
|
||||
|
||||
class Parameter(NodeBase):
  """Represents a method request or response parameter."""

  def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
    assert isinstance(mojom_name, str)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert isinstance(typename, str)
    super().__init__(**kwargs)
    self.mojom_name = mojom_name
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.typename = typename

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.mojom_name == other.mojom_name
            and self.attribute_list == other.attribute_list
            and self.ordinal == other.ordinal
            and self.typename == other.typename)
|
||||
|
||||
|
||||
class ParameterList(NodeListBase):
  """Represents a list of (method request or response) parameters."""

  _list_item_type = Parameter
|
||||
|
||||
|
||||
class Struct(Definition):
  """Represents a struct definition.  A None body denotes a native-only
  struct declaration."""

  def __init__(self, mojom_name, attribute_list, body, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert body is None or isinstance(body, StructBody)
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.body = body

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.body == other.body)

  def __repr__(self):
    return "Struct(mojom_name = %s, attribute_list = %s, body = %s)" % (
        self.mojom_name, self.attribute_list, self.body)
|
||||
|
||||
|
||||
class StructField(Definition):
  """Represents a struct field definition."""

  def __init__(self, mojom_name, attribute_list, ordinal, typename,
               default_value, **kwargs):
    assert isinstance(mojom_name, str)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert isinstance(typename, str)
    # The optional default value is currently either a value as a string or a
    # "wrapped identifier".
    assert default_value is None or isinstance(default_value, (str, tuple))
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.typename = typename
    self.default_value = default_value

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.ordinal == other.ordinal
            and self.typename == other.typename
            and self.default_value == other.default_value)

  def __repr__(self):
    # Fixed: the format string previously omitted the closing parenthesis,
    # producing unbalanced output like "StructField(... default_value = x".
    return ("StructField(mojom_name = %s, attribute_list = %s, ordinal = %s, "
            "typename = %s, default_value = %s)") % (
                self.mojom_name, self.attribute_list, self.ordinal,
                self.typename, self.default_value)
|
||||
|
||||
|
||||
# This needs to be declared after |StructField|.
|
||||
# This needs to be declared after |StructField|.
class StructBody(NodeListBase):
  """Represents the body of (i.e., list of definitions inside) a struct."""

  _list_item_type = (Const, Enum, StructField)
|
||||
|
||||
|
||||
class Union(Definition):
  """Represents a union definition."""

  def __init__(self, mojom_name, attribute_list, body, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert isinstance(body, UnionBody)
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.body = body

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.body == other.body)
|
||||
|
||||
|
||||
class UnionField(Definition):
  """Represents a single field inside a union definition."""

  def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
    assert isinstance(mojom_name, str)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert isinstance(typename, str)
    super().__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.typename = typename

  def __eq__(self, other):
    return (super().__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.ordinal == other.ordinal
            and self.typename == other.typename)
|
||||
|
||||
|
||||
class UnionBody(NodeListBase):
  """Represents the body of (i.e., list of fields inside) a union."""

  # Only union fields may appear inside a union body.
  _list_item_type = UnionField
|
|
@ -0,0 +1,115 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import unittest
|
||||
|
||||
from mojom.parse import ast
|
||||
|
||||
class _TestNode(ast.NodeBase):
  """Node type for tests."""

  def __init__(self, value, **kwargs):
    """Stores an arbitrary |value| payload for equality checks."""
    super().__init__(**kwargs)
    self.value = value

  def __eq__(self, other):
    """Equal when the base comparison holds and the payloads match."""
    if not super().__eq__(other):
      return False
    return self.value == other.value
|
||||
|
||||
class _TestNodeList(ast.NodeListBase):
  """Node list type for tests."""

  # Lists built in these tests may only contain |_TestNode| items.
  _list_item_type = _TestNode
|
||||
|
||||
class ASTTest(unittest.TestCase):
  """Tests various AST classes.

  Note: uses assertEqual/assertNotEqual; the deprecated assertEquals and
  assertNotEquals aliases were removed in Python 3.12.
  """

  def testNodeBase(self):
    """Checks NodeBase equality semantics and filename/lineno handling."""
    # Test |__eq__()|; this is only used for testing, where we want to do
    # comparison by value and ignore filenames/line numbers (for convenience).
    node1 = ast.NodeBase(filename="hello.mojom", lineno=123)
    node2 = ast.NodeBase()
    self.assertEqual(node1, node2)
    self.assertEqual(node2, node1)

    # Check that |__ne__()| just defers to |__eq__()| properly.
    self.assertFalse(node1 != node2)
    self.assertFalse(node2 != node1)

    # Check that |filename| and |lineno| are set properly (and are None by
    # default).
    self.assertEqual(node1.filename, "hello.mojom")
    self.assertEqual(node1.lineno, 123)
    self.assertIsNone(node2.filename)
    self.assertIsNone(node2.lineno)

    # |NodeBase|'s |__eq__()| should compare types (and a subclass's |__eq__()|
    # should first defer to its superclass's).
    node3 = _TestNode(123)
    self.assertNotEqual(node1, node3)
    self.assertNotEqual(node3, node1)
    # Also test |__eq__()| directly.
    self.assertFalse(node1 == node3)
    self.assertFalse(node3 == node1)

    node4 = _TestNode(123, filename="world.mojom", lineno=123)
    self.assertEqual(node4, node3)
    node5 = _TestNode(456)
    self.assertNotEqual(node5, node4)

  def testNodeListBase(self):
    """Checks NodeListBase construction, Append, Insert, and iteration."""
    node1 = _TestNode(1, filename="foo.mojom", lineno=1)
    # Equal to, but not the same as, |node1|:
    node1b = _TestNode(1, filename="foo.mojom", lineno=1)
    node2 = _TestNode(2, filename="foo.mojom", lineno=2)

    nodelist1 = _TestNodeList()  # Contains: (empty).
    self.assertEqual(nodelist1, nodelist1)
    self.assertEqual(nodelist1.items, [])
    self.assertIsNone(nodelist1.filename)
    self.assertIsNone(nodelist1.lineno)

    nodelist2 = _TestNodeList(node1)  # Contains: 1.
    self.assertEqual(nodelist2, nodelist2)
    self.assertEqual(nodelist2.items, [node1])
    self.assertNotEqual(nodelist2, nodelist1)
    self.assertEqual(nodelist2.filename, "foo.mojom")
    self.assertEqual(nodelist2.lineno, 1)

    nodelist3 = _TestNodeList([node2])  # Contains: 2.
    self.assertEqual(nodelist3.items, [node2])
    self.assertNotEqual(nodelist3, nodelist1)
    self.assertNotEqual(nodelist3, nodelist2)
    self.assertEqual(nodelist3.filename, "foo.mojom")
    self.assertEqual(nodelist3.lineno, 2)

    nodelist1.Append(node1b)  # Contains: 1.
    self.assertEqual(nodelist1.items, [node1])
    self.assertEqual(nodelist1, nodelist2)
    self.assertNotEqual(nodelist1, nodelist3)
    self.assertEqual(nodelist1.filename, "foo.mojom")
    self.assertEqual(nodelist1.lineno, 1)

    nodelist1.Append(node2)  # Contains: 1, 2.
    self.assertEqual(nodelist1.items, [node1, node2])
    self.assertNotEqual(nodelist1, nodelist2)
    self.assertNotEqual(nodelist1, nodelist3)
    self.assertEqual(nodelist1.lineno, 1)

    nodelist2.Append(node2)  # Contains: 1, 2.
    self.assertEqual(nodelist2.items, [node1, node2])
    self.assertEqual(nodelist2, nodelist1)
    self.assertNotEqual(nodelist2, nodelist3)
    self.assertEqual(nodelist2.lineno, 1)

    nodelist3.Insert(node1)  # Contains: 1, 2.
    self.assertEqual(nodelist3.items, [node1, node2])
    self.assertEqual(nodelist3, nodelist1)
    self.assertEqual(nodelist3, nodelist2)
    self.assertEqual(nodelist3.lineno, 1)

    # Test iteration:
    for i, item in enumerate(nodelist1, start=1):
      self.assertEqual(item.value, i)
|
|
@ -0,0 +1,83 @@
|
|||
# Copyright 2018 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Helpers for processing conditionally enabled features in a mojom."""
|
||||
|
||||
from mojom.error import Error
|
||||
from mojom.parse import ast
|
||||
|
||||
|
||||
class EnableIfError(Error):
  """Error raised while processing EnableIf/EnableIfNot attributes.

  Raised, e.g., when more than one such attribute appears on a single
  definition (the original docstring was truncated: "errors from .").
  """

  def __init__(self, filename, message, lineno=None):
    # EnableIf errors never carry addenda.
    super().__init__(filename, message, lineno=lineno, addenda=None)
|
||||
|
||||
|
||||
def _IsEnabled(definition, enabled_features):
  """Returns true if a definition is enabled.

  A definition is enabled if it has no EnableIf/EnableIfNot attribute.
  It is retained if it has an EnableIf attribute and the attribute is in
  enabled_features. It is retained if it has an EnableIfNot attribute and the
  attribute is not in enabled features.
  """
  attributes = getattr(definition, "attribute_list", None)
  if not attributes:
    return True

  # At most one EnableIf/EnableIfNot attribute may appear per definition.
  guards = [a for a in attributes if a.key in ('EnableIf', 'EnableIfNot')]
  if len(guards) > 1:
    raise EnableIfError(
        definition.filename,
        "EnableIf/EnableIfNot attribute may only be set once per field.",
        definition.lineno)

  for attribute in guards:
    if attribute.key == 'EnableIf' and attribute.value not in enabled_features:
      return False
    if attribute.key == 'EnableIfNot' and attribute.value in enabled_features:
      return False
  return True
|
||||
|
||||
|
||||
def _FilterDisabledFromNodeList(node_list, enabled_features):
  """Prunes disabled items from |node_list| in place, then recurses into
  the surviving items."""
  if not node_list:
    return
  assert isinstance(node_list, ast.NodeListBase)
  kept = [item for item in node_list.items
          if _IsEnabled(item, enabled_features)]
  node_list.items = kept
  for item in kept:
    _FilterDefinition(item, enabled_features)
|
||||
|
||||
|
||||
def _FilterDefinition(definition, enabled_features):
  """Filters definitions with a body."""
  # Enums hold their values in |enum_value_list|; methods have two
  # independent parameter lists; the remaining container kinds all expose a
  # common |body| node list. Definitions of any other kind have nothing
  # nested to filter.
  if isinstance(definition, ast.Enum):
    _FilterDisabledFromNodeList(definition.enum_value_list, enabled_features)
  elif isinstance(definition, ast.Method):
    _FilterDisabledFromNodeList(definition.parameter_list, enabled_features)
    _FilterDisabledFromNodeList(definition.response_parameter_list,
                                enabled_features)
  elif isinstance(definition,
                  (ast.Interface, ast.Struct, ast.Union, ast.Feature)):
    _FilterDisabledFromNodeList(definition.body, enabled_features)
|
||||
|
||||
|
||||
def RemoveDisabledDefinitions(mojom, enabled_features):
  """Removes conditionally disabled definitions from a Mojom node.

  Mutates |mojom| in place: disabled imports and top-level definitions are
  dropped, and the surviving definitions are filtered recursively.
  """
  enabled_imports = [imported_file for imported_file in mojom.import_list
                     if _IsEnabled(imported_file, enabled_features)]
  mojom.import_list = ast.ImportList(enabled_imports)

  mojom.definition_list = [definition for definition in mojom.definition_list
                           if _IsEnabled(definition, enabled_features)]
  for definition in mojom.definition_list:
    _FilterDefinition(definition, enabled_features)
|
|
@ -0,0 +1,376 @@
|
|||
# Copyright 2018 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import importlib.util
|
||||
import os
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    # An empty |tail| means we reached the filesystem root without finding
    # |dirname|; the assert turns that into a hard failure instead of an
    # infinite loop.
    assert tail
    if tail == dirname:
      return path
|
||||
|
||||
try:
|
||||
importlib.util.find_spec("mojom")
|
||||
except ImportError:
|
||||
sys.path.append(os.path.join(_GetDirAbove('pylib'), 'pylib'))
|
||||
import mojom.parse.ast as ast
|
||||
import mojom.parse.conditional_features as conditional_features
|
||||
import mojom.parse.parser as parser
|
||||
|
||||
ENABLED_FEATURES = frozenset({'red', 'green', 'blue'})
|
||||
|
||||
class ConditionalFeaturesTest(unittest.TestCase):
  """Tests |mojom.parse.conditional_features|.

  Fixes relative to the original: the three error-path tests were all named
  testMultipleEnableIfs, so Python silently kept only the last definition and
  the first two never ran; they now have distinct names. The deprecated
  assertEquals alias (removed in Python 3.12) is replaced by assertEqual.
  """

  def parseAndAssertEqual(self, source, expected_source):
    """Parses |source|, filters it, and compares with parsed
    |expected_source|."""
    definition = parser.Parse(source, "my_file.mojom")
    conditional_features.RemoveDisabledDefinitions(definition,
                                                   ENABLED_FEATURES)
    expected = parser.Parse(expected_source, "my_file.mojom")
    self.assertEqual(definition, expected)

  def testFilterConst(self):
    """Test that Consts are correctly filtered."""
    const_source = """
      [EnableIf=blue]
      const int kMyConst1 = 1;
      [EnableIf=orange]
      const double kMyConst2 = 2;
      const int kMyConst3 = 3;
    """
    expected_source = """
      [EnableIf=blue]
      const int kMyConst1 = 1;
      const int kMyConst3 = 3;
    """
    self.parseAndAssertEqual(const_source, expected_source)

  def testFilterIfNotConst(self):
    """Test that EnableIfNot Consts are correctly filtered."""
    const_source = """
      [EnableIfNot=blue]
      const int kMyConst1 = 1;
      [EnableIfNot=orange]
      const double kMyConst2 = 2;
      [EnableIf=blue]
      const int kMyConst3 = 3;
      [EnableIfNot=blue]
      const int kMyConst4 = 4;
      [EnableIfNot=purple]
      const int kMyConst5 = 5;
    """
    expected_source = """
      [EnableIfNot=orange]
      const double kMyConst2 = 2;
      [EnableIf=blue]
      const int kMyConst3 = 3;
      [EnableIfNot=purple]
      const int kMyConst5 = 5;
    """
    self.parseAndAssertEqual(const_source, expected_source)

  def testFilterIfNotMultipleConst(self):
    """Test that repeated EnableIfNot features are correctly filtered."""
    const_source = """
      [EnableIfNot=blue]
      const int kMyConst1 = 1;
      [EnableIfNot=orange]
      const double kMyConst2 = 2;
      [EnableIfNot=orange]
      const int kMyConst3 = 3;
    """
    expected_source = """
      [EnableIfNot=orange]
      const double kMyConst2 = 2;
      [EnableIfNot=orange]
      const int kMyConst3 = 3;
    """
    self.parseAndAssertEqual(const_source, expected_source)

  def testFilterEnum(self):
    """Test that EnumValues are correctly filtered from an Enum."""
    enum_source = """
      enum MyEnum {
        [EnableIf=purple]
        VALUE1,
        [EnableIf=blue]
        VALUE2,
        VALUE3,
      };
    """
    expected_source = """
      enum MyEnum {
        [EnableIf=blue]
        VALUE2,
        VALUE3
      };
    """
    self.parseAndAssertEqual(enum_source, expected_source)

  def testFilterImport(self):
    """Test that imports are correctly filtered from a Mojom."""
    import_source = """
      [EnableIf=blue]
      import "foo.mojom";
      import "bar.mojom";
      [EnableIf=purple]
      import "baz.mojom";
    """
    expected_source = """
      [EnableIf=blue]
      import "foo.mojom";
      import "bar.mojom";
    """
    self.parseAndAssertEqual(import_source, expected_source)

  def testFilterIfNotImport(self):
    """Test that EnableIfNot imports are correctly filtered from a Mojom."""
    import_source = """
      [EnableIf=blue]
      import "foo.mojom";
      [EnableIfNot=purple]
      import "bar.mojom";
      [EnableIfNot=green]
      import "baz.mojom";
    """
    expected_source = """
      [EnableIf=blue]
      import "foo.mojom";
      [EnableIfNot=purple]
      import "bar.mojom";
    """
    self.parseAndAssertEqual(import_source, expected_source)

  def testFilterInterface(self):
    """Test that definitions are correctly filtered from an Interface."""
    interface_source = """
      interface MyInterface {
        [EnableIf=blue]
        enum MyEnum {
          [EnableIf=purple]
          VALUE1,
          VALUE2,
        };
        [EnableIf=blue]
        const int32 kMyConst = 123;
        [EnableIf=purple]
        MyMethod();
      };
    """
    expected_source = """
      interface MyInterface {
        [EnableIf=blue]
        enum MyEnum {
          VALUE2,
        };
        [EnableIf=blue]
        const int32 kMyConst = 123;
      };
    """
    self.parseAndAssertEqual(interface_source, expected_source)

  def testFilterMethod(self):
    """Test that Parameters are correctly filtered from a Method."""
    method_source = """
      interface MyInterface {
        [EnableIf=blue]
        MyMethod([EnableIf=purple] int32 a) => ([EnableIf=red] int32 b);
      };
    """
    expected_source = """
      interface MyInterface {
        [EnableIf=blue]
        MyMethod() => ([EnableIf=red] int32 b);
      };
    """
    self.parseAndAssertEqual(method_source, expected_source)

  def testFilterStruct(self):
    """Test that definitions are correctly filtered from a Struct."""
    struct_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
          [EnableIf=purple]
          VALUE2,
        };
        [EnableIf=yellow]
        const double kMyConst = 1.23;
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIf=purple]
        int32 c;
        [EnableIf=blue]
        double d;
        int32 e;
        [EnableIf=orange]
        double f;
      };
    """
    expected_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
        };
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIf=blue]
        double d;
        int32 e;
      };
    """
    self.parseAndAssertEqual(struct_source, expected_source)

  def testFilterIfNotStruct(self):
    """Test that EnableIfNot definitions are correctly filtered from a
    Struct."""
    struct_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
          [EnableIfNot=red]
          VALUE2,
        };
        [EnableIfNot=yellow]
        const double kMyConst = 1.23;
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIfNot=purple]
        int32 c;
        [EnableIf=blue]
        double d;
        int32 e;
        [EnableIfNot=red]
        double f;
      };
    """
    expected_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
        };
        [EnableIfNot=yellow]
        const double kMyConst = 1.23;
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIfNot=purple]
        int32 c;
        [EnableIf=blue]
        double d;
        int32 e;
      };
    """
    self.parseAndAssertEqual(struct_source, expected_source)

  def testFilterUnion(self):
    """Test that UnionFields are correctly filtered from a Union."""
    union_source = """
      union MyUnion {
        [EnableIf=yellow]
        int32 a;
        [EnableIf=red]
        bool b;
      };
    """
    expected_source = """
      union MyUnion {
        [EnableIf=red]
        bool b;
      };
    """
    self.parseAndAssertEqual(union_source, expected_source)

  def testSameNameFields(self):
    """Test that mutually exclusive same-name definitions are resolved."""
    mojom_source = """
      enum Foo {
        [EnableIf=red]
        VALUE1 = 5,
        [EnableIf=yellow]
        VALUE1 = 6,
      };
      [EnableIf=red]
      const double kMyConst = 1.23;
      [EnableIf=yellow]
      const double kMyConst = 4.56;
    """
    expected_source = """
      enum Foo {
        [EnableIf=red]
        VALUE1 = 5,
      };
      [EnableIf=red]
      const double kMyConst = 1.23;
    """
    self.parseAndAssertEqual(mojom_source, expected_source)

  def testFeaturesWithEnableIf(self):
    """Test that feature bodies are filtered like other containers."""
    mojom_source = """
      feature Foo {
        const string name = "FooFeature";
        [EnableIf=red]
        const bool default_state = false;
        [EnableIf=yellow]
        const bool default_state = true;
      };
    """
    expected_source = """
      feature Foo {
        const string name = "FooFeature";
        [EnableIf=red]
        const bool default_state = false;
      };
    """
    self.parseAndAssertEqual(mojom_source, expected_source)

  def testMultipleEnableIfs(self):
    """Two EnableIf attributes on one field must raise EnableIfError."""
    source = """
      enum Foo {
        [EnableIf=red,EnableIf=yellow]
        kBarValue = 5,
      };
    """
    definition = parser.Parse(source, "my_file.mojom")
    self.assertRaises(conditional_features.EnableIfError,
                      conditional_features.RemoveDisabledDefinitions,
                      definition, ENABLED_FEATURES)

  def testEnableIfWithEnableIfNot(self):
    """Mixing EnableIf and EnableIfNot on one field must also raise."""
    source = """
      enum Foo {
        [EnableIf=red,EnableIfNot=yellow]
        kBarValue = 5,
      };
    """
    definition = parser.Parse(source, "my_file.mojom")
    self.assertRaises(conditional_features.EnableIfError,
                      conditional_features.RemoveDisabledDefinitions,
                      definition, ENABLED_FEATURES)

  def testMultipleEnableIfNots(self):
    """Two EnableIfNot attributes on one field must also raise."""
    source = """
      enum Foo {
        [EnableIfNot=red,EnableIfNot=yellow]
        kBarValue = 5,
      };
    """
    definition = parser.Parse(source, "my_file.mojom")
    self.assertRaises(conditional_features.EnableIfError,
                      conditional_features.RemoveDisabledDefinitions,
                      definition, ENABLED_FEATURES)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
249
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
Normal file
249
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
Normal file
|
@ -0,0 +1,249 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom import fileutil
|
||||
from mojom.error import Error
|
||||
|
||||
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||
from ply.lex import TOKEN
|
||||
|
||||
|
||||
class LexError(Error):
  """Class for errors from the lexer."""

  def __init__(self, filename, message, lineno):
    # Lexer errors always know their line number; no addenda are attached.
    super().__init__(filename, message, lineno=lineno)
|
||||
|
||||
|
||||
# We have methods which look like they could be functions:
# pylint: disable=R0201
class Lexer:
  """Tokenizer for Mojo IDL source, implemented on top of ply.lex.

  NOTE: ply.lex derives each token's regular expression from the docstring
  of the corresponding t_* method and matches function-based rules in
  definition order, so those docstrings and the rule ordering below are
  functional, not documentation.
  """

  def __init__(self, filename):
    # The filename is only used to attribute LexError diagnostics.
    self.filename = filename

  ######################-- PRIVATE --######################

  ##
  ## Internal auxiliary methods
  ##
  def _error(self, msg, token):
    # All lexing failures are reported as LexError with the source line.
    raise LexError(self.filename, msg, token.lineno)

  ##
  ## Reserved keywords
  ##
  keywords = (
      'HANDLE',
      'IMPORT',
      'MODULE',
      'STRUCT',
      'UNION',
      'INTERFACE',
      'ENUM',
      'CONST',
      'TRUE',
      'FALSE',
      'DEFAULT',
      'ARRAY',
      'MAP',
      'ASSOCIATED',
      'PENDING_REMOTE',
      'PENDING_RECEIVER',
      'PENDING_ASSOCIATED_REMOTE',
      'PENDING_ASSOCIATED_RECEIVER',
      'FEATURE',
  )

  # Maps the lower-case spelling of each keyword (as it appears in .mojom
  # files) to its token type; consulted by t_NAME below.
  keyword_map = {}
  for keyword in keywords:
    keyword_map[keyword.lower()] = keyword

  ##
  ## All the tokens recognized by the lexer
  ##
  tokens = keywords + (
      # Identifiers
      'NAME',

      # Constants
      'ORDINAL',
      'INT_CONST_DEC',
      'INT_CONST_HEX',
      'FLOAT_CONST',

      # String literals
      'STRING_LITERAL',

      # Operators
      'MINUS',
      'PLUS',
      'QSTN',

      # Assignment
      'EQUALS',

      # Request / response
      'RESPONSE',

      # Delimiters
      'LPAREN',
      'RPAREN',  # ( )
      'LBRACKET',
      'RBRACKET',  # [ ]
      'LBRACE',
      'RBRACE',  # { }
      'LANGLE',
      'RANGLE',  # < >
      'SEMI',  # ;
      'COMMA',
      'DOT'  # , .
  )

  ##
  ## Regexes for use in tokens
  ##

  # valid C identifiers (K&R2: A.2.3)
  identifier = r'[a-zA-Z_][0-9a-zA-Z_]*'

  hex_prefix = '0[xX]'
  hex_digits = '[0-9a-fA-F]+'

  # integer constants (K&R2: A.2.5.1)
  decimal_constant = '0|([1-9][0-9]*)'
  hex_constant = hex_prefix + hex_digits
  # Don't allow octal constants (even invalid octal).
  octal_constant_disallowed = '0[0-9]+'

  # character constants (K&R2: A.2.5.2)
  # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
  # directives with Windows paths as filenames (..\..\dir\file)
  # For the same reason, decimal_escape allows all digit sequences. We want to
  # parse all correct code, even if it means to sometimes parse incorrect
  # code.
  #
  simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
  decimal_escape = r"""(\d+)"""
  hex_escape = r"""(x[0-9a-fA-F]+)"""
  bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""

  escape_sequence = \
      r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'

  # string literals (K&R2: A.2.6)
  string_char = r"""([^"\\\n]|""" + escape_sequence + ')'
  string_literal = '"' + string_char + '*"'
  bad_string_literal = '"' + string_char + '*' + bad_escape + string_char + '*"'

  # floating constants (K&R2: A.2.5.3)
  exponent_part = r"""([eE][-+]?[0-9]+)"""
  fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
  floating_constant = \
      '(((('+fractional_constant+')'+ \
      exponent_part+'?)|([0-9]+'+exponent_part+')))'

  # Ordinals
  ordinal = r'@[0-9]+'
  missing_ordinal_value = r'@'
  # Don't allow ordinal values in octal (even invalid octal, like 09) or
  # hexadecimal.
  octal_or_hex_ordinal_disallowed = (
      r'@((0[0-9]+)|(' + hex_prefix + hex_digits + '))')

  ##
  ## Rules for the normal state
  ##
  # Whitespace other than newlines carries no meaning in mojom.
  t_ignore = ' \t\r'

  # Newlines
  # (The docstring below is the ply token regex; lineno is tracked manually.)
  def t_NEWLINE(self, t):
    r'\n+'
    t.lexer.lineno += len(t.value)

  # Operators
  t_MINUS = r'-'
  t_PLUS = r'\+'
  t_QSTN = r'\?'

  # =
  t_EQUALS = r'='

  # =>
  t_RESPONSE = r'=>'

  # Delimiters
  t_LPAREN = r'\('
  t_RPAREN = r'\)'
  t_LBRACKET = r'\['
  t_RBRACKET = r'\]'
  t_LBRACE = r'\{'
  t_RBRACE = r'\}'
  t_LANGLE = r'<'
  t_RANGLE = r'>'
  t_COMMA = r','
  t_DOT = r'\.'
  t_SEMI = r';'

  t_STRING_LITERAL = string_literal

  # The following floating and integer constants are defined as
  # functions to impose a strict order (otherwise, decimal
  # is placed before the others because its regex is longer,
  # and this is bad)
  #
  @TOKEN(floating_constant)
  def t_FLOAT_CONST(self, t):
    return t

  @TOKEN(hex_constant)
  def t_INT_CONST_HEX(self, t):
    return t

  # Must come before t_INT_CONST_DEC so a leading-zero number is rejected.
  @TOKEN(octal_constant_disallowed)
  def t_OCTAL_CONSTANT_DISALLOWED(self, t):
    msg = "Octal values not allowed"
    self._error(msg, t)

  @TOKEN(decimal_constant)
  def t_INT_CONST_DEC(self, t):
    return t

  # unmatched string literals are caught by the preprocessor

  @TOKEN(bad_string_literal)
  def t_BAD_STRING_LITERAL(self, t):
    msg = "String contains invalid escape code"
    self._error(msg, t)

  # Handle ordinal-related tokens in the right order:
  @TOKEN(octal_or_hex_ordinal_disallowed)
  def t_OCTAL_OR_HEX_ORDINAL_DISALLOWED(self, t):
    msg = "Octal and hexadecimal ordinal values not allowed"
    self._error(msg, t)

  @TOKEN(ordinal)
  def t_ORDINAL(self, t):
    return t

  @TOKEN(missing_ordinal_value)
  def t_BAD_ORDINAL(self, t):
    msg = "Missing ordinal value"
    self._error(msg, t)

  # Identifiers are re-typed as keyword tokens when they match a keyword.
  @TOKEN(identifier)
  def t_NAME(self, t):
    t.type = self.keyword_map.get(t.value, "NAME")
    return t

  # Ignore C and C++ style comments
  def t_COMMENT(self, t):
    r'(/\*(.|\n)*?\*/)|(//.*(\n[ \t]*//.*)*)'
    t.lexer.lineno += t.value.count("\n")

  def t_error(self, t):
    msg = "Illegal character %s" % repr(t.value[0])
    self._error(msg, t)
|
|
@ -0,0 +1,194 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import importlib.util
|
||||
import os.path
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    # An empty |tail| means we reached the filesystem root without finding
    # |dirname|; the assert turns that into a hard failure instead of an
    # infinite loop.
    assert tail
    if tail == dirname:
      return path
|
||||
|
||||
sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
|
||||
from ply import lex
|
||||
|
||||
try:
|
||||
importlib.util.find_spec("mojom")
|
||||
except ImportError:
|
||||
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
|
||||
import mojom.parse.lexer
|
||||
|
||||
# This (monkey-patching LexToken to make comparison value-based) is evil, but
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
# for object identity.)
def _LexTokenEq(self, other):
  """Value-based equality over (type, value, lineno, lexpos)."""
  return ((self.type, self.value, self.lineno, self.lexpos) ==
          (other.type, other.value, other.lineno, other.lexpos))


setattr(lex.LexToken, '__eq__', _LexTokenEq)
|
||||
|
||||
|
||||
def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Builds a ply LexToken from explicit fields.

  Note that |lineno| is 1-based, but |lexpos| is 0-based.
  """
  token = lex.LexToken()
  token.type = token_type
  token.value = value
  token.lineno = lineno
  token.lexpos = lexpos
  return token
|
||||
|
||||
|
||||
def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes a LexToken for the given keyword."""
  # Keyword token types are the upper-cased keyword; the token value is the
  # lower-case spelling as it appears in source.
  return _MakeLexToken(keyword.upper(), keyword.lower(), **kwargs)
|
||||
|
||||
|
||||
class LexerTest(unittest.TestCase):
|
||||
"""Tests |mojom.parse.lexer.Lexer|."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
unittest.TestCase.__init__(self, *args, **kwargs)
|
||||
# Clone all lexer instances from this one, since making a lexer is slow.
|
||||
self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))
|
||||
|
||||
def testValidKeywords(self):
|
||||
"""Tests valid keywords."""
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("handle"), _MakeLexTokenForKeyword("handle"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("import"), _MakeLexTokenForKeyword("import"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("module"), _MakeLexTokenForKeyword("module"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("struct"), _MakeLexTokenForKeyword("struct"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("union"), _MakeLexTokenForKeyword("union"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("interface"),
|
||||
_MakeLexTokenForKeyword("interface"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("enum"), _MakeLexTokenForKeyword("enum"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("const"), _MakeLexTokenForKeyword("const"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("true"), _MakeLexTokenForKeyword("true"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("false"), _MakeLexTokenForKeyword("false"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("default"),
|
||||
_MakeLexTokenForKeyword("default"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("array"), _MakeLexTokenForKeyword("array"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("map"), _MakeLexTokenForKeyword("map"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("associated"),
|
||||
_MakeLexTokenForKeyword("associated"))
|
||||
|
||||
def testValidIdentifiers(self):
|
||||
"""Tests identifiers."""
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("abcd"), _MakeLexToken("NAME", "abcd"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("AbC_d012_"),
|
||||
_MakeLexToken("NAME", "AbC_d012_"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("_0123"), _MakeLexToken("NAME", "_0123"))
|
||||
|
||||
def testInvalidIdentifiers(self):
|
||||
with self.assertRaisesRegexp(
|
||||
mojom.parse.lexer.LexError,
|
||||
r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
|
||||
self._TokensForInput("$abc")
|
||||
with self.assertRaisesRegexp(
|
||||
mojom.parse.lexer.LexError,
|
||||
r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
|
||||
self._TokensForInput("a$bc")
|
||||
|
||||
def testDecimalIntegerConstants(self):
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("0"), _MakeLexToken("INT_CONST_DEC", "0"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("1"), _MakeLexToken("INT_CONST_DEC", "1"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("123"), _MakeLexToken("INT_CONST_DEC", "123"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("10"), _MakeLexToken("INT_CONST_DEC", "10"))
|
||||
|
||||
def testValidTokens(self):
|
||||
"""Tests valid tokens (which aren't tested elsewhere)."""
|
||||
# Keywords tested in |testValidKeywords|.
|
||||
# NAME tested in |testValidIdentifiers|.
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("@123"), _MakeLexToken("ORDINAL", "@123"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("456"), _MakeLexToken("INT_CONST_DEC", "456"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("0x01aB2eF3"),
|
||||
_MakeLexToken("INT_CONST_HEX", "0x01aB2eF3"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("123.456"),
|
||||
_MakeLexToken("FLOAT_CONST", "123.456"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("\"hello\""),
|
||||
_MakeLexToken("STRING_LITERAL", "\"hello\""))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("+"), _MakeLexToken("PLUS", "+"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("-"), _MakeLexToken("MINUS", "-"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("?"), _MakeLexToken("QSTN", "?"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("="), _MakeLexToken("EQUALS", "="))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("=>"), _MakeLexToken("RESPONSE", "=>"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("("), _MakeLexToken("LPAREN", "("))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(")"), _MakeLexToken("RPAREN", ")"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("["), _MakeLexToken("LBRACKET", "["))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("]"), _MakeLexToken("RBRACKET", "]"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("{"), _MakeLexToken("LBRACE", "{"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("}"), _MakeLexToken("RBRACE", "}"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput("<"), _MakeLexToken("LANGLE", "<"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(">"), _MakeLexToken("RANGLE", ">"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(";"), _MakeLexToken("SEMI", ";"))
|
||||
self.assertEquals(
|
||||
self._SingleTokenForInput(","), _MakeLexToken("COMMA", ","))
|
||||
self.assertEquals(self._SingleTokenForInput("."), _MakeLexToken("DOT", "."))
|
||||
|
||||
def _TokensForInput(self, input_string):
  """Lexes input_string with a fresh clone of the zygote lexer and returns
  every token produced, in order."""
  lexer = self._zygote_lexer.clone()
  lexer.input(input_string)
  # lexer.token() returns None at end of input, which terminates iter().
  return list(iter(lexer.token, None))
|
||||
|
||||
def _SingleTokenForInput(self, input_string):
  """Returns the single token lexed from input_string.

  Asserts that lexing yields exactly one token.
  """
  tokens = self._TokensForInput(input_string)
  assert len(tokens) == 1
  return tokens[0]
|
||||
|
||||
|
||||
# Allow running this test file directly, e.g. python3 lexer_unittest.py.
if __name__ == "__main__":
  unittest.main()
|
510
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
Normal file
510
utils/codegen/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
Normal file
|
@ -0,0 +1,510 @@
|
|||
# Copyright 2014 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Generates a syntax tree from a Mojo IDL file."""
|
||||
|
||||
# Breaking parser stanzas is unhelpful so allow longer lines.
|
||||
# pylint: disable=line-too-long
|
||||
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from mojom import fileutil
|
||||
from mojom.error import Error
|
||||
from mojom.parse import ast
|
||||
from mojom.parse.lexer import Lexer
|
||||
|
||||
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||
from ply import lex
|
||||
from ply import yacc
|
||||
|
||||
_MAX_ORDINAL_VALUE = 0xffffffff
|
||||
_MAX_ARRAY_SIZE = 0xffffffff
|
||||
|
||||
|
||||
class ParseError(Error):
  """Error raised when the parser rejects a mojom source file."""

  def __init__(self, filename, message, lineno=None, snippet=None):
    # Forward to the base Error, packaging the optional source snippet as a
    # single addendum line.
    addenda = [snippet] if snippet else None
    super().__init__(self, filename, message, lineno=lineno, addenda=addenda) if False else None
    Error.__init__(
        self,
        filename,
        message,
        lineno=lineno,
        addenda=addenda)
|
||||
|
||||
|
||||
# We have methods which look like they could be functions:
# pylint: disable=R0201
class Parser:
  """PLY yacc grammar specification for mojom IDL.

  NOTE: The docstrings of the p_* methods below are PLY grammar productions,
  not ordinary documentation. PLY reads them to build the parser tables, so
  they must not be reworded. See http://www.dabeaz.com/ply/ply.html.
  """

  def __init__(self, lexer, source, filename):
    # |tokens| is required by PLY: it lists every token name the lexer emits.
    self.tokens = lexer.tokens
    # Full source text, retained only so _GetSnippet() can show error context.
    self.source = source
    self.filename = filename

  # Names of functions
  #
  # In general, we name functions after the left-hand-side of the rule(s) that
  # they handle. E.g., |p_foo_bar| for a rule |foo_bar : ...|.
  #
  # There may be multiple functions handling rules for the same left-hand-side;
  # then we name the functions |p_foo_bar_N| (for left-hand-side |foo_bar|),
  # where N is a number (numbered starting from 1). Note that using multiple
  # functions is actually more efficient than having single functions handle
  # multiple rules (and, e.g., distinguishing them by examining |len(p)|).
  #
  # It's also possible to have a function handling multiple rules with different
  # left-hand-sides. We do not do this.
  #
  # See http://www.dabeaz.com/ply/ply.html#ply_nn25 for more details.

  # TODO(vtl): Get rid of the braces in the module "statement". (Consider
  # renaming "module" -> "package".) Then we'll be able to have a single rule
  # for root (by making module "optional").
  def p_root_1(self, p):
    """root : """
    p[0] = ast.Mojom(None, ast.ImportList(), [])

  def p_root_2(self, p):
    """root : root module"""
    if p[1].module is not None:
      raise ParseError(
          self.filename,
          "Multiple \"module\" statements not allowed:",
          p[2].lineno,
          snippet=self._GetSnippet(p[2].lineno))
    if p[1].import_list.items or p[1].definition_list:
      raise ParseError(
          self.filename,
          "\"module\" statements must precede imports and definitions:",
          p[2].lineno,
          snippet=self._GetSnippet(p[2].lineno))
    p[0] = p[1]
    p[0].module = p[2]

  def p_root_3(self, p):
    """root : root import"""
    if p[1].definition_list:
      raise ParseError(
          self.filename,
          "\"import\" statements must precede definitions:",
          p[2].lineno,
          snippet=self._GetSnippet(p[2].lineno))
    p[0] = p[1]
    p[0].import_list.Append(p[2])

  def p_root_4(self, p):
    """root : root definition"""
    p[0] = p[1]
    p[0].definition_list.append(p[2])

  def p_import(self, p):
    """import : attribute_section IMPORT STRING_LITERAL SEMI"""
    # 'eval' the literal to strip the quotes.
    # TODO(vtl): This eval is dubious. We should unquote/unescape ourselves.
    p[0] = ast.Import(
        p[1], eval(p[3]), filename=self.filename, lineno=p.lineno(2))

  def p_module(self, p):
    """module : attribute_section MODULE identifier_wrapped SEMI"""
    p[0] = ast.Module(p[3], p[1], filename=self.filename, lineno=p.lineno(2))

  def p_definition(self, p):
    """definition : struct
                  | union
                  | interface
                  | enum
                  | const
                  | feature"""
    p[0] = p[1]

  def p_attribute_section_1(self, p):
    """attribute_section : """
    p[0] = None

  def p_attribute_section_2(self, p):
    """attribute_section : LBRACKET attribute_list RBRACKET"""
    p[0] = p[2]

  def p_attribute_list_1(self, p):
    """attribute_list : """
    p[0] = ast.AttributeList()

  def p_attribute_list_2(self, p):
    """attribute_list : nonempty_attribute_list"""
    p[0] = p[1]

  def p_nonempty_attribute_list_1(self, p):
    """nonempty_attribute_list : attribute"""
    p[0] = ast.AttributeList(p[1])

  def p_nonempty_attribute_list_2(self, p):
    """nonempty_attribute_list : nonempty_attribute_list COMMA attribute"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_attribute_1(self, p):
    """attribute : name_wrapped EQUALS identifier_wrapped"""
    # p[3] is an ('IDENTIFIER', name) pair; only the name is stored.
    p[0] = ast.Attribute(p[1],
                         p[3][1],
                         filename=self.filename,
                         lineno=p.lineno(1))

  def p_attribute_2(self, p):
    """attribute : name_wrapped EQUALS evaled_literal
                 | name_wrapped EQUALS name_wrapped"""
    p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))

  def p_attribute_3(self, p):
    """attribute : name_wrapped"""
    # A bare attribute name is shorthand for Name=true.
    p[0] = ast.Attribute(p[1], True, filename=self.filename, lineno=p.lineno(1))

  def p_evaled_literal(self, p):
    """evaled_literal : literal"""
    # 'eval' the literal to strip the quotes. Handle keywords "true" and "false"
    # specially since they cannot directly be evaluated to python boolean
    # values.
    if p[1] == "true":
      p[0] = True
    elif p[1] == "false":
      p[0] = False
    else:
      p[0] = eval(p[1])

  def p_struct_1(self, p):
    """struct : attribute_section STRUCT name_wrapped LBRACE struct_body RBRACE SEMI"""
    p[0] = ast.Struct(p[3], p[1], p[5])

  def p_struct_2(self, p):
    """struct : attribute_section STRUCT name_wrapped SEMI"""
    # A bodiless struct declaration (None body).
    p[0] = ast.Struct(p[3], p[1], None)

  def p_struct_body_1(self, p):
    """struct_body : """
    p[0] = ast.StructBody()

  def p_struct_body_2(self, p):
    """struct_body : struct_body const
                   | struct_body enum
                   | struct_body struct_field"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_struct_field(self, p):
    """struct_field : attribute_section typename name_wrapped ordinal default SEMI"""
    p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])

  def p_feature(self, p):
    """feature : attribute_section FEATURE NAME LBRACE feature_body RBRACE SEMI"""
    p[0] = ast.Feature(p[3], p[1], p[5])

  def p_feature_body_1(self, p):
    """feature_body : """
    p[0] = ast.FeatureBody()

  def p_feature_body_2(self, p):
    """feature_body : feature_body const"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_union(self, p):
    """union : attribute_section UNION name_wrapped LBRACE union_body RBRACE SEMI"""
    p[0] = ast.Union(p[3], p[1], p[5])

  def p_union_body_1(self, p):
    """union_body : """
    p[0] = ast.UnionBody()

  def p_union_body_2(self, p):
    """union_body : union_body union_field"""
    p[0] = p[1]
    p[1].Append(p[2])

  def p_union_field(self, p):
    """union_field : attribute_section typename name_wrapped ordinal SEMI"""
    p[0] = ast.UnionField(p[3], p[1], p[4], p[2])

  def p_default_1(self, p):
    """default : """
    p[0] = None

  def p_default_2(self, p):
    """default : EQUALS constant"""
    p[0] = p[2]

  def p_interface(self, p):
    """interface : attribute_section INTERFACE name_wrapped LBRACE interface_body RBRACE SEMI"""
    p[0] = ast.Interface(p[3], p[1], p[5])

  def p_interface_body_1(self, p):
    """interface_body : """
    p[0] = ast.InterfaceBody()

  def p_interface_body_2(self, p):
    """interface_body : interface_body const
                      | interface_body enum
                      | interface_body method"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_response_1(self, p):
    """response : """
    p[0] = None

  def p_response_2(self, p):
    """response : RESPONSE LPAREN parameter_list RPAREN"""
    p[0] = p[3]

  def p_method(self, p):
    """method : attribute_section name_wrapped ordinal LPAREN parameter_list RPAREN response SEMI"""
    p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])

  def p_parameter_list_1(self, p):
    """parameter_list : """
    p[0] = ast.ParameterList()

  def p_parameter_list_2(self, p):
    """parameter_list : nonempty_parameter_list"""
    p[0] = p[1]

  def p_nonempty_parameter_list_1(self, p):
    """nonempty_parameter_list : parameter"""
    p[0] = ast.ParameterList(p[1])

  def p_nonempty_parameter_list_2(self, p):
    """nonempty_parameter_list : nonempty_parameter_list COMMA parameter"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_parameter(self, p):
    """parameter : attribute_section typename name_wrapped ordinal"""
    p[0] = ast.Parameter(
        p[3], p[1], p[4], p[2], filename=self.filename, lineno=p.lineno(3))

  def p_typename(self, p):
    """typename : nonnullable_typename QSTN
                | nonnullable_typename"""
    # A trailing '?' marks the type as nullable.
    if len(p) == 2:
      p[0] = p[1]
    else:
      p[0] = p[1] + "?"

  def p_nonnullable_typename(self, p):
    """nonnullable_typename : basictypename
                            | array
                            | fixed_array
                            | associative_array"""
    p[0] = p[1]

  def p_basictypename(self, p):
    """basictypename : remotetype
                     | receivertype
                     | associatedremotetype
                     | associatedreceivertype
                     | identifier
                     | ASSOCIATED identifier
                     | handletype"""
    if len(p) == 2:
      p[0] = p[1]
    else:
      # Typenames are encoded as strings; "asso<...>" marks associated kinds.
      p[0] = "asso<" + p[2] + ">"

  def p_remotetype(self, p):
    """remotetype : PENDING_REMOTE LANGLE identifier RANGLE"""
    p[0] = "rmt<%s>" % p[3]

  def p_receivertype(self, p):
    """receivertype : PENDING_RECEIVER LANGLE identifier RANGLE"""
    p[0] = "rcv<%s>" % p[3]

  def p_associatedremotetype(self, p):
    """associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier RANGLE"""
    p[0] = "rma<%s>" % p[3]

  def p_associatedreceivertype(self, p):
    """associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier RANGLE"""
    p[0] = "rca<%s>" % p[3]

  def p_handletype(self, p):
    """handletype : HANDLE
                  | HANDLE LANGLE name_wrapped RANGLE"""
    if len(p) == 2:
      p[0] = p[1]
    else:
      if p[3] not in ('data_pipe_consumer', 'data_pipe_producer',
                      'message_pipe', 'shared_buffer', 'platform'):
        # Note: We don't enable tracking of line numbers for everything, so we
        # can't use |p.lineno(3)|.
        raise ParseError(
            self.filename,
            "Invalid handle type %r:" % p[3],
            lineno=p.lineno(1),
            snippet=self._GetSnippet(p.lineno(1)))
      p[0] = "handle<" + p[3] + ">"

  def p_array(self, p):
    """array : ARRAY LANGLE typename RANGLE"""
    p[0] = p[3] + "[]"

  def p_fixed_array(self, p):
    """fixed_array : ARRAY LANGLE typename COMMA INT_CONST_DEC RANGLE"""
    value = int(p[5])
    # Size 0 and sizes above _MAX_ARRAY_SIZE are rejected.
    if value == 0 or value > _MAX_ARRAY_SIZE:
      raise ParseError(
          self.filename,
          "Fixed array size %d invalid:" % value,
          lineno=p.lineno(5),
          snippet=self._GetSnippet(p.lineno(5)))
    p[0] = p[3] + "[" + p[5] + "]"

  def p_associative_array(self, p):
    """associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
    p[0] = p[5] + "{" + p[3] + "}"

  def p_ordinal_1(self, p):
    """ordinal : """
    p[0] = None

  def p_ordinal_2(self, p):
    """ordinal : ORDINAL"""
    # Strip the leading '@' before converting the ordinal to an integer.
    value = int(p[1][1:])
    if value > _MAX_ORDINAL_VALUE:
      raise ParseError(
          self.filename,
          "Ordinal value %d too large:" % value,
          lineno=p.lineno(1),
          snippet=self._GetSnippet(p.lineno(1)))
    p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))

  def p_enum_1(self, p):
    """enum : attribute_section ENUM name_wrapped LBRACE enum_value_list RBRACE SEMI
            | attribute_section ENUM name_wrapped LBRACE \
nonempty_enum_value_list COMMA RBRACE SEMI"""
    p[0] = ast.Enum(
        p[3], p[1], p[5], filename=self.filename, lineno=p.lineno(2))

  def p_enum_2(self, p):
    """enum : attribute_section ENUM name_wrapped SEMI"""
    # A bodiless enum declaration (None value list).
    p[0] = ast.Enum(
        p[3], p[1], None, filename=self.filename, lineno=p.lineno(2))

  def p_enum_value_list_1(self, p):
    """enum_value_list : """
    p[0] = ast.EnumValueList()

  def p_enum_value_list_2(self, p):
    """enum_value_list : nonempty_enum_value_list"""
    p[0] = p[1]

  def p_nonempty_enum_value_list_1(self, p):
    """nonempty_enum_value_list : enum_value"""
    p[0] = ast.EnumValueList(p[1])

  def p_nonempty_enum_value_list_2(self, p):
    """nonempty_enum_value_list : nonempty_enum_value_list COMMA enum_value"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_enum_value(self, p):
    """enum_value : attribute_section name_wrapped
                  | attribute_section name_wrapped EQUALS int
                  | attribute_section name_wrapped EQUALS identifier_wrapped"""
    p[0] = ast.EnumValue(
        p[2],
        p[1],
        p[4] if len(p) == 5 else None,
        filename=self.filename,
        lineno=p.lineno(2))

  def p_const(self, p):
    """const : attribute_section CONST typename name_wrapped EQUALS constant SEMI"""
    p[0] = ast.Const(p[4], p[1], p[3], p[6])

  def p_constant(self, p):
    """constant : literal
                | identifier_wrapped"""
    p[0] = p[1]

  def p_identifier_wrapped(self, p):
    """identifier_wrapped : identifier"""
    p[0] = ('IDENTIFIER', p[1])

  # TODO(vtl): Make this produce a "wrapped" identifier (probably as an
  # |ast.Identifier|, to be added) and get rid of identifier_wrapped.
  def p_identifier(self, p):
    """identifier : name_wrapped
                  | name_wrapped DOT identifier"""
    p[0] = ''.join(p[1:])

  # Allow 'feature' to be a name literal not just a keyword.
  def p_name_wrapped(self, p):
    """name_wrapped : NAME
                    | FEATURE"""
    p[0] = p[1]

  def p_literal(self, p):
    """literal : int
               | float
               | TRUE
               | FALSE
               | DEFAULT
               | STRING_LITERAL"""
    p[0] = p[1]

  def p_int(self, p):
    """int : int_const
           | PLUS int_const
           | MINUS int_const"""
    p[0] = ''.join(p[1:])

  def p_int_const(self, p):
    """int_const : INT_CONST_DEC
                 | INT_CONST_HEX"""
    p[0] = p[1]

  def p_float(self, p):
    """float : FLOAT_CONST
             | PLUS FLOAT_CONST
             | MINUS FLOAT_CONST"""
    p[0] = ''.join(p[1:])

  def p_error(self, e):
    # PLY calls this with the offending token, or None at unexpected EOF.
    if e is None:
      # Unexpected EOF.
      # TODO(vtl): Can we figure out what's missing?
      raise ParseError(self.filename, "Unexpected end of file")

    if e.value == 'feature':
      raise ParseError(self.filename,
                       "`feature` is reserved for a future mojom keyword",
                       lineno=e.lineno,
                       snippet=self._GetSnippet(e.lineno))

    raise ParseError(
        self.filename,
        "Unexpected %r:" % e.value,
        lineno=e.lineno,
        snippet=self._GetSnippet(e.lineno))

  def _GetSnippet(self, lineno):
    # Returns the 1-based source line |lineno|, used in error messages.
    return self.source.split('\n')[lineno - 1]
|
||||
|
||||
|
||||
def Parse(source, filename):
  """Parses mojom source text into a syntax tree.

  Args:
    source: The mojom source text as a str.
    filename: The name of the file |source| was read from (used in errors).

  Returns:
    The AST as a mojom.parse.ast.Mojom object.
  """
  mojom_lexer = Lexer(filename)
  mojom_parser = Parser(mojom_lexer, source, filename)

  # Build the lexer and parser tables, then run the parse. Debug output and
  # table writing are disabled so no parser artifacts are emitted.
  lex.lex(object=mojom_lexer)
  yacc.yacc(module=mojom_parser, debug=0, write_tables=0)

  return yacc.parse(source)
|
File diff suppressed because it is too large
Load diff
502
utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py
Executable file
502
utils/codegen/ipc/mojo/public/tools/mojom/mojom_parser.py
Executable file
|
@ -0,0 +1,502 @@
|
|||
#!/usr/bin/env python3
|
||||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
"""Parses mojom IDL files.
|
||||
|
||||
This script parses one or more input mojom files and produces corresponding
|
||||
module files fully describing the definitions contained within each mojom. The
|
||||
module data is pickled and can be easily consumed by other tools to, e.g.,
|
||||
generate usable language bindings.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import builtins
|
||||
import codecs
|
||||
import errno
|
||||
import json
|
||||
import logging
|
||||
import multiprocessing
|
||||
import os
|
||||
import os.path
|
||||
import sys
|
||||
import traceback
|
||||
from collections import defaultdict
|
||||
|
||||
from mojom.generate import module
|
||||
from mojom.generate import translate
|
||||
from mojom.parse import parser
|
||||
from mojom.parse import conditional_features
|
||||
|
||||
|
||||
# Disable this for easier debugging.
_ENABLE_MULTIPROCESSING = True

# https://docs.python.org/3/library/multiprocessing.html#:~:text=bpo-33725
# macOS no longer defaults to the 'fork' start method; force it here so worker
# processes can share the parent's loaded data via globals (see
# _SerializeHelper), when running as the main script.
if __name__ == '__main__' and sys.platform == 'darwin':
  multiprocessing.set_start_method('fork')
# True when workers inherit our globals via fork(); consulted later to decide
# whether multiprocessing the serialization step is worthwhile.
_MULTIPROCESSING_USES_FORK = multiprocessing.get_start_method() == 'fork'
|
||||
|
||||
|
||||
def _ResolveRelativeImportPath(path, roots):
|
||||
"""Attempts to resolve a relative import path against a set of possible roots.
|
||||
|
||||
Args:
|
||||
path: The relative import path to resolve.
|
||||
roots: A list of absolute paths which will be checked in descending length
|
||||
order for a match against path.
|
||||
|
||||
Returns:
|
||||
A normalized absolute path combining one of the roots with the input path if
|
||||
and only if such a file exists.
|
||||
|
||||
Raises:
|
||||
ValueError: The path could not be resolved against any of the given roots.
|
||||
"""
|
||||
for root in reversed(sorted(roots, key=len)):
|
||||
abs_path = os.path.join(root, path)
|
||||
if os.path.isfile(abs_path):
|
||||
return os.path.normcase(os.path.normpath(abs_path))
|
||||
|
||||
raise ValueError('"%s" does not exist in any of %s' % (path, roots))
|
||||
|
||||
|
||||
def RebaseAbsolutePath(path, roots):
  """Rewrites an absolute file path as relative to one of the given roots.

  Args:
    path: The absolute path of an existing file.
    roots: A list of absolute directory paths; path must fall within one of
      them.

  Returns:
    A path equivalent to the input, relative to one of the roots. When path
    falls within multiple roots, the longest root wins (shortest relative
    path). Forward slashes are always used as separators, mirroring mojom
    import syntax.

  Raises:
    ValueError if the given path does not fall within any of the listed roots.
  """
  assert os.path.isabs(path)
  assert os.path.isfile(path)
  assert all(map(os.path.isabs, roots))

  # Longest roots first, so the shortest relative path is produced.
  sorted_roots = list(reversed(sorted(roots, key=len)))

  def rebase_against(root):
    # Walk upward from the file, accumulating components, until we either hit
    # the candidate root (success) or run out of components (no match).
    head, relative = os.path.split(path)
    while head != root:
      head, component = os.path.split(head)
      if not component:
        return None
      relative = os.path.join(component, relative)
    return relative

  for root in sorted_roots:
    relative = rebase_against(root)
    if relative:
      # TODO(crbug.com/953884): Use pathlib for this kind of thing once we're
      # fully migrated to Python 3.
      return relative.replace('\\', '/')

  raise ValueError('%s does not fall within any of %s' % (path, sorted_roots))
|
||||
|
||||
|
||||
def _GetModuleFilename(mojom_filename):
|
||||
return mojom_filename + '-module'
|
||||
|
||||
|
||||
def _EnsureInputLoaded(mojom_abspath, module_path, abs_paths, asts,
                       dependencies, loaded_modules, module_metadata):
  """Recursively ensures that a module and its dependencies are loaded.

  Args:
    mojom_abspath: An absolute file path pointing to a mojom file to load.
    module_path: The relative path used to identify mojom_abspath.
    abs_paths: A mapping from module paths to absolute file paths for all
      inputs given to this execution of the script.
    asts: A map from each input mojom's absolute path to its parsed AST.
    dependencies: A mapping of which input mojoms depend on each other, indexed
      by absolute file path.
    loaded_modules: A mapping of all modules loaded so far, including non-input
      modules that were pulled in as transitive dependencies of the inputs.
    module_metadata: Metadata to be attached to every module loaded by this
      helper.

  Returns:
    None

  On return, loaded_modules will be populated with the loaded input mojom's
  Module as well as the Modules of all of its transitive dependencies."""

  if mojom_abspath in loaded_modules:
    # Already done.
    return

  # Load dependencies first (sorted for determinism): the translation below
  # requires every imported module to already be present in loaded_modules.
  for dep_abspath, dep_path in sorted(dependencies[mojom_abspath]):
    if dep_abspath not in loaded_modules:
      _EnsureInputLoaded(dep_abspath, dep_path, abs_paths, asts, dependencies,
                         loaded_modules, module_metadata)

  # Build the import-name -> loaded Module map this mojom's translation needs.
  imports = {}
  for imp in asts[mojom_abspath].import_list:
    path = imp.import_filename
    imports[path] = loaded_modules[abs_paths[path]]
  loaded_modules[mojom_abspath] = translate.OrderedModule(
      asts[mojom_abspath], module_path, imports)
  loaded_modules[mojom_abspath].metadata = dict(module_metadata)
|
||||
|
||||
|
||||
def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
|
||||
allowed_imports = set()
|
||||
processed_deps = set()
|
||||
|
||||
def collect(metadata_filename):
|
||||
processed_deps.add(metadata_filename)
|
||||
|
||||
# Paths in the metadata file are relative to the metadata file's dir.
|
||||
metadata_dir = os.path.abspath(os.path.dirname(metadata_filename))
|
||||
|
||||
def to_abs(s):
|
||||
return os.path.normpath(os.path.join(metadata_dir, s))
|
||||
|
||||
with open(metadata_filename) as f:
|
||||
metadata = json.load(f)
|
||||
allowed_imports.update(
|
||||
[os.path.normcase(to_abs(s)) for s in metadata['sources']])
|
||||
for dep_metadata in metadata['deps']:
|
||||
dep_metadata = to_abs(dep_metadata)
|
||||
if dep_metadata not in processed_deps:
|
||||
collect(dep_metadata)
|
||||
|
||||
collect(build_metadata_filename)
|
||||
return allowed_imports
|
||||
|
||||
|
||||
# multiprocessing helper.
def _ParseAstHelper(mojom_abspath, enabled_features):
  """Parses one mojom file and strips [EnableIf]-disabled definitions.

  Returns the (mojom_abspath, ast) pair so results can be matched up when
  sharded across processes.
  """
  with codecs.open(mojom_abspath, encoding='utf-8') as f:
    source = f.read()
  tree = parser.Parse(source, mojom_abspath)
  conditional_features.RemoveDisabledDefinitions(tree, enabled_features)
  return mojom_abspath, tree
|
||||
|
||||
|
||||
# multiprocessing helper.
def _SerializeHelper(mojom_abspath, mojom_path):
  """Dumps one loaded module to its location under the output root.

  Reads |loaded_modules| and |output_root_path| from attributes stashed on
  the function object itself, so the data is shared with workers via fork()
  rather than pickled per task.
  """
  module_path = os.path.join(_SerializeHelper.output_root_path,
                             _GetModuleFilename(mojom_path))
  module_dir = os.path.dirname(module_path)
  # exist_ok guards against races among build steps emitting modules into the
  # same directory. (This script requires Python 3, so the old Python 2
  # try/except-EEXIST workaround is no longer needed.)
  os.makedirs(module_dir, exist_ok=True)
  with open(module_path, 'wb') as f:
    _SerializeHelper.loaded_modules[mojom_abspath].Dump(f)
|
||||
|
||||
|
||||
class _ExceptionWrapper:
|
||||
def __init__(self):
|
||||
# Do not capture exception object to ensure pickling works.
|
||||
self.formatted_trace = traceback.format_exc()
|
||||
|
||||
|
||||
class _FuncWrapper:
|
||||
"""Marshals exceptions and spreads args."""
|
||||
|
||||
def __init__(self, func):
|
||||
self._func = func
|
||||
|
||||
def __call__(self, args):
|
||||
# multiprocessing does not gracefully handle excptions.
|
||||
# https://crbug.com/1219044
|
||||
try:
|
||||
return self._func(*args)
|
||||
except: # pylint: disable=bare-except
|
||||
return _ExceptionWrapper()
|
||||
|
||||
|
||||
def _Shard(target_func, arg_list, processes=None):
  """Yields target_func(*args) for each args tuple in arg_list, sharding the
  calls across a process pool when there is enough work to merit it.

  Args:
    target_func: The callable to invoke for each argument tuple.
    arg_list: An iterable of argument tuples.
    processes: Worker count; defaults to the CPU count.

  If a worker raises, the captured traceback is printed to stderr and the
  whole program exits with status 1.
  """
  arg_list = list(arg_list)
  if processes is None:
    processes = multiprocessing.cpu_count()
  # Seems optimal to have each process perform at least 2 tasks.
  processes = min(processes, len(arg_list) // 2)

  if sys.platform == 'win32':
    # TODO(crbug.com/1190269) - we can't use more than 56
    # cores on Windows or Python3 may hang.
    processes = min(processes, 56)

  # Don't spin up processes unless there is enough work to merit doing so.
  if not _ENABLE_MULTIPROCESSING or processes < 2:
    for arg_tuple in arg_list:
      yield target_func(*arg_tuple)
    return

  pool = multiprocessing.Pool(processes=processes)
  try:
    wrapped_func = _FuncWrapper(target_func)
    for result in pool.imap_unordered(wrapped_func, arg_list):
      # Workers return an _ExceptionWrapper instead of raising; surface its
      # traceback here and abort.
      if isinstance(result, _ExceptionWrapper):
        sys.stderr.write(result.formatted_trace)
        sys.exit(1)
      yield result
  finally:
    pool.close()
    pool.join()  # Needed on Windows to avoid WindowsError during terminate.
    pool.terminate()
|
||||
|
||||
|
||||
def _ParseMojoms(mojom_files,
                 input_root_paths,
                 output_root_path,
                 module_root_paths,
                 enabled_features,
                 module_metadata,
                 allowed_imports=None):
  """Parses a set of mojom files and produces serialized module outputs.

  Args:
    mojom_files: A list of mojom files to process. Paths must be absolute paths
      which fall within one of the input or output root paths.
    input_root_paths: A list of absolute filesystem paths which may be used to
      resolve relative mojom file paths.
    output_root_path: An absolute filesystem path which will service as the root
      for all emitted artifacts. Artifacts produced from a given mojom file
      are based on the mojom's relative path, rebased onto this path.
      Additionally, the script expects this root to contain already-generated
      modules for any transitive dependencies not listed in mojom_files.
    module_root_paths: A list of absolute filesystem paths which contain
      already-generated modules for any non-transitive dependencies.
    enabled_features: A list of enabled feature names, controlling which AST
      nodes are filtered by [EnableIf] or [EnableIfNot] attributes.
    module_metadata: A list of 2-tuples representing metadata key-value pairs to
      attach to each compiled module output.
    allowed_imports: Optional set of absolute mojom paths; when given, any
      import outside this set is a build-dependency violation.

  Returns:
    None.

  Upon completion, a mojom-module file will be saved for each input mojom.
  """
  assert input_root_paths
  assert output_root_path

  loaded_mojom_asts = {}
  loaded_modules = {}
  input_dependencies = defaultdict(set)
  # Map each input's normcased absolute path to its root-relative path, plus
  # the reverse mapping for resolving imports back to absolute paths below.
  mojom_files_to_parse = dict((os.path.normcase(abs_path),
                               RebaseAbsolutePath(abs_path, input_root_paths))
                              for abs_path in mojom_files)
  abs_paths = dict(
      (path, abs_path) for abs_path, path in mojom_files_to_parse.items())

  logging.info('Parsing %d .mojom into ASTs', len(mojom_files_to_parse))
  map_args = ((mojom_abspath, enabled_features)
              for mojom_abspath in mojom_files_to_parse)
  for mojom_abspath, ast in _Shard(_ParseAstHelper, map_args):
    loaded_mojom_asts[mojom_abspath] = ast

  logging.info('Processing dependencies')
  for mojom_abspath, ast in sorted(loaded_mojom_asts.items()):
    invalid_imports = []
    for imp in ast.import_list:
      import_abspath = _ResolveRelativeImportPath(imp.import_filename,
                                                  input_root_paths)
      if allowed_imports and import_abspath not in allowed_imports:
        invalid_imports.append(imp.import_filename)

      abs_paths[imp.import_filename] = import_abspath
      if import_abspath in mojom_files_to_parse:
        # This import is in the input list, so we're going to translate it
        # into a module below; however it's also a dependency of another input
        # module. We retain record of dependencies to help with input
        # processing later.
        input_dependencies[mojom_abspath].add(
            (import_abspath, imp.import_filename))
      elif import_abspath not in loaded_modules:
        # We have an import that isn't being parsed right now. It must already
        # be parsed and have a module file sitting in a corresponding output
        # location.
        module_path = _GetModuleFilename(imp.import_filename)
        module_abspath = _ResolveRelativeImportPath(
            module_path, module_root_paths + [output_root_path])
        with open(module_abspath, 'rb') as module_file:
          loaded_modules[import_abspath] = module.Module.Load(module_file)

    if invalid_imports:
      raise ValueError(
          '\nThe file %s imports the following files not allowed by build '
          'dependencies:\n\n%s\n' % (mojom_abspath, '\n'.join(invalid_imports)))
  logging.info('Loaded %d modules from dependencies', len(loaded_modules))

  # At this point all transitive imports not listed as inputs have been loaded
  # and we have a complete dependency tree of the unprocessed inputs. Now we can
  # load all the inputs, resolving dependencies among them recursively as we go.
  logging.info('Ensuring inputs are loaded')
  num_existing_modules_loaded = len(loaded_modules)
  for mojom_abspath, mojom_path in mojom_files_to_parse.items():
    _EnsureInputLoaded(mojom_abspath, mojom_path, abs_paths, loaded_mojom_asts,
                       input_dependencies, loaded_modules, module_metadata)
  # Sanity check: exactly one new module per input was added.
  assert (num_existing_modules_loaded +
          len(mojom_files_to_parse) == len(loaded_modules))

  # Now we have fully translated modules for every input and every transitive
  # dependency. We can dump the modules to disk for other tools to use.
  logging.info('Serializing %d modules', len(mojom_files_to_parse))

  # Windows does not use fork() for multiprocessing, so we'd need to pass
  # loaded_module via IPC rather than via globals. Doing so is slower than not
  # using multiprocessing.
  _SerializeHelper.loaded_modules = loaded_modules
  _SerializeHelper.output_root_path = output_root_path
  # Doesn't seem to help past 4. Perhaps IO bound here?
  processes = 4 if _MULTIPROCESSING_USES_FORK else 0
  map_args = mojom_files_to_parse.items()
  for _ in _Shard(_SerializeHelper, map_args, processes=processes):
    pass
|
||||
|
||||
|
||||
def Run(command_line):
  """Command-line entry point for the mojom parser.

  Parses arguments from command_line, resolves the set of input mojom files,
  and invokes _ParseMojoms to compile them into serialized modules under the
  output root.

  Args:
    command_line: List of argument strings, excluding the program name.

  Raises:
    ValueError: If no input mojoms are given via --mojoms or --mojom-file-list.
  """
  debug_logging = os.environ.get('MOJOM_PARSER_DEBUG', '0') != '0'
  logging.basicConfig(level=logging.DEBUG if debug_logging else logging.WARNING,
                      format='%(levelname).1s %(relativeCreated)6d %(message)s')
  logging.info('Started (%s)', os.path.basename(sys.argv[0]))

  arg_parser = argparse.ArgumentParser(
      description="""
Parses one or more mojom files and produces corresponding module outputs fully
describing the definitions therein. The output is exhaustive, stable, and
sufficient for another tool to consume and emit e.g. usable language
bindings based on the original mojoms.""",
      epilog="""
Note that each transitive import dependency reachable from the input mojoms must
either also be listed as an input or must have its corresponding compiled module
already present in the provided output root.""")

  arg_parser.add_argument(
      '--input-root',
      default=[],
      action='append',
      metavar='ROOT',
      dest='input_root_paths',
      help='Adds ROOT to the set of root paths against which relative input '
      'paths should be resolved. Provided root paths are always searched '
      'in order from longest absolute path to shortest.')
  arg_parser.add_argument(
      '--output-root',
      action='store',
      required=True,
      dest='output_root_path',
      metavar='ROOT',
      help='Use ROOT as the root path in which the parser should emit compiled '
      'modules for each processed input mojom. The path of emitted module is '
      'based on the relative input path, rebased onto this root. Note that '
      'ROOT is also searched for existing modules of any transitive imports '
      'which were not included in the set of inputs.')
  arg_parser.add_argument(
      '--module-root',
      default=[],
      action='append',
      metavar='ROOT',
      dest='module_root_paths',
      help='Adds ROOT to the set of root paths to search for existing modules '
      'of non-transitive imports. Provided root paths are always searched in '
      'order from longest absolute path to shortest.')
  arg_parser.add_argument(
      '--mojoms',
      nargs='+',
      dest='mojom_files',
      default=[],
      metavar='MOJOM_FILE',
      help='Input mojom filename(s). Each filename must be either an absolute '
      'path which falls within one of the given input or output roots, or a '
      'relative path the parser will attempt to resolve using each of those '
      'roots in unspecified order.')
  arg_parser.add_argument(
      '--mojom-file-list',
      action='store',
      metavar='LIST_FILENAME',
      help='Input file whose contents are a list of mojoms to process. This '
      'may be provided in lieu of --mojoms to avoid hitting command line '
      'length limitations')
  arg_parser.add_argument(
      '--enable-feature',
      dest='enabled_features',
      default=[],
      action='append',
      metavar='FEATURE',
      help='Enables a named feature when parsing the given mojoms. Features '
      'are identified by arbitrary string values. Specifying this flag with a '
      'given FEATURE name will cause the parser to process any syntax elements '
      'tagged with an [EnableIf=FEATURE] or [EnableIfNot] attribute. If this '
      'flag is not provided for a given FEATURE, such tagged elements are '
      'discarded by the parser and will not be present in the compiled output.')
  arg_parser.add_argument(
      '--check-imports',
      dest='build_metadata_filename',
      action='store',
      metavar='METADATA_FILENAME',
      help='Instructs the parser to check imports against a set of allowed '
      'imports. Allowed imports are based on build metadata within '
      'METADATA_FILENAME. This is a JSON file with a `sources` key listing '
      'paths to the set of input mojom files being processed by this parser '
      'run, and a `deps` key listing paths to metadata files for any '
      'dependencies of these inputs. This feature can be used to implement '
      'build-time dependency checking for mojom imports, where each build '
      'metadata file corresponds to a build target in the dependency graph of '
      'a typical build system.')
  arg_parser.add_argument(
      '--add-module-metadata',
      dest='module_metadata',
      default=[],
      action='append',
      metavar='KEY=VALUE',
      help='Adds a metadata key-value pair to the output module. This can be '
      'used by build toolchains to augment parsed mojom modules with product-'
      'specific metadata for later extraction and use by custom bindings '
      'generators.')

  args, _ = arg_parser.parse_known_args(command_line)
  if args.mojom_file_list:
    with open(args.mojom_file_list) as f:
      args.mojom_files.extend(f.read().split())

  if not args.mojom_files:
    raise ValueError(
        'Must list at least one mojom file via --mojoms or --mojom-file-list')

  mojom_files = list(map(os.path.abspath, args.mojom_files))
  input_roots = list(map(os.path.abspath, args.input_root_paths))
  output_root = os.path.abspath(args.output_root_path)
  module_roots = list(map(os.path.abspath, args.module_root_paths))

  if args.build_metadata_filename:
    allowed_imports = _CollectAllowedImportsFromBuildMetadata(
        args.build_metadata_filename)
  else:
    allowed_imports = None

  # Split each KEY=VALUE pair on the first '=' only, so that values are free
  # to contain '=' characters themselves.
  module_metadata = [tuple(kvp.split('=', 1)) for kvp in args.module_metadata]
  _ParseMojoms(mojom_files, input_roots, output_root, module_roots,
               args.enabled_features, module_metadata, allowed_imports)
  logging.info('Finished')
|
||||
|
||||
|
||||
if __name__ == '__main__':
  Run(sys.argv[1:])
  # Skip the interpreter's normal shutdown (including GC over the very large
  # number of objects created above), which can take multiple seconds.
  # os._exit() bypasses stream flushing, so flush the standard streams
  # explicitly before exiting.
  for stream in (sys.stdout, sys.stderr):
    stream.flush()
  os._exit(0)
|
|
@ -0,0 +1,73 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import json
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
import mojom_parser
|
||||
|
||||
from mojom.generate import module
|
||||
|
||||
|
||||
class MojomParserTestCase(unittest.TestCase):
  """Base fixture for tests of the main mojom_parser.py script.

  Provides a per-test temporary directory plus helpers to write mojom inputs,
  invoke the parser, and load the serialized modules it emits. Covers behavior
  around input and output path manipulation, dependency resolution, and module
  serialization and deserialization."""

  def __init__(self, method_name):
    super().__init__(method_name)
    # Absolute path of the per-test scratch directory; only valid between
    # setUp() and tearDown().
    self._temp_dir = None

  def setUp(self):
    self._temp_dir = tempfile.mkdtemp()

  def tearDown(self):
    shutil.rmtree(self._temp_dir)
    self._temp_dir = None

  def GetPath(self, path):
    """Returns the absolute path for `path`, relative to the temp dir."""
    assert not os.path.isabs(path)
    return os.path.join(self._temp_dir, path)

  def GetModulePath(self, path):
    """Returns the path of the serialized module emitted for a given mojom."""
    assert not os.path.isabs(path)
    return os.path.join(self.GetPath('out'), path) + '-module'

  def WriteFile(self, path, contents):
    """Writes `contents` to `path` (relative to the temp dir), creating any
    missing parent directories."""
    full_path = self.GetPath(path)
    # exist_ok avoids a TOCTOU race between an existence check and creation.
    os.makedirs(os.path.dirname(full_path), exist_ok=True)
    with open(full_path, 'w') as f:
      f.write(contents)

  def LoadModule(self, mojom_path):
    """Loads and returns the parser's serialized module output for a mojom."""
    with open(self.GetModulePath(mojom_path), 'rb') as f:
      return module.Module.Load(f)

  def ParseMojoms(self, mojoms, metadata=None):
    """Parse all input mojoms relative the temp dir."""
    out_dir = self.GetPath('out')
    args = [
        '--input-root', self._temp_dir, '--input-root', out_dir,
        '--output-root', out_dir, '--mojoms'
    ] + [os.path.join(self._temp_dir, mojom) for mojom in mojoms]
    if metadata:
      args.extend(['--check-imports', self.GetPath(metadata)])
    mojom_parser.Run(args)

  def ExtractTypes(self, mojom):
    """Parses an inline mojom snippet and returns a {name: kind} dict of every
    top-level definition found in the resulting module."""
    filename = 'test.mojom'
    self.WriteFile(filename, mojom)
    self.ParseMojoms([filename])
    m = self.LoadModule(filename)
    definitions = {}
    for kinds in (m.enums, m.structs, m.unions, m.interfaces, m.features):
      for kind in kinds:
        definitions[kind.mojom_name] = kind
    return definitions
|
|
@ -0,0 +1,186 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import json
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class MojomParserTest(MojomParserTestCase):
  """Tests covering the behavior defined by the main mojom_parser.py script.
  This includes behavior around input and output path manipulation, dependency
  resolution, and module serialization and deserialization."""

  # NOTE: assertRaisesRegex replaces the assertRaisesRegexp alias, which was
  # deprecated since Python 3.2 and removed in Python 3.12.

  def testBasicParse(self):
    """Basic test to verify that we can parse a mojom file and get a module."""
    mojom = 'foo/bar.mojom'
    self.WriteFile(
        mojom, """\
        module test;
        enum TestEnum { kFoo };
        """)
    self.ParseMojoms([mojom])

    m = self.LoadModule(mojom)
    self.assertEqual('foo/bar.mojom', m.path)
    self.assertEqual('test', m.mojom_namespace)
    self.assertEqual(1, len(m.enums))

  def testBasicParseWithAbsolutePaths(self):
    """Verifies that we can parse a mojom file given an absolute path input."""
    mojom = 'foo/bar.mojom'
    self.WriteFile(
        mojom, """\
        module test;
        enum TestEnum { kFoo };
        """)
    self.ParseMojoms([self.GetPath(mojom)])

    m = self.LoadModule(mojom)
    self.assertEqual('foo/bar.mojom', m.path)
    self.assertEqual('test', m.mojom_namespace)
    self.assertEqual(1, len(m.enums))

  def testImport(self):
    """Verify imports within the same set of mojom inputs."""
    a = 'a.mojom'
    b = 'b.mojom'
    self.WriteFile(
        a, """\
        module a;
        import "b.mojom";
        struct Foo { b.Bar bar; };""")
    self.WriteFile(b, """\
        module b;
        struct Bar {};""")
    self.ParseMojoms([a, b])

    ma = self.LoadModule(a)
    mb = self.LoadModule(b)
    self.assertEqual('a.mojom', ma.path)
    self.assertEqual('b.mojom', mb.path)
    self.assertEqual(1, len(ma.imports))
    self.assertEqual(mb, ma.imports[0])

  def testPreProcessedImport(self):
    """Verify imports processed by a previous parser execution can be loaded
    properly when parsing a dependent mojom."""
    a = 'a.mojom'
    self.WriteFile(a, """\
        module a;
        struct Bar {};""")
    self.ParseMojoms([a])

    b = 'b.mojom'
    self.WriteFile(
        b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")
    self.ParseMojoms([b])

  def testMissingImport(self):
    """Verify that an import fails if the imported mojom does not exist."""
    a = 'a.mojom'
    self.WriteFile(
        a, """\
        module a;
        import "non-existent.mojom";
        struct Bar {};""")
    with self.assertRaisesRegex(ValueError, "does not exist"):
      self.ParseMojoms([a])

  def testUnparsedImport(self):
    """Verify that an import fails if the imported mojom is not in the set of
    mojoms provided to the parser on this execution AND there is no pre-existing
    parsed output module already on disk for it."""
    a = 'a.mojom'
    b = 'b.mojom'
    self.WriteFile(a, """\
        module a;
        struct Bar {};""")
    self.WriteFile(
        b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")

    # a.mojom has not been parsed yet, so its import will fail when processing
    # b.mojom here.
    with self.assertRaisesRegex(ValueError, "does not exist"):
      self.ParseMojoms([b])

  def testCheckImportsBasic(self):
    """Verify that the parser can handle --check-imports with a valid set of
    inputs, including support for transitive dependency resolution."""
    a = 'a.mojom'
    a_metadata = 'out/a.build_metadata'
    b = 'b.mojom'
    b_metadata = 'out/b.build_metadata'
    c = 'c.mojom'
    c_metadata = 'out/c.build_metadata'
    self.WriteFile(a_metadata,
                   json.dumps({
                       "sources": [self.GetPath(a)],
                       "deps": []
                   }))
    self.WriteFile(
        b_metadata,
        json.dumps({
            "sources": [self.GetPath(b)],
            "deps": [self.GetPath(a_metadata)]
        }))
    self.WriteFile(
        c_metadata,
        json.dumps({
            "sources": [self.GetPath(c)],
            "deps": [self.GetPath(b_metadata)]
        }))
    self.WriteFile(a, """\
        module a;
        struct Bar {};""")
    self.WriteFile(
        b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")
    self.WriteFile(
        c, """\
        module c;
        import "a.mojom";
        import "b.mojom";
        struct Baz { b.Foo foo; };""")
    self.ParseMojoms([a], metadata=a_metadata)
    self.ParseMojoms([b], metadata=b_metadata)
    self.ParseMojoms([c], metadata=c_metadata)

  def testCheckImportsMissing(self):
    """Verify that the parser rejects valid input mojoms when imports don't
    agree with build metadata given via --check-imports."""
    a = 'a.mojom'
    a_metadata = 'out/a.build_metadata'
    b = 'b.mojom'
    b_metadata = 'out/b.build_metadata'
    self.WriteFile(a_metadata,
                   json.dumps({
                       "sources": [self.GetPath(a)],
                       "deps": []
                   }))
    self.WriteFile(b_metadata,
                   json.dumps({
                       "sources": [self.GetPath(b)],
                       "deps": []
                   }))
    self.WriteFile(a, """\
        module a;
        struct Bar {};""")
    self.WriteFile(
        b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")

    # b.mojom imports a.mojom, but b's metadata does not list a's metadata as
    # a dep, so the import must be rejected.
    self.ParseMojoms([a], metadata=a_metadata)
    with self.assertRaisesRegex(ValueError, "not allowed by build"):
      self.ParseMojoms([b], metadata=b_metadata)
|
|
@ -0,0 +1,127 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
from mojom.generate import module
|
||||
|
||||
|
||||
class StableAttributeTest(MojomParserTestCase):
  """Tests covering usage of the [Stable] attribute."""

  # NOTE: assertRaisesRegex replaces the assertRaisesRegexp alias, which was
  # deprecated since Python 3.2 and removed in Python 3.12.

  def testStableAttributeTagging(self):
    """Verify that we recognize the [Stable] attribute on relevant definitions
    and the resulting parser outputs are tagged accordingly."""
    mojom = 'test.mojom'
    self.WriteFile(
        mojom, """\
        [Stable] enum TestEnum { kFoo };
        enum UnstableEnum { kBar };
        [Stable] struct TestStruct { TestEnum a; };
        struct UnstableStruct { UnstableEnum a; };
        [Stable] union TestUnion { TestEnum a; TestStruct b; };
        union UnstableUnion { UnstableEnum a; UnstableStruct b; };
        [Stable] interface TestInterface { Foo@0(TestUnion x) => (); };
        interface UnstableInterface { Foo(UnstableUnion x) => (); };
        """)
    self.ParseMojoms([mojom])

    m = self.LoadModule(mojom)
    self.assertEqual(2, len(m.enums))
    self.assertTrue(m.enums[0].stable)
    self.assertFalse(m.enums[1].stable)
    self.assertEqual(2, len(m.structs))
    self.assertTrue(m.structs[0].stable)
    self.assertFalse(m.structs[1].stable)
    self.assertEqual(2, len(m.unions))
    self.assertTrue(m.unions[0].stable)
    self.assertFalse(m.unions[1].stable)
    self.assertEqual(2, len(m.interfaces))
    self.assertTrue(m.interfaces[0].stable)
    self.assertFalse(m.interfaces[1].stable)

  def testStableStruct(self):
    """A [Stable] struct is valid if all its fields are also stable."""
    self.ExtractTypes('[Stable] struct S {};')
    self.ExtractTypes('[Stable] struct S { int32 x; bool b; };')
    self.ExtractTypes('[Stable] enum E { A }; [Stable] struct S { E e; };')
    self.ExtractTypes('[Stable] struct S {}; [Stable] struct T { S s; };')
    self.ExtractTypes(
        '[Stable] struct S {}; [Stable] struct T { array<S> ss; };')
    self.ExtractTypes(
        '[Stable] interface F {}; [Stable] struct T { pending_remote<F> f; };')

    with self.assertRaisesRegex(Exception, 'because it depends on E'):
      self.ExtractTypes('enum E { A }; [Stable] struct S { E e; };')
    with self.assertRaisesRegex(Exception, 'because it depends on X'):
      self.ExtractTypes('struct X {}; [Stable] struct S { X x; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] struct S { array<T> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] struct S { map<int32, T> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] struct S { map<T, int32> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on F'):
      self.ExtractTypes(
          'interface F {}; [Stable] struct S { pending_remote<F> f; };')
    with self.assertRaisesRegex(Exception, 'because it depends on F'):
      self.ExtractTypes(
          'interface F {}; [Stable] struct S { pending_receiver<F> f; };')

  def testStableUnion(self):
    """A [Stable] union is valid if all its fields' types are also stable."""
    self.ExtractTypes('[Stable] union U {};')
    self.ExtractTypes('[Stable] union U { int32 x; bool b; };')
    self.ExtractTypes('[Stable] enum E { A }; [Stable] union U { E e; };')
    self.ExtractTypes('[Stable] struct S {}; [Stable] union U { S s; };')
    self.ExtractTypes(
        '[Stable] struct S {}; [Stable] union U { array<S> ss; };')
    self.ExtractTypes(
        '[Stable] interface F {}; [Stable] union U { pending_remote<F> f; };')

    with self.assertRaisesRegex(Exception, 'because it depends on E'):
      self.ExtractTypes('enum E { A }; [Stable] union U { E e; };')
    with self.assertRaisesRegex(Exception, 'because it depends on X'):
      self.ExtractTypes('struct X {}; [Stable] union U { X x; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] union U { array<T> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] union U { map<int32, T> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on T'):
      self.ExtractTypes('struct T {}; [Stable] union U { map<T, int32> xs; };')
    with self.assertRaisesRegex(Exception, 'because it depends on F'):
      self.ExtractTypes(
          'interface F {}; [Stable] union U { pending_remote<F> f; };')
    with self.assertRaisesRegex(Exception, 'because it depends on F'):
      self.ExtractTypes(
          'interface F {}; [Stable] union U { pending_receiver<F> f; };')

  def testStableInterface(self):
    """A [Stable] interface is valid if all its methods' parameter types are
    stable, including response parameters where applicable."""
    self.ExtractTypes('[Stable] interface F {};')
    self.ExtractTypes('[Stable] interface F { A@0(int32 x); };')
    self.ExtractTypes('[Stable] interface F { A@0(int32 x) => (bool b); };')
    self.ExtractTypes("""\
        [Stable] enum E { A, B, C };
        [Stable] struct S {};
        [Stable] interface F { A@0(E e, S s) => (bool b, array<S> s); };
        """)

    with self.assertRaisesRegex(Exception, 'because it depends on E'):
      self.ExtractTypes(
          'enum E { A, B, C }; [Stable] interface F { A@0(E e); };')
    with self.assertRaisesRegex(Exception, 'because it depends on E'):
      self.ExtractTypes(
          'enum E { A, B, C }; [Stable] interface F { A@0(int32 x) => (E e); };'
      )
    with self.assertRaisesRegex(Exception, 'because it depends on S'):
      self.ExtractTypes(
          'struct S {}; [Stable] interface F { A@0(int32 x) => (S s); };')
    with self.assertRaisesRegex(Exception, 'because it depends on S'):
      self.ExtractTypes(
          'struct S {}; [Stable] interface F { A@0(S s) => (bool b); };')

    with self.assertRaisesRegex(Exception, 'explicit method ordinals'):
      self.ExtractTypes('[Stable] interface F { A() => (); };')
|
44
utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py
Normal file
44
utils/codegen/ipc/mojo/public/tools/mojom/union_unittest.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
# Copyright 2022 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class UnionTest(MojomParserTestCase):
  """Tests union parsing behavior."""

  # NOTE: assertRaisesRegex replaces the assertRaisesRegexp alias (removed in
  # Python 3.12), and the regex patterns use raw strings: '\[' inside a plain
  # string literal is an invalid escape sequence (a DeprecationWarning today,
  # slated to become a SyntaxError).

  def testExtensibleMustHaveDefault(self):
    """Verifies that extensible unions must have a default field."""
    mojom = 'foo.mojom'
    self.WriteFile(mojom, 'module foo; [Extensible] union U { bool x; };')
    with self.assertRaisesRegex(Exception, r'must specify a \[Default\]'):
      self.ParseMojoms([mojom])

  def testExtensibleSingleDefault(self):
    """Verifies that extensible unions must not have multiple default fields."""
    mojom = 'foo.mojom'
    self.WriteFile(
        mojom, """\
        module foo;
        [Extensible] union U {
          [Default] bool x;
          [Default] bool y;
        };
        """)
    with self.assertRaisesRegex(Exception, r'Multiple \[Default\] fields'):
      self.ParseMojoms([mojom])

  def testExtensibleDefaultTypeValid(self):
    """Verifies that an extensible union's default field must be nullable or
    integral type."""
    mojom = 'foo.mojom'
    self.WriteFile(
        mojom, """\
        module foo;
        [Extensible] union U {
          [Default] handle<message_pipe> p;
        };
        """)
    with self.assertRaisesRegex(Exception, 'must be nullable or integral'):
      self.ParseMojoms([mojom])
|
|
@ -0,0 +1,458 @@
|
|||
# Copyright 2020 The Chromium Authors
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from mojom.generate import module
|
||||
from mojom_parser_test_case import MojomParserTestCase
|
||||
|
||||
|
||||
class VersionCompatibilityTest(MojomParserTestCase):
|
||||
"""Tests covering compatibility between two versions of the same mojom type
|
||||
definition. This coverage ensures that we can reliably detect unsafe changes
|
||||
to definitions that are expected to tolerate version skew in production
|
||||
environments."""
|
||||
|
||||
def _GetTypeCompatibilityMap(self, old_mojom, new_mojom):
|
||||
"""Helper to support the implementation of assertBackwardCompatible and
|
||||
assertNotBackwardCompatible."""
|
||||
|
||||
old = self.ExtractTypes(old_mojom)
|
||||
new = self.ExtractTypes(new_mojom)
|
||||
self.assertEqual(set(old.keys()), set(new.keys()),
|
||||
'Old and new test mojoms should use the same type names.')
|
||||
|
||||
checker = module.BackwardCompatibilityChecker()
|
||||
compatibility_map = {}
|
||||
for name in old:
|
||||
try:
|
||||
compatibility_map[name] = checker.IsBackwardCompatible(
|
||||
new[name], old[name])
|
||||
except Exception:
|
||||
compatibility_map[name] = False
|
||||
return compatibility_map
|
||||
|
||||
def assertBackwardCompatible(self, old_mojom, new_mojom):
|
||||
compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
|
||||
for name, compatible in compatibility_map.items():
|
||||
if not compatible:
|
||||
raise AssertionError(
|
||||
'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n'
|
||||
'The new definition of %s should pass a backward-compatibiity '
|
||||
'check, but it does not.' % (old_mojom, new_mojom, name))
|
||||
|
||||
def assertNotBackwardCompatible(self, old_mojom, new_mojom):
|
||||
compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
|
||||
if all(compatibility_map.values()):
|
||||
raise AssertionError(
|
||||
'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n'
|
||||
'The new mojom should fail a backward-compatibility check, but it '
|
||||
'does not.' % (old_mojom, new_mojom))
|
||||
|
||||
def testNewNonExtensibleEnumValue(self):
|
||||
"""Adding a value to a non-extensible enum breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
|
||||
'enum E { kFoo, kBar, kBaz };')
|
||||
|
||||
def testNewNonExtensibleEnumValueWithMinVersion(self):
|
||||
"""Adding a value to a non-extensible enum breaks backward-compatibility,
|
||||
even with a new [MinVersion] specified for the value."""
|
||||
self.assertNotBackwardCompatible(
|
||||
'enum E { kFoo, kBar };', 'enum E { kFoo, kBar, [MinVersion=1] kBaz };')
|
||||
|
||||
def testNewValueInExistingVersion(self):
|
||||
"""Adding a value to an existing version is not allowed, even if the old
|
||||
enum was marked [Extensible]. Note that it is irrelevant whether or not the
|
||||
new enum is marked [Extensible]."""
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kFoo, kBar };',
|
||||
'enum E { kFoo, kBar, kBaz };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kFoo, kBar };',
|
||||
'[Extensible] enum E { [Default] kFoo, kBar, kBaz };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kFoo, [MinVersion=1] kBar };',
|
||||
'enum E { kFoo, [MinVersion=1] kBar, [MinVersion=1] kBaz };')
|
||||
|
||||
def testEnumValueRemoval(self):
|
||||
"""Removal of an enum value is never valid even for [Extensible] enums."""
|
||||
self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
|
||||
'enum E { kFoo };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kFoo, kBar };',
|
||||
'[Extensible] enum E { [Default] kFoo };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
|
||||
'[Extensible] enum E { [Default] kA, };')
|
||||
self.assertNotBackwardCompatible(
|
||||
"""[Extensible] enum E {
|
||||
[Default] kA,
|
||||
[MinVersion=1] kB,
|
||||
[MinVersion=1] kZ };""",
|
||||
'[Extensible] enum E { [Default] kA, [MinVersion=1] kB };')
|
||||
|
||||
def testNewExtensibleEnumValueWithMinVersion(self):
|
||||
"""Adding a new and properly [MinVersion]'d value to an [Extensible] enum
|
||||
is a backward-compatible change. Note that it is irrelevant whether or not
|
||||
the new enum is marked [Extensible]."""
|
||||
self.assertBackwardCompatible('[Extensible] enum E { [Default] kA, kB };',
|
||||
'enum E { kA, kB, [MinVersion=1] kC };')
|
||||
self.assertBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA, kB };',
|
||||
'[Extensible] enum E { [Default] kA, kB, [MinVersion=1] kC };')
|
||||
self.assertBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA, [MinVersion=1] kB };',
|
||||
"""[Extensible] enum E {
|
||||
[Default] kA,
|
||||
[MinVersion=1] kB,
|
||||
[MinVersion=2] kC };""")
|
||||
|
||||
def testRenameEnumValue(self):
|
||||
"""Renaming an enum value does not affect backward-compatibility. Only
|
||||
numeric value is relevant."""
|
||||
self.assertBackwardCompatible('enum E { kA, kB };', 'enum E { kX, kY };')
|
||||
|
||||
def testAddEnumValueAlias(self):
|
||||
"""Adding new enum fields does not affect backward-compatibility if it does
|
||||
not introduce any new numeric values."""
|
||||
self.assertBackwardCompatible(
|
||||
'enum E { kA, kB };', 'enum E { kA, kB, kC = kA, kD = 1, kE = kD };')
|
||||
|
||||
def testEnumIdentity(self):
|
||||
"""An unchanged enum is obviously backward-compatible."""
|
||||
self.assertBackwardCompatible('enum E { kA, kB, kC };',
|
||||
'enum E { kA, kB, kC };')
|
||||
|
||||
def testNewStructFieldUnversioned(self):
|
||||
"""Adding a new field to a struct without a new (i.e. higher than any
|
||||
existing version) [MinVersion] tag breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||
'struct S { string a; string b; };')
|
||||
|
||||
def testStructFieldRemoval(self):
|
||||
"""Removing a field from a struct breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; string b; };',
|
||||
'struct S { string a; };')
|
||||
|
||||
def testStructFieldTypeChange(self):
|
||||
"""Changing the type of an existing field always breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||
'struct S { array<int32> a; };')
|
||||
|
||||
def testStructFieldBecomingOptional(self):
|
||||
"""Changing a field from non-optional to optional breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||
'struct S { string? a; };')
|
||||
|
||||
def testStructFieldBecomingNonOptional(self):
|
||||
"""Changing a field from optional to non-optional breaks
|
||||
backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string? a; };',
|
||||
'struct S { string a; };')
|
||||
|
||||
def testStructFieldOrderChange(self):
|
||||
"""Changing the order of fields breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible('struct S { string a; bool b; };',
|
||||
'struct S { bool b; string a; };')
|
||||
self.assertNotBackwardCompatible('struct S { string a@0; bool b@1; };',
|
||||
'struct S { string a@1; bool b@0; };')
|
||||
|
||||
def testStructFieldMinVersionChange(self):
|
||||
"""Changing the MinVersion of a field breaks backward-compatibility."""
|
||||
self.assertNotBackwardCompatible(
|
||||
'struct S { string a; [MinVersion=1] string? b; };',
|
||||
'struct S { string a; [MinVersion=2] string? b; };')
|
||||
|
||||
def testStructFieldTypeChange(self):
|
||||
"""If a struct field's own type definition changes, the containing struct
|
||||
is backward-compatible if and only if the field type's change is
|
||||
backward-compatible."""
|
||||
self.assertBackwardCompatible(
|
||||
'struct S {}; struct T { S s; };',
|
||||
'struct S { [MinVersion=1] int32 x; }; struct T { S s; };')
|
||||
self.assertBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA }; struct S { E e; };',
|
||||
"""[Extensible] enum E {
|
||||
[Default] kA,
|
||||
[MinVersion=1] kB };
|
||||
struct S { E e; };""")
|
||||
self.assertNotBackwardCompatible(
|
||||
'struct S {}; struct T { S s; };',
|
||||
'struct S { int32 x; }; struct T { S s; };')
|
||||
self.assertNotBackwardCompatible(
|
||||
'[Extensible] enum E { [Default] kA }; struct S { E e; };',
|
||||
'[Extensible] enum E { [Default] kA, kB }; struct S { E e; };')
|
||||
|
||||
def testNewStructFieldWithInvalidMinVersion(self):
  """Reusing an already-published [MinVersion] for a new struct field
  breaks backward-compatibility."""
  old_mojom = """\
      struct S {
        string a;
        [MinVersion=1] string? b;
      };
      """
  new_mojom = """\
      struct S {
        string a;
        [MinVersion=1] string? b;
        [MinVersion=1] string? c;
      };"""
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testNewStructFieldWithValidMinVersion(self):
  """A new struct field is backward-compatible when its [MinVersion] is
  strictly greater than every MinVersion the struct already uses."""
  cases = (
      ('struct S { int32 a; };',
       'struct S { int32 a; [MinVersion=1] int32 b; };'),
      ('struct S { int32 a; [MinVersion=1] int32 b; };',
       'struct S { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };'),
  )
  for old_mojom, new_mojom in cases:
    self.assertBackwardCompatible(old_mojom, new_mojom)
def testNewStructFieldNullableReference(self):
  """A new nullable reference-typed field is compatible when properly
  versioned."""
  old_mojom = 'struct S { int32 a; };'
  new_mojom = 'struct S { int32 a; [MinVersion=1] string? b; };'
  self.assertBackwardCompatible(old_mojom, new_mojom)
def testStructFieldRename(self):
  """Field names are irrelevant to wire compatibility."""
  old_mojom = 'struct S { int32 x; bool b; };'
  new_mojom = 'struct S { int32 a; bool b; };'
  self.assertBackwardCompatible(old_mojom, new_mojom)
def testStructFieldReorderWithExplicitOrdinals(self):
  """Source order of fields doesn't matter as long as their explicit
  ordinals are unchanged."""
  old_mojom = 'struct S { bool b@1; int32 a@0; };'
  new_mojom = 'struct S { int32 a@0; bool b@1; };'
  self.assertBackwardCompatible(old_mojom, new_mojom)
def testNewUnionFieldUnversioned(self):
  """A union field added without a strictly newer [MinVersion] breaks
  backward-compatibility."""
  old_mojom = 'union U { string a; };'
  new_mojom = 'union U { string a; string b; };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldRemoval(self):
  """Deleting a union field breaks backward-compatibility."""
  old_mojom = 'union U { string a; string b; };'
  new_mojom = 'union U { string a; };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldTypeChange(self):
  """Swapping the type of an existing union field is never
  backward-compatible."""
  old_mojom = 'union U { string a; };'
  new_mojom = 'union U { array<int32> a; };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldBecomingOptional(self):
  """Relaxing a union field from non-optional to optional breaks
  backward-compatibility."""
  old_mojom = 'union U { string a; };'
  new_mojom = 'union U { string? a; };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testFieldNestedTypeChanged(self):
  """A change to a type reached through a field's nested type (array
  element, interface endpoint, ...) only breaks backward-compatibility when
  that nested change is itself incompatible."""
  # Array element type gains a properly versioned field: compatible.
  self.assertBackwardCompatible(
      """\
      struct S { string a; };
      struct T { array<S> ss; };
      """, """\
      struct S {
        string a;
        [MinVersion=1] string? b;
      };
      struct T { array<S> ss; };
      """)
  # Interface behind a pending_receiver gains a versioned method: compatible.
  self.assertBackwardCompatible(
      """\
      interface F { Do(); };
      struct S { pending_receiver<F> r; };
      """, """\
      interface F {
        Do();
        [MinVersion=1] Say();
      };
      struct S { pending_receiver<F> r; };
      """)
def testRecursiveTypeChange(self):
  """Self-referential types must not trip up the compatibility checker."""
  old_mojom = """\
      struct S {
        string a;
        array<S> others;
      };"""
  new_mojom = """\
      struct S {
        string a;
        array<S> others;
        [MinVersion=1] string? b;
      };"""
  self.assertBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldBecomingNonOptional(self):
  """Tightening a union field from optional to non-optional breaks
  backward-compatibility."""
  old_mojom = 'union U { string? a; };'
  new_mojom = 'union U { string a; };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldOrderChange(self):
  """Reordering union fields — implicitly, or by swapping explicit
  ordinals — breaks backward-compatibility."""
  cases = (
      ('union U { string a; bool b; };',
       'union U { bool b; string a; };'),
      ('union U { string a@0; bool b@1; };',
       'union U { string a@1; bool b@0; };'),
  )
  for old_mojom, new_mojom in cases:
    self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldMinVersionChange(self):
  """A union field's [MinVersion] may never change once published."""
  old_mojom = 'union U { string a; [MinVersion=1] string b; };'
  new_mojom = 'union U { string a; [MinVersion=2] string b; };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldNestedTypeChange(self):
  """If a union field's own type definition changes, the containing union
  is backward-compatible if and only if the field type's change is
  backward-compatible.

  Renamed from testUnionFieldTypeChange: the class defines an earlier
  method with that exact name, and this duplicate definition silently
  shadowed it, so the earlier test never ran.
  """
  # Compatible: the nested struct/enum only gains properly versioned members.
  self.assertBackwardCompatible(
      'struct S {}; union U { S s; };',
      'struct S { [MinVersion=1] int32 x; }; union U { S s; };')
  self.assertBackwardCompatible(
      '[Extensible] enum E { [Default] kA }; union U { E e; };',
      """[Extensible] enum E {
           [Default] kA,
           [MinVersion=1] kB };
         union U { E e; };""")
  # Incompatible: the nested change breaks compatibility on its own.
  self.assertNotBackwardCompatible(
      'struct S {}; union U { S s; };',
      'struct S { int32 x; }; union U { S s; };')
  self.assertNotBackwardCompatible(
      '[Extensible] enum E { [Default] kA }; union U { E e; };',
      '[Extensible] enum E { [Default] kA, kB }; union U { E e; };')
def testNewUnionFieldWithInvalidMinVersion(self):
  """Reusing an already-published [MinVersion] for a new union field
  breaks backward-compatibility."""
  old_mojom = """\
      union U {
        string a;
        [MinVersion=1] string b;
      };
      """
  new_mojom = """\
      union U {
        string a;
        [MinVersion=1] string b;
        [MinVersion=1] string c;
      };"""
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testNewUnionFieldWithValidMinVersion(self):
  """A new union field is backward-compatible when its [MinVersion] is
  strictly greater than every MinVersion the union already uses."""
  cases = (
      ('union U { int32 a; };',
       'union U { int32 a; [MinVersion=1] int32 b; };'),
      ('union U { int32 a; [MinVersion=1] int32 b; };',
       'union U { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };'),
  )
  for old_mojom, new_mojom in cases:
    self.assertBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldRename(self):
  """Field names are irrelevant to wire compatibility."""
  old_mojom = 'union U { int32 x; bool b; };'
  new_mojom = 'union U { int32 a; bool b; };'
  self.assertBackwardCompatible(old_mojom, new_mojom)
def testUnionFieldReorderWithExplicitOrdinals(self):
  """Source order of fields doesn't matter as long as their explicit
  ordinals are unchanged."""
  old_mojom = 'union U { bool b@1; int32 a@0; };'
  new_mojom = 'union U { int32 a@0; bool b@1; };'
  self.assertBackwardCompatible(old_mojom, new_mojom)
def testNewInterfaceMethodUnversioned(self):
  """A method added without a strictly newer [MinVersion] breaks
  backward-compatibility."""
  old_mojom = 'interface F { A(); };'
  new_mojom = 'interface F { A(); B(); };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testInterfaceMethodRemoval(self):
  """Deleting a method from an interface breaks backward-compatibility."""
  old_mojom = 'interface F { A(); B(); };'
  new_mojom = 'interface F { A(); };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testInterfaceMethodParamsChanged(self):
  """Request parameter lists are held to the same backward-compatibility
  rules as an equivalent struct definition."""
  # Incompatible parameter-list changes.
  self.assertNotBackwardCompatible('interface F { A(); };',
                                   'interface F { A(int32 x); };')
  self.assertNotBackwardCompatible('interface F { A(int32 x); };',
                                   'interface F { A(bool x); };')
  self.assertNotBackwardCompatible(
      'interface F { A(int32 x, [MinVersion=1] string? s); };', """\
      interface F {
        A(int32 x, [MinVersion=1] string? s, [MinVersion=1] int32 y);
      };""")

  # Compatible parameter-list changes.
  self.assertBackwardCompatible('interface F { A(int32 x); };',
                                'interface F { A(int32 a); };')
  self.assertBackwardCompatible(
      'interface F { A(int32 x); };',
      'interface F { A(int32 x, [MinVersion=1] string? s); };')

  # Parameter types themselves may change, subject to the usual nested-type
  # compatibility rules; equivalent (empty) types are interchangeable.
  self.assertBackwardCompatible(
      'struct S {}; interface F { A(S s); };',
      'struct S { [MinVersion=1] int32 x; }; interface F { A(S s); };')
  self.assertBackwardCompatible(
      'struct S {}; struct T {}; interface F { A(S s); };',
      'struct S {}; struct T {}; interface F { A(T s); };')
  self.assertNotBackwardCompatible(
      'struct S {}; struct T { int32 x; }; interface F { A(S s); };',
      'struct S {}; struct T { int32 x; }; interface F { A(T t); };')
def testInterfaceMethodReplyAdded(self):
  """Attaching a reply to an existing message breaks
  backward-compatibility."""
  old_mojom = 'interface F { A(); };'
  new_mojom = 'interface F { A() => (); };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testInterfaceMethodReplyRemoved(self):
  """Dropping a message's reply breaks backward-compatibility."""
  old_mojom = 'interface F { A() => (); };'
  new_mojom = 'interface F { A(); };'
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testInterfaceMethodReplyParamsChanged(self):
  """Reply parameter lists follow the same backward-compatibility rules as
  request parameter lists (i.e. equivalent struct changes)."""
  # Incompatible reply-list changes.
  self.assertNotBackwardCompatible('interface F { A() => (); };',
                                   'interface F { A() => (int32 x); };')
  self.assertNotBackwardCompatible('interface F { A() => (int32 x); };',
                                   'interface F { A() => (); };')
  self.assertNotBackwardCompatible('interface F { A() => (bool x); };',
                                   'interface F { A() => (int32 x); };')

  # Compatible reply-list changes.
  self.assertBackwardCompatible('interface F { A() => (int32 a); };',
                                'interface F { A() => (int32 x); };')
  # NOTE(review): '[MinVersion]' below carries no '=N' value, unlike every
  # other use in this file — confirm the mojom parser accepts/intends this.
  self.assertBackwardCompatible(
      'interface F { A() => (int32 x); };',
      'interface F { A() => (int32 x, [MinVersion] string? s); };')
def testNewInterfaceMethodWithInvalidMinVersion(self):
  """Adding a method to an already-published interface version is not
  backward-compatible."""
  old_mojom = """\
      interface F {
        A();
        [MinVersion=1] B();
      };
      """
  new_mojom = """\
      interface F {
        A();
        [MinVersion=1] B();
        [MinVersion=1] C();
      };
      """
  self.assertNotBackwardCompatible(old_mojom, new_mojom)
def testNewInterfaceMethodWithValidMinVersion(self):
  """A new method is compatible when its [MinVersion] exceeds that of every
  method on the old interface definition."""
  old_mojom = 'interface F { A(); };'
  new_mojom = 'interface F { A(); [MinVersion=1] B(); };'
  self.assertBackwardCompatible(old_mojom, new_mojom)
30
utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py
Executable file
30
utils/codegen/ipc/mojo/public/tools/run_all_python_unittests.py
Executable file
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env python3
# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os.path
import sys

_TOOLS_DIR = os.path.dirname(__file__)
_MOJOM_DIR = os.path.join(_TOOLS_DIR, 'mojom')
_BINDINGS_DIR = os.path.join(_TOOLS_DIR, 'bindings')
_SRC_DIR = os.path.join(_TOOLS_DIR, *([os.path.pardir] * 3))

# Make the mojom and bindings packages importable by the tests.
for _dir in (_MOJOM_DIR, _BINDINGS_DIR):
  sys.path.append(_dir)

# The typ test runner lives in //third_party/catapult/third_party/typ/.
sys.path.append(
    os.path.join(_SRC_DIR, 'third_party', 'catapult', 'third_party', 'typ'))
import typ


def Main():
  """Run every Python unittest under the mojom and bindings trees."""
  return typ.main(top_level_dirs=[_MOJOM_DIR, _BINDINGS_DIR])


if __name__ == '__main__':
  sys.exit(Main())
Loading…
Add table
Add a link
Reference in a new issue