utils: ipc: import mojo
Import mojo from the Chromium repository, so that we can use it for generating code for the IPC mechanism. The commit from which this was taken is: a079161ec8c6907b883f9cb84fc8c4e7896cb1d0 "Add PPAPI constructs for sending focus object to PdfAccessibilityTree" This tree has been pruned to remove directories that didn't have any necessary code: - mojo/* except for mojo/public - mojo core, docs, and misc files - mojo/public/* except for mojo/public/{tools,LICENSE} - language bindings for IPC, tests, and some mojo internals - mojo/public/tools/{fuzzers,chrome_ipc} - mojo/public/tools/bindings/generators - code generation for other languages No files were modified. Signed-off-by: Paul Elder <paul.elder@ideasonboard.com> Acked-by: Laurent Pinchart <laurent.pinchart@ideasonboard.com> Acked-by: Niklas Söderlund <niklas.soderlund@ragnatech.se> Acked-by: Kieran Bingham <kieran.bingham@ideasonboard.com>
This commit is contained in:
parent
3d624b745b
commit
82ba73535c
54 changed files with 12855 additions and 0 deletions
27
utils/ipc/mojo/public/LICENSE
Normal file
27
utils/ipc/mojo/public/LICENSE
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
// Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Redistribution and use in source and binary forms, with or without
|
||||||
|
// modification, are permitted provided that the following conditions are
|
||||||
|
// met:
|
||||||
|
//
|
||||||
|
// * Redistributions of source code must retain the above copyright
|
||||||
|
// notice, this list of conditions and the following disclaimer.
|
||||||
|
// * Redistributions in binary form must reproduce the above
|
||||||
|
// copyright notice, this list of conditions and the following disclaimer
|
||||||
|
// in the documentation and/or other materials provided with the
|
||||||
|
// distribution.
|
||||||
|
// * Neither the name of Google Inc. nor the names of its
|
||||||
|
// contributors may be used to endorse or promote products derived from
|
||||||
|
// this software without specific prior written permission.
|
||||||
|
//
|
||||||
|
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
6
utils/ipc/mojo/public/tools/.style.yapf
Normal file
6
utils/ipc/mojo/public/tools/.style.yapf
Normal file
|
@ -0,0 +1,6 @@
|
||||||
|
[style]
|
||||||
|
based_on_style = pep8
|
||||||
|
|
||||||
|
# New directories should use a .style.yapf that does not include the following:
|
||||||
|
column_limit = 80
|
||||||
|
indent_width = 2
|
18
utils/ipc/mojo/public/tools/BUILD.gn
Normal file
18
utils/ipc/mojo/public/tools/BUILD.gn
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
# The main target used to aggregate all unit tests for Python-based Mojo tools.
|
||||||
|
# This is used to generate a complete isolate which can be pushed to bots to run
|
||||||
|
# the tests.
|
||||||
|
group("mojo_python_unittests") {
|
||||||
|
data = [
|
||||||
|
"run_all_python_unittests.py",
|
||||||
|
"//testing/scripts/common.py",
|
||||||
|
"//testing/scripts/run_isolated_script_test.py",
|
||||||
|
"//testing/test_env.py",
|
||||||
|
"//testing/xvfb.py",
|
||||||
|
]
|
||||||
|
deps = [ "//mojo/public/tools/mojom/mojom:tests" ]
|
||||||
|
data_deps = [ "//third_party/catapult/third_party/typ/" ]
|
||||||
|
}
|
108
utils/ipc/mojo/public/tools/bindings/BUILD.gn
Normal file
108
utils/ipc/mojo/public/tools/bindings/BUILD.gn
Normal file
|
@ -0,0 +1,108 @@
|
||||||
|
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import("//mojo/public/tools/bindings/mojom.gni")
|
||||||
|
import("//third_party/jinja2/jinja2.gni")
|
||||||
|
|
||||||
|
action("precompile_templates") {
|
||||||
|
sources = mojom_generator_sources
|
||||||
|
sources += [
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/enum_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/enum_serialization_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/interface_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/interface_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/interface_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/interface_proxy_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/interface_request_validator_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/interface_response_validator_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/interface_stub_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-forward.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-import-headers.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-params-data.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-shared-internal.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-shared-message-ids.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-shared.cc.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-shared.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-test-utils.cc.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module-test-utils.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module.cc.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/module.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_data_view_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_data_view_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_serialization_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_traits_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_traits_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/struct_unserialized_message_context.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/union_data_view_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/union_data_view_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/union_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/union_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/union_serialization_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/union_traits_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/union_traits_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/validation_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/wrapper_class_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/wrapper_class_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/wrapper_class_template_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/wrapper_union_class_declaration.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/wrapper_union_class_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/cpp_templates/wrapper_union_class_template_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/constant_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/constants.java.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/data_types_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/enum.java.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/enum_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/header.java.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/interface.java.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/interface_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/interface_internal.java.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/struct.java.tmpl",
|
||||||
|
"$mojom_generator_root/generators/java_templates/union.java.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/enum_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/externs/interface_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/externs/module.externs.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/externs/struct_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/fuzzing.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/interface_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/lite/enum_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/lite/interface_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/lite/module_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/lite/mojom-lite.js.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/lite/struct_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/lite/union_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/module.amd.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/module_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/struct_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/union_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/js_templates/validation_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/mojolpm_templates/mojolpm.cc.tmpl",
|
||||||
|
"$mojom_generator_root/generators/mojolpm_templates/mojolpm.h.tmpl",
|
||||||
|
"$mojom_generator_root/generators/mojolpm_templates/mojolpm.proto.tmpl",
|
||||||
|
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_from_proto_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_to_proto_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/mojolpm_templates/mojolpm_traits_specialization_macros.tmpl",
|
||||||
|
"$mojom_generator_root/generators/ts_templates/module_definition.tmpl",
|
||||||
|
"$mojom_generator_root/generators/ts_templates/mojom.tmpl",
|
||||||
|
]
|
||||||
|
script = mojom_generator_script
|
||||||
|
|
||||||
|
inputs = jinja2_sources
|
||||||
|
outputs = [
|
||||||
|
"$target_gen_dir/cpp_templates.zip",
|
||||||
|
"$target_gen_dir/java_templates.zip",
|
||||||
|
"$target_gen_dir/mojolpm_templates.zip",
|
||||||
|
"$target_gen_dir/js_templates.zip",
|
||||||
|
"$target_gen_dir/ts_templates.zip",
|
||||||
|
]
|
||||||
|
args = [
|
||||||
|
"-o",
|
||||||
|
rebase_path(target_gen_dir, root_build_dir),
|
||||||
|
"--use_bundled_pylibs",
|
||||||
|
"precompile",
|
||||||
|
]
|
||||||
|
}
|
816
utils/ipc/mojo/public/tools/bindings/README.md
Normal file
816
utils/ipc/mojo/public/tools/bindings/README.md
Normal file
|
@ -0,0 +1,816 @@
|
||||||
|
# Mojom Interface Definition Language (IDL)
|
||||||
|
This document is a subset of the [Mojo documentation](/mojo/README.md).
|
||||||
|
|
||||||
|
[TOC]
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
Mojom is the IDL for Mojo interfaces. Given a `.mojom` file, the
|
||||||
|
[bindings
|
||||||
|
generator](https://cs.chromium.org/chromium/src/mojo/public/tools/bindings/) can
|
||||||
|
output bindings for any supported language: **C++**, **JavaScript**, or
|
||||||
|
**Java**.
|
||||||
|
|
||||||
|
For a trivial example consider the following hypothetical Mojom file we write to
|
||||||
|
`//services/widget/public/mojom/frobinator.mojom`:
|
||||||
|
|
||||||
|
```
|
||||||
|
module widget.mojom;
|
||||||
|
|
||||||
|
interface Frobinator {
|
||||||
|
Frobinate();
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
This defines a single [interface](#Interfaces) named `Frobinator` in a
|
||||||
|
[module](#Modules) named `widget.mojom` (and thus fully qualified in Mojom as
|
||||||
|
`widget.mojom.Frobinator`.) Note that many interfaces and/or other types of
|
||||||
|
definitions (structs, enums, *etc.*) may be included in a single Mojom file.
|
||||||
|
|
||||||
|
If we add a corresponding GN target to
|
||||||
|
`//services/widget/public/mojom/BUILD.gn`:
|
||||||
|
|
||||||
|
```
|
||||||
|
import("mojo/public/tools/bindings/mojom.gni")
|
||||||
|
|
||||||
|
mojom("mojom") {
|
||||||
|
sources = [
|
||||||
|
"frobinator.mojom",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
and then build this target:
|
||||||
|
|
||||||
|
```
|
||||||
|
ninja -C out/r services/widget/public/mojom
|
||||||
|
```
|
||||||
|
|
||||||
|
we'll find several generated sources in our output directory:
|
||||||
|
|
||||||
|
```
|
||||||
|
out/r/gen/services/widget/public/mojom/frobinator.mojom.cc
|
||||||
|
out/r/gen/services/widget/public/mojom/frobinator.mojom.h
|
||||||
|
out/r/gen/services/widget/public/mojom/frobinator.mojom-shared.h
|
||||||
|
etc...
|
||||||
|
```
|
||||||
|
|
||||||
|
Each of these generated source modules includes a set of definitions
|
||||||
|
representing the Mojom contents in C++. You can also build or depend on suffixed
|
||||||
|
target names to get bindings for other languages. For example,
|
||||||
|
|
||||||
|
```
|
||||||
|
ninja -C out/r services/widget/public/mojom:mojom_js
|
||||||
|
ninja -C out/r services/widget/public/mojom:mojom_java
|
||||||
|
```
|
||||||
|
|
||||||
|
would generate JavaScript and Java bindings respectively, in the same generated
|
||||||
|
output directory.
|
||||||
|
|
||||||
|
For more details regarding the generated
|
||||||
|
outputs please see
|
||||||
|
[documentation for individual target languages](#Generated-Code-For-Target-Languages).
|
||||||
|
|
||||||
|
## Mojom Syntax
|
||||||
|
|
||||||
|
Mojom IDL allows developers to define **structs**, **unions**, **interfaces**,
|
||||||
|
**constants**, and **enums**, all within the context of a **module**. These
|
||||||
|
definitions are used to generate code in the supported target languages at build
|
||||||
|
time.
|
||||||
|
|
||||||
|
Mojom files may **import** other Mojom files in order to reference their
|
||||||
|
definitions.
|
||||||
|
|
||||||
|
### Primitive Types
|
||||||
|
Mojom supports a few basic data types which may be composed into structs or used
|
||||||
|
for message parameters.
|
||||||
|
|
||||||
|
| Type | Description
|
||||||
|
|-------------------------------|-------------------------------------------------------|
|
||||||
|
| `bool` | Boolean type (`true` or `false`.)
|
||||||
|
| `int8`, `uint8` | Signed or unsigned 8-bit integer.
|
||||||
|
| `int16`, `uint16` | Signed or unsigned 16-bit integer.
|
||||||
|
| `int32`, `uint32` | Signed or unsigned 32-bit integer.
|
||||||
|
| `int64`, `uint64` | Signed or unsigned 64-bit integer.
|
||||||
|
| `float`, `double` | 32- or 64-bit floating point number.
|
||||||
|
| `string` | UTF-8 encoded string.
|
||||||
|
| `array<T>` | Array of any Mojom type *T*; for example, `array<uint8>` or `array<array<string>>`.
|
||||||
|
| `array<T, N>` | Fixed-length array of any Mojom type *T*. The parameter *N* must be an integral constant.
|
||||||
|
| `map<S, T>` | Associated array maping values of type *S* to values of type *T*. *S* may be a `string`, `enum`, or numeric type.
|
||||||
|
| `handle` | Generic Mojo handle. May be any type of handle, including a wrapped native platform handle.
|
||||||
|
| `handle<message_pipe>` | Generic message pipe handle.
|
||||||
|
| `handle<shared_buffer>` | Shared buffer handle.
|
||||||
|
| `handle<data_pipe_producer>` | Data pipe producer handle.
|
||||||
|
| `handle<data_pipe_consumer>` | Data pipe consumer handle.
|
||||||
|
| `handle<platform>` | A native platform/OS handle.
|
||||||
|
| *`pending_remote<InterfaceType>`* | Any user-defined Mojom interface type. This is sugar for a strongly-typed message pipe handle which should eventually be used to make outgoing calls on the interface.
|
||||||
|
| *`pending_receiver<InterfaceType>`* | A pending receiver for any user-defined Mojom interface type. This is sugar for a more strongly-typed message pipe handle which is expected to receive request messages and should therefore eventually be bound to an implementation of the interface.
|
||||||
|
| *`pending_associated_remote<InterfaceType>`* | An associated interface handle. See [Associated Interfaces](#Associated-Interfaces)
|
||||||
|
| *`pending_associated_receiver<InterfaceType>`* | A pending associated receiver. See [Associated Interfaces](#Associated-Interfaces)
|
||||||
|
| *T*? | An optional (nullable) value. Primitive numeric types (integers, floats, booleans, and enums) are not nullable. All other types are nullable.
|
||||||
|
|
||||||
|
### Modules
|
||||||
|
|
||||||
|
Every Mojom file may optionally specify a single **module** to which it belongs.
|
||||||
|
|
||||||
|
This is used strictly for aggregaging all defined symbols therein within a
|
||||||
|
common Mojom namespace. The specific impact this has on generated binidngs code
|
||||||
|
varies for each target language. For example, if the following Mojom is used to
|
||||||
|
generate bindings:
|
||||||
|
|
||||||
|
```
|
||||||
|
module business.stuff;
|
||||||
|
|
||||||
|
interface MoneyGenerator {
|
||||||
|
GenerateMoney();
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Generated C++ bindings will define a class interface `MoneyGenerator` in the
|
||||||
|
`business::stuff` namespace, while Java bindings will define an interface
|
||||||
|
`MoneyGenerator` in the `org.chromium.business.stuff` package. JavaScript
|
||||||
|
bindings at this time are unaffected by module declarations.
|
||||||
|
|
||||||
|
**NOTE:** By convention in the Chromium codebase, **all** Mojom files should
|
||||||
|
declare a module name with at least (and preferrably exactly) one top-level name
|
||||||
|
as well as an inner `mojom` module suffix. *e.g.*, `chrome.mojom`,
|
||||||
|
`business.mojom`, *etc.*
|
||||||
|
|
||||||
|
This convention makes it easy to tell which symbols are generated by Mojom when
|
||||||
|
reading non-Mojom code, and it also avoids namespace collisions in the fairly
|
||||||
|
common scenario where you have a real C++ or Java `Foo` along with a
|
||||||
|
corresponding Mojom `Foo` for its serialized representation.
|
||||||
|
|
||||||
|
### Imports
|
||||||
|
|
||||||
|
If your Mojom references definitions from other Mojom files, you must **import**
|
||||||
|
those files. Import syntax is as follows:
|
||||||
|
|
||||||
|
```
|
||||||
|
import "services/widget/public/mojom/frobinator.mojom";
|
||||||
|
```
|
||||||
|
|
||||||
|
Import paths are always relative to the top-level directory.
|
||||||
|
|
||||||
|
Note that circular imports are **not** supported.
|
||||||
|
|
||||||
|
### Structs
|
||||||
|
|
||||||
|
Structs are defined using the **struct** keyword, and they provide a way to
|
||||||
|
group related fields together:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
struct StringPair {
|
||||||
|
string first;
|
||||||
|
string second;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Struct fields may be comprised of any of the types listed above in the
|
||||||
|
[Primitive Types](#Primitive-Types) section.
|
||||||
|
|
||||||
|
Default values may be specified as long as they are constant:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
struct Request {
|
||||||
|
int32 id = -1;
|
||||||
|
string details;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
What follows is a fairly
|
||||||
|
comprehensive example using the supported field types:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
struct StringPair {
|
||||||
|
string first;
|
||||||
|
string second;
|
||||||
|
};
|
||||||
|
|
||||||
|
enum AnEnum {
|
||||||
|
YES,
|
||||||
|
NO
|
||||||
|
};
|
||||||
|
|
||||||
|
interface SampleInterface {
|
||||||
|
DoStuff();
|
||||||
|
};
|
||||||
|
|
||||||
|
struct AllTheThings {
|
||||||
|
// Note that these types can never be marked nullable!
|
||||||
|
bool boolean_value;
|
||||||
|
int8 signed_8bit_value = 42;
|
||||||
|
uint8 unsigned_8bit_value;
|
||||||
|
int16 signed_16bit_value;
|
||||||
|
uint16 unsigned_16bit_value;
|
||||||
|
int32 signed_32bit_value;
|
||||||
|
uint32 unsigned_32bit_value;
|
||||||
|
int64 signed_64bit_value;
|
||||||
|
uint64 unsigned_64bit_value;
|
||||||
|
float float_value_32bit;
|
||||||
|
double float_value_64bit;
|
||||||
|
AnEnum enum_value = AnEnum.YES;
|
||||||
|
|
||||||
|
// Strings may be nullable.
|
||||||
|
string? maybe_a_string_maybe_not;
|
||||||
|
|
||||||
|
// Structs may contain other structs. These may also be nullable.
|
||||||
|
StringPair some_strings;
|
||||||
|
StringPair? maybe_some_more_strings;
|
||||||
|
|
||||||
|
// In fact structs can also be nested, though in practice you must always make
|
||||||
|
// such fields nullable -- otherwise messages would need to be infinitely long
|
||||||
|
// in order to pass validation!
|
||||||
|
AllTheThings? more_things;
|
||||||
|
|
||||||
|
// Arrays may be templated over any Mojom type, and are always nullable:
|
||||||
|
array<int32> numbers;
|
||||||
|
array<int32>? maybe_more_numbers;
|
||||||
|
|
||||||
|
// Arrays of arrays of arrays... are fine.
|
||||||
|
array<array<array<AnEnum>>> this_works_but_really_plz_stop;
|
||||||
|
|
||||||
|
// The element type may be nullable if it's a type which is allowed to be
|
||||||
|
// nullable.
|
||||||
|
array<AllTheThings?> more_maybe_things;
|
||||||
|
|
||||||
|
// Fixed-size arrays get some extra validation on the receiving end to ensure
|
||||||
|
// that the correct number of elements is always received.
|
||||||
|
array<uint64, 2> uuid;
|
||||||
|
|
||||||
|
// Maps follow many of the same rules as arrays. Key types may be any
|
||||||
|
// non-handle, non-collection type, and value types may be any supported
|
||||||
|
// struct field type. Maps may also be nullable.
|
||||||
|
map<string, int32> one_map;
|
||||||
|
map<AnEnum, string>? maybe_another_map;
|
||||||
|
map<StringPair, AllTheThings?>? maybe_a_pretty_weird_but_valid_map;
|
||||||
|
map<StringPair, map<int32, array<map<string, string>?>?>?> ridiculous;
|
||||||
|
|
||||||
|
// And finally, all handle types are valid as struct fields and may be
|
||||||
|
// nullable. Note that interfaces and interface requests (the "Foo" and
|
||||||
|
// "Foo&" type syntax respectively) are just strongly-typed message pipe
|
||||||
|
// handles.
|
||||||
|
handle generic_handle;
|
||||||
|
handle<data_pipe_consumer> reader;
|
||||||
|
handle<data_pipe_producer>? maybe_writer;
|
||||||
|
handle<shared_buffer> dumping_ground;
|
||||||
|
handle<message_pipe> raw_message_pipe;
|
||||||
|
pending_remote<SampleInterface>? maybe_a_sample_interface_client_pipe;
|
||||||
|
pending_receiver<SampleInterface> non_nullable_sample_pending_receiver;
|
||||||
|
pending_receiver<SampleInterface>? nullable_sample_pending_receiver;
|
||||||
|
pending_associated_remote<SampleInterface> associated_interface_client;
|
||||||
|
pending_associated_receiver<SampleInterface> associated_pending_receiver;
|
||||||
|
pending_associated_receiver<SampleInterface>? maybe_another_pending_receiver;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
For details on how all of these different types translate to usable generated
|
||||||
|
code, see
|
||||||
|
[documentation for individual target languages](#Generated-Code-For-Target-Languages).
|
||||||
|
|
||||||
|
### Unions
|
||||||
|
|
||||||
|
Mojom supports tagged unions using the **union** keyword. A union is a
|
||||||
|
collection of fields which may taken the value of any single one of those fields
|
||||||
|
at a time. Thus they provide a way to represent a variant value type while
|
||||||
|
minimizing storage requirements.
|
||||||
|
|
||||||
|
Union fields may be of any type supported by [struct](#Structs) fields. For
|
||||||
|
example:
|
||||||
|
|
||||||
|
```cpp
|
||||||
|
union ExampleUnion {
|
||||||
|
string str;
|
||||||
|
StringPair pair;
|
||||||
|
int64 id;
|
||||||
|
array<uint64, 2> guid;
|
||||||
|
SampleInterface iface;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
For details on how unions like this translate to generated bindings code, see
|
||||||
|
[documentation for individual target languages](#Generated-Code-For-Target-Languages).
|
||||||
|
|
||||||
|
### Enumeration Types
|
||||||
|
|
||||||
|
Enumeration types may be defined using the **enum** keyword either directly
|
||||||
|
within a module or nested within the namespace of some struct or interface:
|
||||||
|
|
||||||
|
```
|
||||||
|
module business.mojom;
|
||||||
|
|
||||||
|
enum Department {
|
||||||
|
SALES = 0,
|
||||||
|
DEV,
|
||||||
|
};
|
||||||
|
|
||||||
|
struct Employee {
|
||||||
|
enum Type {
|
||||||
|
FULL_TIME,
|
||||||
|
PART_TIME,
|
||||||
|
};
|
||||||
|
|
||||||
|
Type type;
|
||||||
|
// ...
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Similar to C-style enums, individual values may be explicitly assigned within an
|
||||||
|
enum definition. By default, values are based at zero and increment by
|
||||||
|
1 sequentially.
|
||||||
|
|
||||||
|
The effect of nested definitions on generated bindings varies depending on the
|
||||||
|
target language. See [documentation for individual target languages](#Generated-Code-For-Target-Languages)
|
||||||
|
|
||||||
|
### Constants
|
||||||
|
|
||||||
|
Constants may be defined using the **const** keyword either directly within a
|
||||||
|
module or nested within the namespace of some struct or interface:
|
||||||
|
|
||||||
|
```
|
||||||
|
module business.mojom;
|
||||||
|
|
||||||
|
const string kServiceName = "business";
|
||||||
|
|
||||||
|
struct Employee {
|
||||||
|
const uint64 kInvalidId = 0;
|
||||||
|
|
||||||
|
enum Type {
|
||||||
|
FULL_TIME,
|
||||||
|
PART_TIME,
|
||||||
|
};
|
||||||
|
|
||||||
|
uint64 id = kInvalidId;
|
||||||
|
Type type;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
The effect of nested definitions on generated bindings varies depending on the
|
||||||
|
target language. See [documentation for individual target languages](#Generated-Code-For-Target-Languages)
|
||||||
|
|
||||||
|
### Interfaces
|
||||||
|
|
||||||
|
An **interface** is a logical bundle of parameterized request messages. Each
|
||||||
|
request message may optionally define a parameterized response message. Here's
|
||||||
|
an example to define an interface `Foo` with various kinds of requests:
|
||||||
|
|
||||||
|
```
|
||||||
|
interface Foo {
|
||||||
|
// A request which takes no arguments and expects no response.
|
||||||
|
MyMessage();
|
||||||
|
|
||||||
|
// A request which has some arguments and expects no response.
|
||||||
|
MyOtherMessage(string name, array<uint8> bytes);
|
||||||
|
|
||||||
|
// A request which expects a single-argument response.
|
||||||
|
MyMessageWithResponse(string command) => (bool success);
|
||||||
|
|
||||||
|
// A request which expects a response with multiple arguments.
|
||||||
|
MyMessageWithMoarResponse(string a, string b) => (int8 c, int8 d);
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Anything which is a valid struct field type (see [Structs](#Structs)) is also a
|
||||||
|
valid request or response argument type. The type notation is the same for both.
|
||||||
|
|
||||||
|
### Attributes
|
||||||
|
|
||||||
|
Mojom definitions may have their meaning altered by **attributes**, specified
|
||||||
|
with a syntax similar to Java or C# attributes. There are a handle of
|
||||||
|
interesting attributes supported today.
|
||||||
|
|
||||||
|
**`[Sync]`**
|
||||||
|
: The `Sync` attribute may be specified for any interface method which expects
|
||||||
|
a response. This makes it so that callers of the method can wait
|
||||||
|
synchronously for a response. See
|
||||||
|
[Synchronous Calls](/mojo/public/cpp/bindings/README.md#Synchronous-Calls)
|
||||||
|
in the C++ bindings documentation. Note that sync methods are only actually
|
||||||
|
synchronous when called from C++.
|
||||||
|
|
||||||
|
**`[Extensible]`**
|
||||||
|
: The `Extensible` attribute may be specified for any enum definition. This
|
||||||
|
essentially disables builtin range validation when receiving values of the
|
||||||
|
enum type in a message, allowing older bindings to tolerate unrecognized
|
||||||
|
values from newer versions of the enum.
|
||||||
|
|
||||||
|
**`[Native]`**
|
||||||
|
: The `Native` attribute may be specified for an empty struct declaration to
|
||||||
|
provide a nominal bridge between Mojo IPC and legacy `IPC::ParamTraits` or
|
||||||
|
`IPC_STRUCT_TRAITS*` macros.
|
||||||
|
See
|
||||||
|
[Repurposing Legacy IPC Traits](/docs/mojo_ipc_conversion.md#repurposing-and-invocations)
|
||||||
|
for more details. Note support for this attribute is strictly limited to C++
|
||||||
|
bindings generation.
|
||||||
|
|
||||||
|
**`[MinVersion=N]`**
|
||||||
|
: The `MinVersion` attribute is used to specify the version at which a given
|
||||||
|
field, enum value, interface method, or method parameter was introduced.
|
||||||
|
See [Versioning](#Versioning) for more details.
|
||||||
|
|
||||||
|
**`[Stable]`**
|
||||||
|
: The `Stable` attribute specifies that a given mojom type or interface
|
||||||
|
definition can be considered stable over time, meaning it is safe to use for
|
||||||
|
things like persistent storage or communication between independent
|
||||||
|
version-skewed binaries. Stable definitions may only depend on builtin mojom
|
||||||
|
types or other stable definitions, and changes to such definitions MUST
|
||||||
|
preserve backward-compatibility through appropriate use of versioning.
|
||||||
|
Backward-compatibility of changes is enforced in the Chromium tree using a
|
||||||
|
strict presubmit check. See [Versioning](#Versioning) for more details on
|
||||||
|
backward-compatibility constraints.
|
||||||
|
|
||||||
|
**`[EnableIf=value]`**
|
||||||
|
: The `EnableIf` attribute is used to conditionally enable definitions when
|
||||||
|
the mojom is parsed. If the `mojom` target in the GN file does not include
|
||||||
|
the matching `value` in the list of `enabled_features`, the definition
|
||||||
|
will be disabled. This is useful for mojom definitions that only make
|
||||||
|
sense on one platform. Note that the `EnableIf` attribute can only be set
|
||||||
|
once per definition.
|
||||||
|
|
||||||
|
## Generated Code For Target Languages
|
||||||
|
|
||||||
|
When the bindings generator successfully processes an input Mojom file, it emits
|
||||||
|
corresponding code for each supported target language. For more details on how
|
||||||
|
Mojom concepts translate to a given target language, please refer to the
|
||||||
|
bindings API documentation for that language:
|
||||||
|
|
||||||
|
* [C++ Bindings](/mojo/public/cpp/bindings/README.md)
|
||||||
|
* [JavaScript Bindings](/mojo/public/js/README.md)
|
||||||
|
* [Java Bindings](/mojo/public/java/bindings/README.md)
|
||||||
|
|
||||||
|
## Message Validation
|
||||||
|
|
||||||
|
Regardless of target language, all interface messages are validated during
|
||||||
|
deserialization before they are dispatched to a receiving implementation of the
|
||||||
|
interface. This helps to ensure consistent validation across interfaces without
|
||||||
|
leaving the burden to developers and security reviewers every time a new message
|
||||||
|
is added.
|
||||||
|
|
||||||
|
If a message fails validation, it is never dispatched. Instead a **connection
|
||||||
|
error** is raised on the binding object (see
|
||||||
|
[C++ Connection Errors](/mojo/public/cpp/bindings/README.md#Connection-Errors),
|
||||||
|
[Java Connection Errors](/mojo/public/java/bindings/README.md#Connection-Errors),
|
||||||
|
or
|
||||||
|
[JavaScript Connection Errors](/mojo/public/js/README.md#Connection-Errors) for
|
||||||
|
details.)
|
||||||
|
|
||||||
|
Some baseline level of validation is done automatically for primitive Mojom
|
||||||
|
types.
|
||||||
|
|
||||||
|
### Non-Nullable Objects
|
||||||
|
|
||||||
|
Mojom fields or parameter values (*e.g.*, structs, interfaces, arrays, *etc.*)
|
||||||
|
may be marked nullable in Mojom definitions (see
|
||||||
|
[Primitive Types](#Primitive-Types).) If a field or parameter is **not** marked
|
||||||
|
nullable but a message is received with a null value in its place, that message
|
||||||
|
will fail validation.
|
||||||
|
|
||||||
|
### Enums
|
||||||
|
|
||||||
|
Enums declared in Mojom are automatically validated against the range of legal
|
||||||
|
values. For example if a Mojom declares the enum:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
enum AdvancedBoolean {
|
||||||
|
TRUE = 0,
|
||||||
|
FALSE = 1,
|
||||||
|
FILE_NOT_FOUND = 2,
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
and a message is received with the integral value 3 (or anything other than 0,
|
||||||
|
1, or 2) in place of some `AdvancedBoolean` field or parameter, the message will
|
||||||
|
fail validation.
|
||||||
|
|
||||||
|
*** note
|
||||||
|
NOTE: It's possible to avoid this type of validation error by explicitly marking
|
||||||
|
an enum as [Extensible](#Attributes) if you anticipate your enum being exchanged
|
||||||
|
between two different versions of the binding interface. See
|
||||||
|
[Versioning](#Versioning).
|
||||||
|
***
|
||||||
|
|
||||||
|
### Other failures
|
||||||
|
|
||||||
|
There are a host of internal validation errors that may occur when a malformed
|
||||||
|
message is received, but developers should not be concerned with these
|
||||||
|
specifically; in general they can only result from internal bindings bugs,
|
||||||
|
compromised processes, or some remote endpoint making a dubious effort to
|
||||||
|
manually encode their own bindings messages.
|
||||||
|
|
||||||
|
### Custom Validation
|
||||||
|
|
||||||
|
It's also possible for developers to define custom validation logic for specific
|
||||||
|
Mojom struct types by exploiting the
|
||||||
|
[type mapping](/mojo/public/cpp/bindings/README.md#Type-Mapping) system for C++
|
||||||
|
bindings. Messages rejected by custom validation logic trigger the same
|
||||||
|
validation failure behavior as the built-in type validation routines.
|
||||||
|
|
||||||
|
## Associated Interfaces
|
||||||
|
|
||||||
|
As mentioned in the [Primitive Types](#Primitive-Types) section above, pending_remote
|
||||||
|
and pending_receiver fields and parameters may be marked as `associated`. This
|
||||||
|
essentially means that they are piggy-backed on some other interface's message
|
||||||
|
pipe.
|
||||||
|
|
||||||
|
Because individual interface message pipes operate independently there can be no
|
||||||
|
relative ordering guarantees among them. Associated interfaces are useful when
|
||||||
|
one interface needs to guarantee strict FIFO ordering with respect to one or
|
||||||
|
more other interfaces, as they allow interfaces to share a single pipe.
|
||||||
|
|
||||||
|
Currently associated interfaces are only supported in generated C++ bindings.
|
||||||
|
See the documentation for
|
||||||
|
[C++ Associated Interfaces](/mojo/public/cpp/bindings/README.md#Associated-Interfaces).
|
||||||
|
|
||||||
|
## Versioning
|
||||||
|
|
||||||
|
### Overview
|
||||||
|
|
||||||
|
*** note
|
||||||
|
**NOTE:** You don't need to worry about versioning if you don't care about
|
||||||
|
backwards compatibility. Specifically, all parts of Chrome are updated
|
||||||
|
atomically today and there is not yet any possibility of any two Chrome
|
||||||
|
processes communicating with two different versions of any given Mojom
|
||||||
|
interface.
|
||||||
|
***
|
||||||
|
|
||||||
|
Services extend their interfaces to support new features over time, and clients
|
||||||
|
want to use those new features when they are available. If services and clients
|
||||||
|
are not updated at the same time, it's important for them to be able to
|
||||||
|
communicate with each other using different snapshots (versions) of their
|
||||||
|
interfaces.
|
||||||
|
|
||||||
|
This document shows how to extend Mojom interfaces in a backwards-compatible
|
||||||
|
way. Changing interfaces in a non-backwards-compatible way is not discussed,
|
||||||
|
because in that case communication between different interface versions is
|
||||||
|
impossible anyway.
|
||||||
|
|
||||||
|
### Versioned Structs
|
||||||
|
|
||||||
|
You can use the `MinVersion` [attribute](#Attributes) to indicate from which
|
||||||
|
version a struct field is introduced. Assume you have the following struct:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
struct Employee {
|
||||||
|
uint64 employee_id;
|
||||||
|
string name;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
and you would like to add a birthday field. You can do:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
struct Employee {
|
||||||
|
uint64 employee_id;
|
||||||
|
string name;
|
||||||
|
[MinVersion=1] Date? birthday;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
By default, fields belong to version 0. New fields must be appended to the
|
||||||
|
struct definition (*i.e*., existing fields must not change **ordinal value**)
|
||||||
|
with the `MinVersion` attribute set to a number greater than any previous
|
||||||
|
existing versions.
|
||||||
|
|
||||||
|
*** note
|
||||||
|
**NOTE:** do not change existing fields in versioned structs, as this is
|
||||||
|
not backwards-compatible. Instead, rename the old field to make its
|
||||||
|
deprecation clear and add a new field with the new version number.
|
||||||
|
***
|
||||||
|
|
||||||
|
**Ordinal value** refers to the relative positional layout of a struct's fields
|
||||||
|
(and an interface's methods) when encoded in a message. Implicitly, ordinal
|
||||||
|
numbers are assigned to fields according to lexical position. In the example
|
||||||
|
above, `employee_id` has an ordinal value of 0 and `name` has an ordinal value
|
||||||
|
of 1.
|
||||||
|
|
||||||
|
Ordinal values can be specified explicitly using **`@`** notation, subject to
|
||||||
|
the following hard constraints:
|
||||||
|
|
||||||
|
* For any given struct or interface, if any field or method explicitly specifies
|
||||||
|
an ordinal value, all fields or methods must explicitly specify an ordinal
|
||||||
|
value.
|
||||||
|
* For an *N*-field struct or *N*-method interface, the set of explicitly
|
||||||
|
assigned ordinal values must be limited to the range *[0, N-1]*. Interfaces
|
||||||
|
should include placeholder methods to fill the ordinal positions of removed
|
||||||
|
methods (for example "Unused_Message_7@7()" or "RemovedMessage@42()", etc).
|
||||||
|
|
||||||
|
You may reorder fields, but you must ensure that the ordinal values of existing
|
||||||
|
fields remain unchanged. For example, the following struct remains
|
||||||
|
backwards-compatible:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
struct Employee {
|
||||||
|
uint64 employee_id@0;
|
||||||
|
[MinVersion=1] Date? birthday@2;
|
||||||
|
string name@1;
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
*** note
|
||||||
|
**NOTE:** Newly added fields of Mojo object or handle types MUST be nullable.
|
||||||
|
See [Primitive Types](#Primitive-Types).
|
||||||
|
***
|
||||||
|
|
||||||
|
### Versioned Interfaces
|
||||||
|
|
||||||
|
There are two dimensions on which an interface can be extended
|
||||||
|
|
||||||
|
**Appending New Parameters To Existing Methods**
|
||||||
|
: Parameter lists are treated as structs internally, so all the rules of
|
||||||
|
versioned structs apply to method parameter lists. The only difference is
|
||||||
|
that the version number is scoped to the whole interface rather than to any
|
||||||
|
individual parameter list.
|
||||||
|
|
||||||
|
Please note that adding a response to a message which did not previously
|
||||||
|
expect a response is not a backwards-compatible change.
|
||||||
|
|
||||||
|
**Appending New Methods**
|
||||||
|
: Similarly, you can reorder methods with explicit ordinal values as long as
|
||||||
|
the ordinal values of existing methods are unchanged.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
// Old version:
|
||||||
|
interface HumanResourceDatabase {
|
||||||
|
AddEmployee(Employee employee) => (bool success);
|
||||||
|
QueryEmployee(uint64 id) => (Employee? employee);
|
||||||
|
};
|
||||||
|
|
||||||
|
// New version:
|
||||||
|
interface HumanResourceDatabase {
|
||||||
|
AddEmployee(Employee employee) => (bool success);
|
||||||
|
|
||||||
|
QueryEmployee(uint64 id, [MinVersion=1] bool retrieve_finger_print)
|
||||||
|
=> (Employee? employee,
|
||||||
|
[MinVersion=1] array<uint8>? finger_print);
|
||||||
|
|
||||||
|
[MinVersion=1]
|
||||||
|
AttachFingerPrint(uint64 id, array<uint8> finger_print)
|
||||||
|
=> (bool success);
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
Similar to [versioned structs](#Versioned-Structs), when you pass the parameter
|
||||||
|
list of a request or response method to a destination using an older version of
|
||||||
|
an interface, unrecognized fields are silently discarded. However, if the method
|
||||||
|
call itself is not recognized, it is considered a validation error and the
|
||||||
|
receiver will close its end of the interface pipe. For example, if a client on
|
||||||
|
version 1 of the above interface sends an `AttachFingerPrint` request to an
|
||||||
|
implementation of version 0, the client will be disconnected.
|
||||||
|
|
||||||
|
Bindings target languages that support versioning expose means to query or
|
||||||
|
assert the remote version from a client handle (*e.g.*, an
|
||||||
|
`mojo::Remote<T>` in C++ bindings.)
|
||||||
|
|
||||||
|
See
|
||||||
|
[C++ Versioning Considerations](/mojo/public/cpp/bindings/README.md#Versioning-Considerations)
|
||||||
|
and
|
||||||
|
[Java Versioning Considerations](/mojo/public/java/bindings/README.md#Versioning-Considerations)
|
||||||
|
|
||||||
|
### Versioned Enums
|
||||||
|
|
||||||
|
**By default, enums are non-extensible**, which means that generated message
|
||||||
|
validation code does not expect to see new values in the future. When an unknown
|
||||||
|
value is seen for a non-extensible enum field or parameter, a validation error
|
||||||
|
is raised.
|
||||||
|
|
||||||
|
If you want an enum to be extensible in the future, you can apply the
|
||||||
|
`[Extensible]` [attribute](#Attributes):
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
[Extensible]
|
||||||
|
enum Department {
|
||||||
|
SALES,
|
||||||
|
DEV,
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
And later you can extend this enum without breaking backwards compatibility:
|
||||||
|
|
||||||
|
``` cpp
|
||||||
|
[Extensible]
|
||||||
|
enum Department {
|
||||||
|
SALES,
|
||||||
|
DEV,
|
||||||
|
[MinVersion=1] RESEARCH,
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
*** note
|
||||||
|
**NOTE:** For versioned enum definitions, the use of a `[MinVersion]` attribute
|
||||||
|
is strictly for documentation purposes. It has no impact on the generated code.
|
||||||
|
***
|
||||||
|
|
||||||
|
With extensible enums, bound interface implementations may receive unknown enum
|
||||||
|
values and will need to deal with them gracefully. See
|
||||||
|
[C++ Versioning Considerations](/mojo/public/cpp/bindings/README.md#Versioning-Considerations)
|
||||||
|
for details.
|
||||||
|
|
||||||
|
## Grammar Reference
|
||||||
|
|
||||||
|
Below is the (BNF-ish) context-free grammar of the Mojom language:
|
||||||
|
|
||||||
|
```
|
||||||
|
MojomFile = StatementList
|
||||||
|
StatementList = Statement StatementList | Statement
|
||||||
|
Statement = ModuleStatement | ImportStatement | Definition
|
||||||
|
|
||||||
|
ModuleStatement = AttributeSection "module" Identifier ";"
|
||||||
|
ImportStatement = "import" StringLiteral ";"
|
||||||
|
Definition = Struct Union Interface Enum Const
|
||||||
|
|
||||||
|
AttributeSection = <empty> | "[" AttributeList "]"
|
||||||
|
AttributeList = <empty> | NonEmptyAttributeList
|
||||||
|
NonEmptyAttributeList = Attribute
|
||||||
|
| Attribute "," NonEmptyAttributeList
|
||||||
|
Attribute = Name
|
||||||
|
| Name "=" Name
|
||||||
|
| Name "=" Literal
|
||||||
|
|
||||||
|
Struct = AttributeSection "struct" Name "{" StructBody "}" ";"
|
||||||
|
| AttributeSection "struct" Name ";"
|
||||||
|
StructBody = <empty>
|
||||||
|
| StructBody Const
|
||||||
|
| StructBody Enum
|
||||||
|
| StructBody StructField
|
||||||
|
StructField = AttributeSection TypeSpec Name Ordinal Default ";"
|
||||||
|
|
||||||
|
Union = AttributeSection "union" Name "{" UnionBody "}" ";"
|
||||||
|
UnionBody = <empty> | UnionBody UnionField
|
||||||
|
UnionField = AttributeSection TypeSpec Name Ordinal ";"
|
||||||
|
|
||||||
|
Interface = AttributeSection "interface" Name "{" InterfaceBody "}" ";"
|
||||||
|
InterfaceBody = <empty>
|
||||||
|
| InterfaceBody Const
|
||||||
|
| InterfaceBody Enum
|
||||||
|
| InterfaceBody Method
|
||||||
|
Method = AttributeSection Name Ordinal "(" ParameterList ")" Response ";"
|
||||||
|
ParameterList = <empty> | NonEmptyParameterList
|
||||||
|
NonEmptyParameterList = Parameter
|
||||||
|
| Parameter "," NonEmptyParameterList
|
||||||
|
Parameter = AttributeSection TypeSpec Name Ordinal
|
||||||
|
Response = <empty> | "=>" "(" ParameterList ")"
|
||||||
|
|
||||||
|
TypeSpec = TypeName "?" | TypeName
|
||||||
|
TypeName = BasicTypeName
|
||||||
|
| Array
|
||||||
|
| FixedArray
|
||||||
|
| Map
|
||||||
|
| InterfaceRequest
|
||||||
|
BasicTypeName = Identifier | "associated" Identifier | HandleType | NumericType
|
||||||
|
NumericType = "bool" | "int8" | "uint8" | "int16" | "uint16" | "int32"
|
||||||
|
| "uint32" | "int64" | "uint64" | "float" | "double"
|
||||||
|
HandleType = "handle" | "handle" "<" SpecificHandleType ">"
|
||||||
|
SpecificHandleType = "message_pipe"
|
||||||
|
| "shared_buffer"
|
||||||
|
| "data_pipe_consumer"
|
||||||
|
| "data_pipe_producer"
|
||||||
|
| "platform"
|
||||||
|
Array = "array" "<" TypeSpec ">"
|
||||||
|
FixedArray = "array" "<" TypeSpec "," IntConstDec ">"
|
||||||
|
Map = "map" "<" Identifier "," TypeSpec ">"
|
||||||
|
InterfaceRequest = Identifier "&" | "associated" Identifier "&"
|
||||||
|
|
||||||
|
Ordinal = <empty> | OrdinalValue
|
||||||
|
|
||||||
|
Default = <empty> | "=" Constant
|
||||||
|
|
||||||
|
Enum = AttributeSection "enum" Name "{" NonEmptyEnumValueList "}" ";"
|
||||||
|
| AttributeSection "enum" Name "{" NonEmptyEnumValueList "," "}" ";"
|
||||||
|
NonEmptyEnumValueList = EnumValue | NonEmptyEnumValueList "," EnumValue
|
||||||
|
EnumValue = AttributeSection Name
|
||||||
|
| AttributeSection Name "=" Integer
|
||||||
|
| AttributeSection Name "=" Identifier
|
||||||
|
|
||||||
|
Const = "const" TypeSpec Name "=" Constant ";"
|
||||||
|
|
||||||
|
Constant = Literal | Identifier ";"
|
||||||
|
|
||||||
|
Identifier = Name | Name "." Identifier
|
||||||
|
|
||||||
|
Literal = Integer | Float | "true" | "false" | "default" | StringLiteral
|
||||||
|
|
||||||
|
Integer = IntConst | "+" IntConst | "-" IntConst
|
||||||
|
IntConst = IntConstDec | IntConstHex
|
||||||
|
|
||||||
|
Float = FloatConst | "+" FloatConst | "-" FloatConst
|
||||||
|
|
||||||
|
; The rules below are for tokens matched strictly according to the given regexes
|
||||||
|
|
||||||
|
Identifier = /[a-zA-Z_][0-9a-zA-Z_]*/
|
||||||
|
IntConstDec = /0|(1-9[0-9]*)/
|
||||||
|
IntConstHex = /0[xX][0-9a-fA-F]+/
|
||||||
|
OrdinalValue = /@(0|(1-9[0-9]*))/
|
||||||
|
FloatConst = ... # Imagine it's close enough to C-style float syntax.
|
||||||
|
StringLiteral = ... # Imagine it's close enough to C-style string literals, including escapes.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Additional Documentation
|
||||||
|
|
||||||
|
[Mojom Message Format](https://docs.google.com/document/d/13pv9cFh5YKuBggDBQ1-AL8VReF-IYpFOFpRfvWFrwio/edit)
|
||||||
|
: Describes the wire format used by Mojo bindings interfaces over message
|
||||||
|
pipes.
|
||||||
|
|
||||||
|
[Input Format of Mojom Message Validation Tests](https://docs.google.com/document/d/1-y-2IYctyX2NPaLxJjpJfzVNWCC2SR2MJAD9MpIytHQ/edit)
|
||||||
|
: Describes a text format used to facilitate bindings message validation
|
||||||
|
tests.
|
|
@ -0,0 +1,51 @@
|
||||||
|
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
# Paths of .gni files, one per directory that declares Mojo typemaps.
# Each imported file is expected to define a `typemaps` list variable
# (see the aggregation loop below).
_typemap_imports = [
  "//chrome/chrome_cleaner/mojom/typemaps/typemaps.gni",
  "//chrome/common/importer/typemaps.gni",
  "//chrome/common/media_router/mojom/typemaps.gni",
  "//chrome/typemaps.gni",
  "//chromecast/typemaps.gni",
  "//chromeos/typemaps.gni",
  "//chromeos/components/multidevice/mojom/typemaps.gni",
  "//chromeos/services/cros_healthd/public/mojom/typemaps.gni",
  "//chromeos/services/device_sync/public/mojom/typemaps.gni",
  "//chromeos/services/network_config/public/mojom/typemaps.gni",
  "//chromeos/services/secure_channel/public/mojom/typemaps.gni",
  "//components/arc/mojom/typemaps.gni",
  "//components/chromeos_camera/common/typemaps.gni",
  "//components/services/storage/public/cpp/filesystem/typemaps.gni",
  "//components/sync/mojom/typemaps.gni",
  "//components/typemaps.gni",
  "//content/browser/typemaps.gni",
  "//content/public/common/typemaps.gni",
  "//sandbox/mac/mojom/typemaps.gni",
  "//services/media_session/public/cpp/typemaps.gni",
  "//services/proxy_resolver/public/cpp/typemaps.gni",
  "//services/resource_coordinator/public/cpp/typemaps.gni",
  "//services/service_manager/public/cpp/typemaps.gni",
  "//services/tracing/public/mojom/typemaps.gni",
]
|
||||||
|
|
||||||
|
# Flatten the `typemaps` lists declared by every imported .gni file into a
# single list of typemap file paths.
_typemaps = []
foreach(typemap_import, _typemap_imports) {
  # Avoid reassignment error by assigning to empty scope first.
  _imported = {
  }
  _imported = read_file(typemap_import, "scope")
  _typemaps += _imported.typemaps
}

# Re-expose each typemap as a scope carrying both the file's path and its
# parsed contents, so consumers get the configuration without re-reading
# the file themselves.
typemaps = []
foreach(typemap, _typemaps) {
  typemaps += [
    {
      filename = typemap
      config = read_file(typemap, "scope")
    },
  ]
}

# Empty by default; consumers of this file define its meaning.
# NOTE(review): presumably appended to generated component macro names by
# the mojom build rules — confirm against the consuming .gni.
component_macro_suffix = ""
|
27
utils/ipc/mojo/public/tools/bindings/compile_typescript.py
Normal file
27
utils/ipc/mojo/public/tools/bindings/compile_typescript.py
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
# Copyright 2019 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
_HERE_PATH = os.path.dirname(__file__)
|
||||||
|
_SRC_PATH = os.path.normpath(os.path.join(_HERE_PATH, '..', '..', '..', '..'))
|
||||||
|
|
||||||
|
sys.path.append(os.path.join(_SRC_PATH, 'third_party', 'node'))
|
||||||
|
import node
|
||||||
|
import node_modules
|
||||||
|
|
||||||
|
def main(argv):
  """Compile the TypeScript project named by --tsconfig_path.

  Runs the TypeScript compiler from the checked-in node_modules via node.

  Raises:
    RuntimeError: If the compiler produced any output, which indicates
        a compilation failure.
  """
  arg_parser = argparse.ArgumentParser()
  arg_parser.add_argument('--tsconfig_path', required=True)
  options = arg_parser.parse_args(argv)

  compiler_output = node.RunNode(
      [node_modules.PathToTypescript(), '--project', options.tsconfig_path])
  if compiler_output:
    raise RuntimeError('Failed to compile Typescript: \n%s' % compiler_output)

if __name__ == '__main__':
  main(sys.argv[1:])
|
54
utils/ipc/mojo/public/tools/bindings/concatenate-files.py
Executable file
54
utils/ipc/mojo/public/tools/bindings/concatenate-files.py
Executable file
|
@ -0,0 +1,54 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2019 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
#
|
||||||
|
# This utility concatenates several files into one. On Unix-like systems
|
||||||
|
# it is equivalent to:
|
||||||
|
# cat file1 file2 file3 ...files... > target
|
||||||
|
#
|
||||||
|
# The reason for writing a separate utility is that 'cat' is not available
|
||||||
|
# on all supported build platforms, but Python is, and hence this provides
|
||||||
|
# us with an easy and uniform way of doing this on all platforms.
|
||||||
|
|
||||||
|
# for py2/py3 compatibility
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import optparse
|
||||||
|
|
||||||
|
|
||||||
|
def Concatenate(filenames):
  """Concatenate files.

  Args:
    filenames: Array of file names.
               The last name is the target; all earlier ones are sources.

  Returns:
    True, if the operation was successful.
  """
  # Need at least one source and the target.
  if len(filenames) < 2:
    print("An error occurred generating %s:\nNothing to do." % filenames[-1])
    return False

  try:
    # Binary mode so the bytes are copied through untouched on every platform.
    with open(filenames[-1], "wb") as target:
      for filename in filenames[:-1]:
        with open(filename, "rb") as current:
          target.write(current.read())
    return True
  except IOError as e:
    print("An error occurred when writing %s:\n%s" % (filenames[-1], e))
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def main():
  """Entry point: concatenate the files named on the command line."""
  opt_parser = optparse.OptionParser()
  opt_parser.set_usage("""Concatenate several files into one.
Equivalent to: cat file1 ... > target.""")
  _, filenames = opt_parser.parse_args()
  exit(0 if Concatenate(filenames) else 1)


if __name__ == "__main__":
  main()
|
|
@ -0,0 +1,73 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2018 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
"""Simple utility which concatenates a set of files into a single output file
|
||||||
|
while also stripping any goog.provide or goog.require lines. This allows us to
|
||||||
|
provide a very primitive sort of "compilation" without any extra toolchain
|
||||||
|
support and without having to modify otherwise compilable sources in the tree
|
||||||
|
which use these directives.
|
||||||
|
|
||||||
|
goog.provide lines are replaced with an equivalent invocation of
|
||||||
|
mojo.internal.exportModule, which accomplishes essentially the same thing in an
|
||||||
|
uncompiled context. A singular exception is made for the 'mojo.internal' export,
|
||||||
|
which is instead replaced with an inlined assignment to initialize the
|
||||||
|
namespace.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import optparse
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
_MOJO_INTERNAL_MODULE_NAME = "mojo.internal"
_MOJO_EXPORT_MODULE_SYMBOL = "mojo.internal.exportModule"


def FilterLine(filename, line, output):
  """Copy one line of JS to |output|, rewriting Closure directives.

  goog.require lines are dropped entirely. goog.provide lines are rewritten
  to mojo.internal.exportModule calls -- except for the 'mojo.internal'
  module itself, which becomes an inline namespace initialization. Every
  other line is copied through unchanged.

  Args:
    filename: Name of the source file, used only for error reporting.
    line: The line of text to process (including any trailing newline).
    output: A writable file-like object that receives the result.
  """
  if line.startswith("goog.require"):
    return

  if line.startswith("goog.provide"):
    # Raw string with escaped dots and parens: previously this pattern was a
    # plain string with the invalid escape '\(' (a SyntaxWarning on modern
    # Python 3) and unescaped '.' wildcards. The startswith() guard above
    # already pins the literal prefix, so behavior is unchanged.
    match = re.match(r"goog\.provide\('([^']+)'\);", line)
    if not match:
      print("Invalid goog.provide line in %s:\n%s" % (filename, line))
      exit(1)

    module_name = match.group(1)
    if module_name == _MOJO_INTERNAL_MODULE_NAME:
      output.write("self.mojo = { internal: {} };")
    else:
      output.write("%s('%s');\n" % (_MOJO_EXPORT_MODULE_SYMBOL, module_name))
    return

  output.write(line)
|
||||||
|
|
||||||
|
def ConcatenateAndReplaceExports(filenames):
  """Concatenate the source files into the last-named output file, passing
  every line through FilterLine to rewrite Closure directives.

  Args:
    filenames: Source file names followed by the output file name.

  Returns:
    True on success, False otherwise.
  """
  if len(filenames) < 2:
    print("At least two filenames (one input and the output) are required.")
    return False

  sources, destination = filenames[:-1], filenames[-1]
  try:
    with open(destination, "w") as target:
      for source in sources:
        with open(source, "r") as infile:
          for line in infile:
            FilterLine(source, line, target)
    return True
  except IOError as e:
    print("Error generating %s\n: %s" % (destination, e))
    return False
|
||||||
|
|
||||||
|
def main():
  """Parse command-line file names and run the filtered concatenation."""
  opt_parser = optparse.OptionParser()
  opt_parser.set_usage("""file1 [file2...] outfile
Concatenate several files into one, stripping Closure provide and
require directives along the way.""")
  _, filenames = opt_parser.parse_args()
  exit(0 if ConcatenateAndReplaceExports(filenames) else 1)


if __name__ == "__main__":
  main()
|
36
utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
Executable file
36
utils/ipc/mojo/public/tools/bindings/format_typemap_generator_args.py
Executable file
|
@ -0,0 +1,36 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# This utility converts mojom dependencies into their corresponding typemap
|
||||||
|
# paths and formats them to be consumed by generate_type_mappings.py.
|
||||||
|
|
||||||
|
|
||||||
|
def FormatTypemap(typemap_filename):
  """Yield generate_type_mappings.py arguments from one .typemap file.

  Args:
    typemap_filename: Path to a .typemap file. Such a file is valid Python
        after a minor alteration (re-joining values split after '=').

  Yields:
    Strings of the form 'key=value' for each entry in the typemap's
    public_headers, traits_headers and type_mappings lists.
  """
  # A simple typemap is valid Python with a minor alteration.
  with open(typemap_filename) as f:
    typemap_content = f.read().replace('=\n', '=')
  typemap = {}
  # Use the exec() call form: the bare 'exec ... in ...' statement is
  # Python-2-only syntax and a SyntaxError on Python 3, contradicting the
  # file's py2/py3-compat intent (from __future__ import print_function).
  exec(typemap_content, typemap)

  for header in typemap.get('public_headers', []):
    yield 'public_headers=%s' % header
  for header in typemap.get('traits_headers', []):
    yield 'traits_headers=%s' % header
  for header in typemap.get('type_mappings', []):
    yield 'type_mappings=%s' % header
|
||||||
|
|
||||||
|
|
||||||
|
def main():
  """Print generator arguments for every typemap listed on the command line."""
  formatted = []
  for typemap_path in sys.argv[1:]:
    formatted.append(
        '--start-typemap %s' % ' '.join(FormatTypemap(typemap_path)))
  print(' '.join(formatted))


if __name__ == '__main__':
  sys.exit(main())
|
52
utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
Normal file
52
utils/ipc/mojo/public/tools/bindings/gen_data_files_list.py
Normal file
|
@ -0,0 +1,52 @@
|
||||||
|
# Copyright 2017 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Generates a list of all files in a directory.
|
||||||
|
|
||||||
|
This script takes in a directory and an output file name as input.
|
||||||
|
It then reads the directory and creates a list of all file names
|
||||||
|
in that directory. The list is written to the output file.
|
||||||
|
There is also an option to pass in '-p' or '--pattern'
|
||||||
|
which will check each file name against a regular expression
|
||||||
|
pattern that is passed in. Only files which match the regex
|
||||||
|
will be written to the list.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from cStringIO import StringIO
|
||||||
|
from optparse import OptionParser
|
||||||
|
|
||||||
|
sys.path.insert(
|
||||||
|
0,
|
||||||
|
os.path.join(
|
||||||
|
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||||
|
|
||||||
|
from mojom.generate.generator import WriteFile
|
||||||
|
|
||||||
|
|
||||||
|
def main():
  """Lists files in a directory, one per line, and writes the list to a file.

  Only names matching the --pattern regular expression (matched from the
  start of the name; default "." accepts everything) are included.
  """
  opt_parser = OptionParser()
  opt_parser.add_option('-d', '--directory', help='Read files from DIRECTORY')
  opt_parser.add_option('-o', '--output', help='Write list to FILE')
  opt_parser.add_option('-p',
                        '--pattern',
                        help='Only reads files that name matches PATTERN',
                        default=".")
  options, _ = opt_parser.parse_args()
  name_matches = re.compile(options.pattern).match
  matching_names = [name for name in os.listdir(options.directory)
                    if name_matches(name)]

  # Build the newline-terminated listing in memory, then write it in one go
  # so WriteFile can skip the write when the content is unchanged.
  listing = StringIO()
  for name in matching_names:
    print(name, file=listing)

  WriteFile(listing.getvalue(), options.output)
  listing.close()


if __name__ == '__main__':
  sys.exit(main())
|
187
utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
Executable file
187
utils/ipc/mojo/public/tools/bindings/generate_type_mappings.py
Executable file
|
@ -0,0 +1,187 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2016 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Generates a JSON typemap from its command-line arguments and dependencies.
|
||||||
|
|
||||||
|
Each typemap should be specified in an command-line argument of the form
|
||||||
|
key=value, with an argument of "--start-typemap" preceding each typemap.
|
||||||
|
|
||||||
|
For example,
|
||||||
|
generate_type_mappings.py --output=foo.typemap --start-typemap \\
|
||||||
|
public_headers=foo.h traits_headers=foo_traits.h \\
|
||||||
|
type_mappings=mojom.Foo=FooImpl
|
||||||
|
|
||||||
|
generates a foo.typemap containing
|
||||||
|
{
|
||||||
|
"c++": {
|
||||||
|
"mojom.Foo": {
|
||||||
|
"typename": "FooImpl",
|
||||||
|
"traits_headers": [
|
||||||
|
"foo_traits.h"
|
||||||
|
],
|
||||||
|
"public_headers": [
|
||||||
|
"foo.h"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Then,
|
||||||
|
generate_type_mappings.py --dependency foo.typemap --output=bar.typemap \\
|
||||||
|
--start-typemap public_headers=bar.h traits_headers=bar_traits.h \\
|
||||||
|
type_mappings=mojom.Bar=BarImpl
|
||||||
|
|
||||||
|
generates a bar.typemap containing
|
||||||
|
{
|
||||||
|
"c++": {
|
||||||
|
"mojom.Bar": {
|
||||||
|
"typename": "BarImpl",
|
||||||
|
"traits_headers": [
|
||||||
|
"bar_traits.h"
|
||||||
|
],
|
||||||
|
"public_headers": [
|
||||||
|
"bar.h"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"mojom.Foo": {
|
||||||
|
"typename": "FooImpl",
|
||||||
|
"traits_headers": [
|
||||||
|
"foo_traits.h"
|
||||||
|
],
|
||||||
|
"public_headers": [
|
||||||
|
"foo.h"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
sys.path.insert(
|
||||||
|
0,
|
||||||
|
os.path.join(
|
||||||
|
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||||
|
|
||||||
|
from mojom.generate.generator import WriteFile
|
||||||
|
|
||||||
|
def ReadTypemap(path):
  """Loads the JSON typemap at |path| and returns its 'c++' section.

  Raises KeyError if the file has no 'c++' entry.
  """
  with open(path) as typemap_file:
    contents = json.load(typemap_file)
  return contents['c++']
|
||||||
|
|
||||||
|
|
||||||
|
def ParseTypemapArgs(args):
  """Merges all --start-typemap-delimited argument groups into one dict.

  |args| is the raw argument list; joining it with newlines and splitting on
  the '--start-typemap\n' marker yields one key=value section per typemap,
  each of which ParseTypemap turns into {mojom type: config} entries.
  """
  joined = '\n'.join(args)
  merged = {}
  for section in joined.split('--start-typemap\n'):
    if not section:
      continue
    merged.update(ParseTypemap(section))
  return merged
|
||||||
|
|
||||||
|
|
||||||
|
def LoadCppTypemapConfig(path):
  """Reads a GN-emitted JSON typemap config into per-mojom-type dicts.

  Returns a mapping from mojom type name to its C++ binding configuration.
  NOTE(review): 'public_headers' is filled from the config's 'traits_headers'
  and 'traits_headers' from 'traits_private_headers' — this mirrors the
  upstream naming shift between the two formats; confirm before "fixing".
  """
  with open(path) as config_file:
    raw_configs = json.load(config_file)

  configs = {}
  for config in raw_configs:
    for entry in config['types']:
      configs[entry['mojom']] = {
          'typename': entry['cpp'],
          'public_headers': config.get('traits_headers', []),
          'traits_headers': config.get('traits_private_headers', []),
          'copyable_pass_by_value': entry.get('copyable_pass_by_value',
                                              False),
          'force_serialize': entry.get('force_serialize', False),
          'hashable': entry.get('hashable', False),
          'move_only': entry.get('move_only', False),
          'nullable_is_same_type': entry.get('nullable_is_same_type', False),
          # Never settable through this config format.
          'non_copyable_non_movable': False,
      }
  return configs
|
||||||
|
|
||||||
|
|
||||||
|
def ParseTypemap(typemap):
  """Parses one key=value typemap section into {mojom type: config dict}.

  Recognized keys are type_mappings, public_headers and traits_headers
  (anything else raises KeyError). Each type_mappings entry has the form
  mojom.Type=NativeType[attr1,attr2].
  """
  values = {'type_mappings': [], 'public_headers': [], 'traits_headers': []}
  for line in typemap.split('\n'):
    if not line:
      continue
    key, _, value = line.partition('=')
    # Leading slashes come from GN absolute labels; strip them.
    values[key].append(value.lstrip('/'))

  mapping_pattern = \
      re.compile(r"""^([^=]+)   # mojom type
                     =
                     ([^[]+)    # native type
                     (?:\[([^]]+)\])?$  # optional attribute in square brackets
                 """, re.X)

  result = {}
  for entry in values['type_mappings']:
    parsed = mapping_pattern.match(entry)
    assert parsed, (
        "Cannot parse entry in the \"type_mappings\" section: %s" % entry)

    mojom_type, native_type, attribute_text = parsed.groups()
    attributes = attribute_text.split(',') if attribute_text else []

    assert mojom_type not in result, (
        "Cannot map multiple native types (%s, %s) to the same mojom type: %s" %
        (result[mojom_type]['typename'], native_type, mojom_type))

    result[mojom_type] = {
        'public_headers': values['public_headers'],
        'traits_headers': values['traits_headers'],
        'typename': native_type,

        # Attributes supported for individual mappings.
        'copyable_pass_by_value': 'copyable_pass_by_value' in attributes,
        'force_serialize': 'force_serialize' in attributes,
        'hashable': 'hashable' in attributes,
        'move_only': 'move_only' in attributes,
        'non_copyable_non_movable': 'non_copyable_non_movable' in attributes,
        'nullable_is_same_type': 'nullable_is_same_type' in attributes,
    }
  return result
|
||||||
|
|
||||||
|
|
||||||
|
def main():
  """Builds the output typemap from CLI args, configs and dependencies.

  Precedence: inline --start-typemap groups, then --cpp-typemap-config
  entries, then each --dependency typemap (later updates overwrite earlier
  entries for the same mojom type).
  """
  parser = argparse.ArgumentParser(
      description=__doc__,
      formatter_class=argparse.RawDescriptionHelpFormatter)
  parser.add_argument(
      '--dependency',
      type=str,
      action='append',
      default=[],
      help=('A path to another JSON typemap to merge into the output. '
            'This may be repeated to merge multiple typemaps.'))
  parser.add_argument(
      '--cpp-typemap-config',
      type=str,
      action='store',
      dest='cpp_config_path',
      # Fixed: the two adjacent string literals previously concatenated as
      # "emitted byGN" (missing separating space).
      help=('A path to a single JSON-formatted typemap config as emitted by '
            'GN when processing a mojom_cpp_typemap build rule.'))
  parser.add_argument('--output',
                      type=str,
                      required=True,
                      help='The path to which to write the generated JSON.')
  # Unknown args are the --start-typemap key=value groups.
  params, typemap_params = parser.parse_known_args()
  typemaps = ParseTypemapArgs(typemap_params)
  if params.cpp_config_path:
    typemaps.update(LoadCppTypemapConfig(params.cpp_config_path))
  missing = [path for path in params.dependency if not os.path.exists(path)]
  if missing:
    raise IOError('Missing dependencies: %s' % ', '.join(missing))
  for path in params.dependency:
    typemaps.update(ReadTypemap(path))

  WriteFile(json.dumps({'c++': typemaps}, indent=2), params.output)


if __name__ == '__main__':
  main()
|
1941
utils/ipc/mojo/public/tools/bindings/mojom.gni
Normal file
1941
utils/ipc/mojo/public/tools/bindings/mojom.gni
Normal file
File diff suppressed because it is too large
Load diff
390
utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
Executable file
390
utils/ipc/mojo/public/tools/bindings/mojom_bindings_generator.py
Executable file
|
@ -0,0 +1,390 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
"""The frontend for the Mojo bindings system."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
import hashlib
|
||||||
|
import importlib
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import pprint
|
||||||
|
import re
|
||||||
|
import struct
|
||||||
|
import sys
|
||||||
|
|
||||||
|
# Disable lint check for finding modules:
|
||||||
|
# pylint: disable=F0401
|
||||||
|
|
||||||
|
def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  current = os.path.abspath(__file__)
  tail = None
  while tail != dirname:
    current, tail = os.path.split(current)
    # An empty tail means we walked past the filesystem root without
    # finding |dirname|.
    assert tail
  return current
|
||||||
|
|
||||||
|
|
||||||
|
sys.path.insert(
|
||||||
|
0,
|
||||||
|
os.path.join(
|
||||||
|
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "mojom"))
|
||||||
|
|
||||||
|
from mojom.error import Error
|
||||||
|
import mojom.fileutil as fileutil
|
||||||
|
from mojom.generate.module import Module
|
||||||
|
from mojom.generate import template_expander
|
||||||
|
from mojom.generate import translate
|
||||||
|
from mojom.generate.generator import WriteFile
|
||||||
|
|
||||||
|
sys.path.append(
|
||||||
|
os.path.join(_GetDirAbove("mojo"), "tools", "diagnosis"))
|
||||||
|
import crbug_1001171
|
||||||
|
|
||||||
|
|
||||||
|
# Maps each user-facing generator name (as given to -g/--generators,
# lowercased) to the module under generators/ that implements it.
_BUILTIN_GENERATORS = {
    "c++": "mojom_cpp_generator",
    "javascript": "mojom_js_generator",
    "java": "mojom_java_generator",
    "mojolpm": "mojom_mojolpm_generator",
    "typescript": "mojom_ts_generator",
}
|
||||||
|
|
||||||
|
|
||||||
|
def LoadGenerators(generators_string):
  """Imports the generator module for each comma-separated generator name.

  Returns {language: module}. An empty/falsy |generators_string| yields an
  empty list; an unknown generator name prints an error and exits.
  """
  if not generators_string:
    return []  # No generators.

  loaded = {}
  for raw_name in generators_string.split(","):
    name = raw_name.strip()
    language = name.lower()
    if language not in _BUILTIN_GENERATORS:
      print("Unknown generator name %s" % name)
      sys.exit(1)
    loaded[language] = importlib.import_module(
        "generators.%s" % _BUILTIN_GENERATORS[language])
  return loaded
|
||||||
|
|
||||||
|
|
||||||
|
def MakeImportStackMessage(imported_filename_stack):
  """Make a (human-readable) message listing a chain of imports. (Returned
  string begins with a newline (if nonempty) and does not end with one.)"""
  lines = []
  for imported, importer in zip(imported_filename_stack[1:],
                                imported_filename_stack):
    lines.append("\n  %s was imported by %s" % (imported, importer))
  # Innermost import first.
  return ''.join(lines[::-1])
|
||||||
|
|
||||||
|
|
||||||
|
class RelativePath(object):
  """Represents a path relative to the source tree or generated output dir."""

  def __init__(self, path, source_root, output_dir):
    self.path = path
    # The root is whichever prefix the path actually lives under; the source
    # root is checked first.
    for candidate_root in (source_root, output_dir):
      if path.startswith(candidate_root):
        self.root = candidate_root
        break
    else:
      raise Exception("Invalid input path %s" % path)

  def relative_path(self):
    """Returns |path| relative to |root|, both resolved to absolute paths."""
    absolute = os.path.abspath(self.path)
    return os.path.relpath(absolute, os.path.abspath(self.root))
|
||||||
|
|
||||||
|
|
||||||
|
def _GetModulePath(path, output_dir):
|
||||||
|
return os.path.join(output_dir, path.relative_path() + '-module')
|
||||||
|
|
||||||
|
|
||||||
|
def ScrambleMethodOrdinals(interfaces, salt):
  """Assigns salted, pseudo-random ordinals to methods without explicit ones.

  |salt| must be bytes. Explicit ordinals are left untouched. Ordinals are
  unique within each interface; the attempt counter is shared across the
  interface's methods so successive methods continue the sequence.
  """
  used_ordinals = set()
  for interface in interfaces:
    attempt = 0
    used_ordinals.clear()
    for method in interface.methods:
      if method.explicit_ordinal is not None:
        continue
      while True:
        attempt = attempt + 1
        if attempt == 1000000:
          raise Exception("Could not generate %d method ordinals for %s" %
                          (len(interface.methods), interface.mojom_name))
        # Generate a scrambled method.ordinal value. The algorithm doesn't
        # have to be very strong, cryptographically. It just needs to be
        # non-trivial to guess the results without the secret salt, in order
        # to make it harder for a compromised process to send fake Mojo
        # messages.
        digest = hashlib.sha256(salt)
        digest.update(interface.mojom_name.encode('utf-8'))
        digest.update(str(attempt).encode('utf-8'))
        # First 4 digest bytes as a little-endian uint32, trimmed to 31 bits
        # so it always fits into a Java (signed) int.
        candidate = struct.unpack('<L', digest.digest()[:4])[0] & 0x7fffffff
        if candidate in used_ordinals:
          continue
        used_ordinals.add(candidate)
        method.ordinal = candidate
        method.ordinal_comment = (
            'The %s value is based on sha256(salt + "%s%d").' %
            (candidate, interface.mojom_name, attempt))
        break
|
||||||
|
|
||||||
|
|
||||||
|
def ReadFileContents(filename):
  """Returns the raw bytes of |filename|."""
  with open(filename, 'rb') as input_file:
    return input_file.read()
|
||||||
|
|
||||||
|
|
||||||
|
class MojomProcessor(object):
  """Takes parsed mojom modules and generates language bindings from them.

  Attributes:
    _processed_files: {Dict[str, mojom.generate.module.Module]} Mapping from
        relative mojom filename paths to the module AST for that mojom file.
  """
  def __init__(self, should_generate):
    # Predicate deciding whether bindings are emitted for a given relative
    # mojom path (imports are loaded but may not be generated).
    self._should_generate = should_generate
    self._processed_files = {}
    # {language: {mojom type: typemap config}} merged from LoadTypemaps.
    self._typemap = {}

  def LoadTypemaps(self, typemaps):
    """Merges the JSON typemap files in |typemaps| into self._typemap."""
    # Support some very simple single-line comments in typemap JSON.
    comment_expr = r"^\s*//.*$"
    def no_comments(line):
      return not re.match(comment_expr, line)
    for filename in typemaps:
      with open(filename) as f:
        # NOTE(review): this rebinds the |typemaps| parameter to the parsed
        # dict; harmless because the outer loop already holds the iterator.
        typemaps = json.loads("".join(filter(no_comments, f.readlines())))
        for language, typemap in typemaps.items():
          language_map = self._typemap.get(language, {})
          language_map.update(typemap)
          self._typemap[language] = language_map
    if 'c++' in self._typemap:
      # mojolpm reuses the C++ typemaps (shares the same dict object).
      self._typemap['mojolpm'] = self._typemap['c++']

  def _GenerateModule(self, args, remaining_args, generator_modules,
                      rel_filename, imported_filename_stack):
    """Loads the serialized module for |rel_filename| and runs each generator.

    Returns the loaded Module; results are memoized in _processed_files.
    """
    # Return the already-generated module.
    if rel_filename.path in self._processed_files:
      return self._processed_files[rel_filename.path]

    if rel_filename.path in imported_filename_stack:
      print("%s: Error: Circular dependency" % rel_filename.path + \
          MakeImportStackMessage(imported_filename_stack + [rel_filename.path]))
      sys.exit(1)

    # The parse step serialized the AST next to the output; reload it here.
    module_path = _GetModulePath(rel_filename, args.output_dir)
    with open(module_path, 'rb') as f:
      module = Module.Load(f)

    if args.scrambled_message_id_salt_paths:
      # Concatenate every salt file's raw bytes into one salt value.
      salt = b''.join(
          map(ReadFileContents, args.scrambled_message_id_salt_paths))
      ScrambleMethodOrdinals(module.interfaces, salt)

    if self._should_generate(rel_filename.path):
      for language, generator_module in generator_modules.items():
        generator = generator_module.Generator(
            module, args.output_dir, typemap=self._typemap.get(language, {}),
            variant=args.variant, bytecode_path=args.bytecode_path,
            for_blink=args.for_blink,
            js_bindings_mode=args.js_bindings_mode,
            js_generate_struct_deserializers=\
                args.js_generate_struct_deserializers,
            export_attribute=args.export_attribute,
            export_header=args.export_header,
            generate_non_variant_code=args.generate_non_variant_code,
            support_lazy_serialization=args.support_lazy_serialization,
            disallow_native_types=args.disallow_native_types,
            disallow_interfaces=args.disallow_interfaces,
            generate_message_ids=args.generate_message_ids,
            generate_fuzzing=args.generate_fuzzing,
            enable_kythe_annotations=args.enable_kythe_annotations,
            extra_cpp_template_paths=args.extra_cpp_template_paths,
            generate_extra_cpp_only=args.generate_extra_cpp_only)
        # Forward only the extra CLI flags carrying this generator's prefix.
        filtered_args = []
        if hasattr(generator_module, 'GENERATOR_PREFIX'):
          prefix = '--' + generator_module.GENERATOR_PREFIX + '_'
          filtered_args = [arg for arg in remaining_args
                           if arg.startswith(prefix)]
        generator.GenerateFiles(filtered_args)

    # Save result.
    self._processed_files[rel_filename.path] = module
    return module
|
||||||
|
|
||||||
|
|
||||||
|
def _Generate(args, remaining_args):
  """Entry point for the 'generate' sub-command.

  Normalizes |args| in place, then generates bindings for every mojom file
  listed in args.filename (plus --filelist contents). Returns 0 on success.
  """
  # "none" is the CLI spelling for "no variant".
  if args.variant == "none":
    args.variant = None

  # Each -I entry may be "dir" or "dir:source_root"; rewrite them in place
  # as RelativePath objects.
  for idx, import_dir in enumerate(args.import_directories):
    tokens = import_dir.split(":")
    if len(tokens) >= 2:
      args.import_directories[idx] = RelativePath(tokens[0], tokens[1],
                                                  args.output_dir)
    else:
      args.import_directories[idx] = RelativePath(tokens[0], args.depth,
                                                  args.output_dir)
  generator_modules = LoadGenerators(args.generators_string)

  fileutil.EnsureDirectoryExists(args.output_dir)

  # Only files explicitly listed on the command line get bindings generated;
  # anything else reached through imports is merely loaded.
  processor = MojomProcessor(lambda filename: filename in args.filename)
  processor.LoadTypemaps(set(args.typemaps))

  if args.filelist:
    with open(args.filelist) as f:
      args.filename.extend(f.read().split())

  for filename in args.filename:
    processor._GenerateModule(
        args, remaining_args, generator_modules,
        RelativePath(filename, args.depth, args.output_dir), [])

  return 0
|
||||||
|
|
||||||
|
|
||||||
|
def _Precompile(args, _):
  """Entry point for the 'precompile' sub-command.

  Precompiles the templates of every built-in generator into
  args.output_dir. Returns 0 on success.
  """
  all_generator_names = ",".join(_BUILTIN_GENERATORS.keys())
  template_expander.PrecompileTemplates(
      LoadGenerators(all_generator_names), args.output_dir)
  return 0
|
||||||
|
|
||||||
|
|
||||||
|
def main():
  """Parses the command line and dispatches to the chosen sub-command.

  Sub-commands: 'generate' (emit bindings from serialized mojom modules) and
  'precompile' (precompile generator templates). Fixes in this revision:
  several multi-line help strings were missing the space at the junction of
  adjacent string literals ("used asa salt", "specifiedmore than once",
  "to formthe salt value", "generateextra_cpp_template").
  """
  parser = argparse.ArgumentParser(
      description="Generate bindings from mojom files.")
  parser.add_argument("--use_bundled_pylibs", action="store_true",
                      help="use Python modules bundled in the SDK")
  parser.add_argument(
      "-o",
      "--output_dir",
      dest="output_dir",
      default=".",
      help="output directory for generated files")

  subparsers = parser.add_subparsers()

  generate_parser = subparsers.add_parser(
      "generate", description="Generate bindings from mojom files.")
  generate_parser.add_argument("filename", nargs="*",
                               help="mojom input file")
  generate_parser.add_argument("--filelist", help="mojom input file list")
  generate_parser.add_argument("-d", "--depth", dest="depth", default=".",
                               help="depth from source root")
  generate_parser.add_argument("-g",
                               "--generators",
                               dest="generators_string",
                               metavar="GENERATORS",
                               default="c++,javascript,java,mojolpm",
                               help="comma-separated list of generators")
  generate_parser.add_argument(
      "--gen_dir", dest="gen_directories", action="append", metavar="directory",
      default=[], help="add a directory to be searched for the syntax trees.")
  generate_parser.add_argument(
      "-I", dest="import_directories", action="append", metavar="directory",
      default=[],
      help="add a directory to be searched for import files. The depth from "
      "source root can be specified for each import by appending it after "
      "a colon")
  generate_parser.add_argument("--typemap", action="append", metavar="TYPEMAP",
                               default=[], dest="typemaps",
                               help="apply TYPEMAP to generated output")
  generate_parser.add_argument("--variant", dest="variant", default=None,
                               help="output a named variant of the bindings")
  generate_parser.add_argument(
      "--bytecode_path", required=True, help=(
          "the path from which to load template bytecode; to generate template "
          "bytecode, run %s precompile BYTECODE_PATH" % os.path.basename(
              sys.argv[0])))
  generate_parser.add_argument("--for_blink", action="store_true",
                               help="Use WTF types as generated types for mojo "
                               "string/array/map.")
  generate_parser.add_argument(
      "--js_bindings_mode", choices=["new", "old"], default="old",
      help="This option only affects the JavaScript bindings. The value could "
      "be \"new\" to generate new-style lite JS bindings in addition to the "
      "old, or \"old\" to only generate old bindings.")
  generate_parser.add_argument(
      "--js_generate_struct_deserializers", action="store_true",
      help="Generate javascript deserialize methods for structs in "
      "mojom-lite.js file")
  generate_parser.add_argument(
      "--export_attribute", default="",
      help="Optional attribute to specify on class declaration to export it "
      "for the component build.")
  generate_parser.add_argument(
      "--export_header", default="",
      help="Optional header to include in the generated headers to support the "
      "component build.")
  generate_parser.add_argument(
      "--generate_non_variant_code", action="store_true",
      help="Generate code that is shared by different variants.")
  generate_parser.add_argument(
      "--scrambled_message_id_salt_path",
      dest="scrambled_message_id_salt_paths",
      help="If non-empty, the path to a file whose contents should be used as "
      "a salt for generating scrambled message IDs. If this switch is "
      "specified more than once, the contents of all salt files are "
      "concatenated to form the salt value.", default=[], action="append")
  generate_parser.add_argument(
      "--support_lazy_serialization",
      help="If set, generated bindings will serialize lazily when possible.",
      action="store_true")
  generate_parser.add_argument(
      "--extra_cpp_template_paths",
      dest="extra_cpp_template_paths",
      action="append",
      metavar="path_to_template",
      default=[],
      help="Provide a path to a new template (.tmpl) that is used to generate "
      "additional C++ source/header files ")
  generate_parser.add_argument(
      "--generate_extra_cpp_only",
      help="If set and extra_cpp_template_paths provided, will only generate "
      "extra_cpp_template related C++ bindings",
      action="store_true")
  generate_parser.add_argument(
      "--disallow_native_types",
      help="Disallows the [Native] attribute to be specified on structs or "
      "enums within the mojom file.", action="store_true")
  generate_parser.add_argument(
      "--disallow_interfaces",
      help="Disallows interface definitions within the mojom file. It is an "
      "error to specify this flag when processing a mojom file which defines "
      "any interface.", action="store_true")
  generate_parser.add_argument(
      "--generate_message_ids",
      help="Generates only the message IDs header for C++ bindings. Note that "
      "this flag only matters if --generate_non_variant_code is also "
      "specified.", action="store_true")
  generate_parser.add_argument(
      "--generate_fuzzing",
      action="store_true",
      help="Generates additional bindings for fuzzing in JS.")
  generate_parser.add_argument(
      "--enable_kythe_annotations",
      action="store_true",
      help="Adds annotations for kythe metadata generation.")

  generate_parser.set_defaults(func=_Generate)

  precompile_parser = subparsers.add_parser("precompile",
      description="Precompile templates for the mojom bindings generator.")
  precompile_parser.set_defaults(func=_Precompile)

  # Unknown args are forwarded to the generators (GENERATOR_PREFIX filtering).
  args, remaining_args = parser.parse_known_args()
  return args.func(args, remaining_args)


if __name__ == "__main__":
  with crbug_1001171.DumpStateOnLookupError():
    sys.exit(main())
|
|
@ -0,0 +1,62 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from mojom_bindings_generator import MakeImportStackMessage
|
||||||
|
from mojom_bindings_generator import ScrambleMethodOrdinals
|
||||||
|
|
||||||
|
|
||||||
|
class FakeIface(object):
  """Minimal stand-in for a mojom interface AST node."""

  def __init__(self):
    # Tests fill these in before use.
    self.mojom_name = None
    self.methods = None
|
||||||
|
|
||||||
|
|
||||||
|
class FakeMethod(object):
  """Minimal stand-in for a mojom method AST node."""

  def __init__(self, explicit_ordinal=None):
    self.explicit_ordinal = explicit_ordinal
    # Mirrors the real AST: the effective ordinal starts out as the explicit
    # one (or None) until scrambling assigns a value.
    self.ordinal = explicit_ordinal
    self.ordinal_comment = None
|
||||||
|
|
||||||
|
|
||||||
|
class MojoBindingsGeneratorTest(unittest.TestCase):
  """Tests mojo_bindings_generator."""

  def testMakeImportStackMessage(self):
    """Tests MakeImportStackMessage()."""
    self.assertEqual(MakeImportStackMessage(["x"]), "")
    self.assertEqual(MakeImportStackMessage(["x", "y"]),
                     "\n  y was imported by x")
    self.assertEqual(MakeImportStackMessage(["x", "y", "z"]),
                     "\n  z was imported by y\n  y was imported by x")

  def testScrambleMethodOrdinals(self):
    """Tests ScrambleMethodOrdinals()."""
    iface = FakeIface()
    iface.mojom_name = 'RendererConfiguration'
    iface.methods = [FakeMethod() for _ in range(3)]
    iface.methods.append(FakeMethod(explicit_ordinal=42))
    ScrambleMethodOrdinals([iface], "foo".encode('utf-8'))
    # These next three values are hard-coded. If the generation algorithm
    # changes from being based on sha256(seed + interface.name + str(i)) then
    # these numbers will obviously need to change too.
    #
    # Note that hashlib.sha256('fooRendererConfiguration1').digest()[:4] is
    # '\xa5\xbc\xf9\xca' and that hex(1257880741) = '0x4af9bca5'. The
    # difference in 0x4a vs 0xca is because we only take 31 bits.
    self.assertEqual(iface.methods[0].ordinal, 1257880741)
    self.assertEqual(iface.methods[1].ordinal, 631133653)
    self.assertEqual(iface.methods[2].ordinal, 549336076)

    # Explicit method ordinals should not be scrambled.
    self.assertEqual(iface.methods[3].ordinal, 42)


if __name__ == "__main__":
  unittest.main()
|
119
utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
Executable file
119
utils/ipc/mojo/public/tools/bindings/mojom_types_downgrader.py
Executable file
|
@ -0,0 +1,119 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Downgrades *.mojom files to the old mojo types for remotes and receivers."""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import fnmatch
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
# List of patterns and replacements to match and use against the contents of a
|
||||||
|
# mojo file. Each replacement string will be used with Python string's format()
|
||||||
|
# function, so the '{}' substring is used to mark where the mojo type should go.
|
||||||
|
_MOJO_REPLACEMENTS = {
|
||||||
|
r'pending_remote': r'{}',
|
||||||
|
r'pending_receiver': r'{}&',
|
||||||
|
r'pending_associated_remote': r'associated {}',
|
||||||
|
r'pending_associated_receiver': r'associated {}&',
|
||||||
|
}
|
||||||
|
|
||||||
|
# Pre-compiled regular expression that matches against any of the replacements.
|
||||||
|
_REGEXP_PATTERN = re.compile(
|
||||||
|
r'|'.join(
|
||||||
|
['{}\s*<\s*(.*?)\s*>'.format(k) for k in _MOJO_REPLACEMENTS.keys()]),
|
||||||
|
flags=re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
|
def ReplaceFunction(match_object):
|
||||||
|
"""Returns the right replacement for the string matched against the regexp."""
|
||||||
|
for index, (match, repl) in enumerate(_MOJO_REPLACEMENTS.items(), 1):
|
||||||
|
if match_object.group(0).startswith(match):
|
||||||
|
return repl.format(match_object.group(index))
|
||||||
|
|
||||||
|
|
||||||
|
def DowngradeFile(path, output_dir=None):
  """Downgrades the mojom file specified by |path| to the old mojo types.

  Optionally pass |output_dir| to place the result under a separate output
  directory, preserving the relative path to the file included in |path|.
  """
  # Use a temporary file to dump the new contents after replacing the patterns.
  # delete=False is required: the temp file must outlive the 'with' block so
  # it can be moved into place below.
  with open(path) as src_mojo_file:
    with tempfile.NamedTemporaryFile(mode='w', delete=False) as tmp_mojo_file:
      tmp_contents = _REGEXP_PATTERN.sub(ReplaceFunction, src_mojo_file.read())
      tmp_mojo_file.write(tmp_contents)

  # Files should be placed in the desired output directory
  if output_dir:
    output_filepath = os.path.join(output_dir, os.path.basename(path))
    if not os.path.exists(output_dir):
      os.makedirs(output_dir)
  else:
    # No output directory: rewrite the source file in place.
    output_filepath = path

  # Write the new contents preserving the original file's attributes.
  shutil.copystat(path, tmp_mojo_file.name)
  shutil.move(tmp_mojo_file.name, output_filepath)

  # Make sure to "touch" the new file so that access, modify and change times
  # are always newer than the source file's, otherwise Modify time will be kept
  # as per the call to shutil.copystat(), causing unnecessary generations of the
  # output file in subsequent builds due to ninja considering it dirty.
  os.utime(output_filepath, None)
|
||||||
|
|
||||||
|
|
||||||
|
def DowngradeDirectory(path, output_dir=None):
  """Downgrades mojom files inside directory |path| to the old mojo types.

  Optionally pass |output_dir| to place the result under a separate output
  directory, preserving the relative path to the file included in |path|.
  """
  # We don't have recursive glob.glob() nor pathlib.Path.rglob() in Python 2.7,
  # so walk the tree by hand and filter on the file name.
  mojom_filepaths = [
      os.path.join(dir_path, filename)
      for dir_path, _, filenames in os.walk(path)
      for filename in fnmatch.filter(filenames, "*mojom")
  ]

  for mojom_path in mojom_filepaths:
    source_dir = os.path.dirname(os.path.abspath(mojom_path))
    # Mirror the file's absolute directory underneath |output_dir| (plain
    # string concatenation, so the full absolute path is reproduced below it).
    dest_dirpath = output_dir + source_dir if output_dir else source_dir
    DowngradeFile(mojom_path, dest_dirpath)
def DowngradePath(src_path, output_dir=None):
  """Downgrades the mojom files pointed by |src_path| to the old mojo types.

  Optionally pass |output_dir| to place the result under a separate output
  directory, preserving the relative path to the file included in |path|.
  """
  # Dispatch on what |src_path| points at; guard-style early returns.
  if os.path.isdir(src_path):
    DowngradeDirectory(src_path, output_dir)
    return
  if os.path.isfile(src_path):
    DowngradeFile(src_path, output_dir)
    return
  # Neither a file nor a directory: report the problem and abort non-zero.
  print(">>> {} not pointing to a valid file or directory".format(src_path))
  sys.exit(1)
def main():
  """Command-line entry point: parse the arguments and run the conversion."""
  arg_parser = argparse.ArgumentParser(
      description="Downgrade *.mojom files to use the old mojo types.")
  arg_parser.add_argument(
      "srcpath", help="path to the file or directory to apply the conversion")
  arg_parser.add_argument(
      "--outdir", help="the directory to place the converted file(s) under")
  options = arg_parser.parse_args()
  DowngradePath(options.srcpath, options.outdir)


if __name__ == "__main__":
  sys.exit(main())
57
utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
Executable file
57
utils/ipc/mojo/public/tools/bindings/validate_typemap_config.py
Executable file
|
@ -0,0 +1,57 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def CheckCppTypemapConfigs(target_name, config_filename, out_filename):
  """Validates a C++ typemap JSON config and writes an empty stamp file.

  Raises ValueError for an unknown top-level config key or a missing/empty
  'types' list, and IOError for an unknown key inside a type entry.  On
  success, |out_filename| is created (or truncated) as an empty build stamp.
  """
  supported_config_keys = frozenset((
      'types', 'traits_headers', 'traits_private_headers', 'traits_sources',
      'traits_deps', 'traits_public_deps'))
  supported_type_keys = frozenset((
      'mojom', 'cpp', 'copyable_pass_by_value', 'force_serialize', 'hashable',
      'move_only', 'nullable_is_same_type'))

  with open(config_filename, 'r') as config_file:
    configs = json.load(config_file)

  for config in configs:
    for key in config:
      if key not in supported_config_keys:
        raise ValueError('Invalid typemap property "%s" when processing %s' %
                         (key, target_name))

    types = config.get('types')
    if not types:
      raise ValueError('Typemap for %s must specify at least one type to map'
                       % target_name)

    for entry in types:
      for key in entry:
        if key not in supported_type_keys:
          raise IOError(
              'Invalid type property "%s" in typemap for "%s" on target %s' %
              (key, entry.get('mojom', '(unknown)'), target_name))

  # Create/empty the stamp file so the build system can track success.
  open(out_filename, 'w').close()
def main():
  """Entry point: expects exactly three positional command-line arguments."""
  arg_parser = argparse.ArgumentParser()
  # parse_known_args() lets us collect the free-form positionals ourselves.
  _, positional = arg_parser.parse_known_args()
  if len(positional) != 3:
    print('Usage: validate_typemap_config.py target_name config_filename '
          'stamp_filename')
    sys.exit(1)

  target_name, config_filename, stamp_filename = positional
  CheckCppTypemapConfigs(target_name, config_filename, stamp_filename)


if __name__ == '__main__':
  main()
14
utils/ipc/mojo/public/tools/mojom/README.md
Normal file
14
utils/ipc/mojo/public/tools/mojom/README.md
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
# The Mojom Parser
|
||||||
|
|
||||||
|
The Mojom format is an interface definition language (IDL) for describing
|
||||||
|
interprocess communication (IPC) messages and data types for use with the
|
||||||
|
low-level cross-platform
|
||||||
|
[Mojo IPC library](https://chromium.googlesource.com/chromium/src/+/master/mojo/public/c/system/README.md).
|
||||||
|
|
||||||
|
This directory consists of a `mojom` Python module, its tests, and supporting
|
||||||
|
command-line tools. The Python module implements the parser used by the
|
||||||
|
command-line tools and exposes an API to help external bindings generators emit
|
||||||
|
useful code from the parser's outputs.
|
||||||
|
|
||||||
|
TODO(https://crbug.com/1060464): Fill out this documentation once the library
|
||||||
|
and tools have stabilized.
|
170
utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
Executable file
170
utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility.py
Executable file
|
@ -0,0 +1,170 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Verifies backward-compatibility of mojom type changes.
|
||||||
|
|
||||||
|
Given a set of pre- and post-diff mojom file contents, and a root directory
|
||||||
|
for a project, this tool verifies that any changes to [Stable] mojom types are
|
||||||
|
backward-compatible with the previous version.
|
||||||
|
|
||||||
|
This can be used e.g. by a presubmit check to prevent developers from making
|
||||||
|
breaking changes to stable mojoms."""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import errno
|
||||||
|
import io
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import shutil
|
||||||
|
import six
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
|
||||||
|
from mojom.generate import module
|
||||||
|
from mojom.generate import translate
|
||||||
|
from mojom.parse import parser
|
||||||
|
|
||||||
|
|
||||||
|
class ParseError(Exception):
  """Raised when a mojom file in the delta (or a dependency) fails to parse."""
def _ValidateDelta(root, delta):
  """Parses all modified mojoms (including all transitive mojom dependencies,
  even if unmodified) to perform backward-compatibility checks on any types
  marked with the [Stable] attribute.

  Note that unlike the normal build-time parser in mojom_parser.py, this does
  not produce or rely on cached module translations, but instead parses the full
  transitive closure of a mojom's input dependencies all at once.
  """

  # First build a map of all files covered by the delta
  affected_files = set()
  old_files = {}
  new_files = {}
  for change in delta:
    # TODO(crbug.com/953884): Use pathlib once we're migrated fully to Python 3.
    filename = change['filename'].replace('\\', '/')
    affected_files.add(filename)
    # A missing 'old' means the file is added; a missing 'new' means deleted.
    if change['old']:
      old_files[filename] = change['old']
    if change['new']:
      new_files[filename] = change['new']

  # Parse and translate all mojoms relevant to the delta, including transitive
  # imports that weren't modified.
  unmodified_modules = {}

  def parseMojom(mojom, file_overrides, override_modules):
    # Memoized: a mojom already translated into either map is skipped.
    if mojom in unmodified_modules or mojom in override_modules:
      return

    contents = file_overrides.get(mojom)
    if contents:
      modules = override_modules
    else:
      # Not part of the delta: read the unmodified file from the source root.
      modules = unmodified_modules
      with io.open(os.path.join(root, mojom), encoding='utf-8') as f:
        contents = f.read()

    try:
      ast = parser.Parse(contents, mojom)
    except Exception as e:
      # Re-raise as ParseError while preserving the original traceback.
      six.reraise(
          ParseError,
          'encountered exception {0} while parsing {1}'.format(e, mojom),
          sys.exc_info()[2])
    # Recurse into imports first so every dependency is translated before this
    # module is.
    for imp in ast.import_list:
      parseMojom(imp.import_filename, file_overrides, override_modules)

    # Now that the transitive set of dependencies has been imported and parsed
    # above, translate each mojom AST into a Module so that all types are fully
    # defined and can be inspected.
    all_modules = {}
    all_modules.update(unmodified_modules)
    all_modules.update(override_modules)
    modules[mojom] = translate.OrderedModule(ast, mojom, all_modules)

  old_modules = {}
  for mojom in old_files.keys():
    parseMojom(mojom, old_files, old_modules)
  new_modules = {}
  for mojom in new_files.keys():
    parseMojom(mojom, new_files, new_modules)

  # At this point we have a complete set of translated Modules from both the
  # pre- and post-diff mojom contents. Now we can analyze backward-compatibility
  # of the deltas.
  #
  # Note that for backward-compatibility checks we only care about types which
  # were marked [Stable] before the diff. Types newly marked as [Stable] are not
  # checked.
  def collectTypes(modules):
    # Flattens every enum/struct/union/interface into one qualified-name map.
    types = {}
    for m in modules.values():
      for kinds in (m.enums, m.structs, m.unions, m.interfaces):
        for kind in kinds:
          types[kind.qualified_name] = kind
    return types

  old_types = collectTypes(old_modules)
  new_types = collectTypes(new_modules)

  # Collect any renamed types so they can be compared accordingly.
  renamed_types = {}
  for name, kind in new_types.items():
    # kind.attributes may be None, hence the short-circuit 'and'.
    old_name = kind.attributes and kind.attributes.get('RenamedFrom')
    if old_name:
      renamed_types[old_name] = name

  for qualified_name, kind in old_types.items():
    if not kind.stable:
      continue

    new_name = renamed_types.get(qualified_name, qualified_name)
    if new_name not in new_types:
      raise Exception(
          'Stable type %s appears to be deleted by this change. If it was '
          'renamed, please add a [RenamedFrom] attribute to the new type. This '
          'can be deleted by a subsequent change.' % qualified_name)

    if not new_types[new_name].IsBackwardCompatible(kind):
      raise Exception('Stable type %s appears to have changed in a way which '
                      'breaks backward-compatibility. Please fix!\n\nIf you '
                      'believe this assessment to be incorrect, please file a '
                      'Chromium bug against the "Internals>Mojo>Bindings" '
                      'component.' % qualified_name)
def Run(command_line, delta=None):
  """Runs the tool with the given command_line. Normally this will read the
  change description from stdin as a JSON-encoded list, but tests may pass a
  delta directly for convenience."""
  arg_parser = argparse.ArgumentParser(
      description='Verifies backward-compatibility of mojom type changes.',
      epilog="""
This tool reads a change description from stdin and verifies that all modified
[Stable] mojom types will retain backward-compatibility. The change description
must be a JSON-encoded list of objects, each with a "filename" key (path to a
changed mojom file, relative to ROOT); an "old" key whose value is a string of
the full file contents before the change, or null if the file is being added;
and a "new" key whose value is a string of the full file contents after the
change, or null if the file is being deleted.""")
  arg_parser.add_argument(
      '--src-root',
      required=True,
      action='store',
      metavar='ROOT',
      help='The root of the source tree in which the checked mojoms live.')

  # parse_known_args() tolerates extra flags it doesn't recognize.
  args, _ = arg_parser.parse_known_args(command_line)
  # Only read stdin when the caller (e.g. a unittest) didn't supply a delta.
  if not delta:
    delta = json.load(sys.stdin)
  _ValidateDelta(args.src_root, delta)


if __name__ == '__main__':
  Run(sys.argv[1:])
260
utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
Executable file
260
utils/ipc/mojo/public/tools/mojom/check_stable_mojom_compatibility_unittest.py
Executable file
|
@ -0,0 +1,260 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import check_stable_mojom_compatibility
|
||||||
|
|
||||||
|
from mojom.generate import module
|
||||||
|
|
||||||
|
|
||||||
|
class Change(object):
  """Helper to clearly define a mojom file delta to be analyzed."""

  def __init__(self, filename, old=None, new=None):
    """Describes one file in a delta.

    A None |old| means the file is being added; a None |new| means the file is
    being deleted; otherwise the file is being modified.
    """
    self.new = new
    self.old = old
    self.filename = filename
class UnchangedFile(Change):
  """A Change whose contents are identical before and after the delta."""

  def __init__(self, filename, contents):
    # Same contents on both sides marks the file as present but unmodified.
    super(UnchangedFile, self).__init__(filename, old=contents, new=contents)
||||||
|
class CheckStableMojomCompatibilityTest(unittest.TestCase):
  """Tests covering the behavior of the compatibility checking tool. Note that
  details of different compatibility checks and relevant failure modes are NOT
  covered by these tests. Those are instead covered by unittests in
  version_compatibility_unittest.py. Additionally, the tests which ensure a
  given set of [Stable] mojom definitions are indeed plausibly stable (i.e. they
  have no unstable dependencies) are covered by stable_attribute_unittest.py.

  These tests cover higher-level concerns of the compatibility checking tool,
  like file or symbol renames, changes spread over multiple files, etc."""

  def verifyBackwardCompatibility(self, changes):
    """Helper for implementing assertBackwardCompatible and
    assertNotBackwardCompatible.

    Materializes the pre-change files in a temporary source root, builds the
    JSON-style delta, and runs the real tool against them.  Raises whatever
    the tool raises when the delta is not backward-compatible.
    """

    temp_dir = tempfile.mkdtemp()
    for change in changes:
      if change.old:
        # Populate the old file on disk in our temporary fake source root
        file_path = os.path.join(temp_dir, change.filename)
        dir_path = os.path.dirname(file_path)
        if not os.path.exists(dir_path):
          os.makedirs(dir_path)
        with open(file_path, 'w') as f:
          f.write(change.old)

    # Only genuinely modified files enter the delta; UnchangedFile instances
    # exist on disk (above) but are deliberately excluded here.
    delta = []
    for change in changes:
      if change.old != change.new:
        delta.append({
            'filename': change.filename,
            'old': change.old,
            'new': change.new
        })

    try:
      check_stable_mojom_compatibility.Run(['--src-root', temp_dir],
                                           delta=delta)
    finally:
      # Always clean up the fake source root, even when the tool raises.
      shutil.rmtree(temp_dir)

  def assertBackwardCompatible(self, changes):
    self.verifyBackwardCompatibility(changes)

  def assertNotBackwardCompatible(self, changes):
    # An exception from the tool is the expected (passing) outcome here.
    try:
      self.verifyBackwardCompatibility(changes)
    except Exception:
      return

    raise Exception('Change unexpectedly passed a backward-compatibility check')

  def testBasicCompatibility(self):
    """Minimal smoke test to verify acceptance of a simple valid change."""
    self.assertBackwardCompatible([
        Change('foo/foo.mojom',
               old='[Stable] struct S {};',
               new='[Stable] struct S { [MinVersion=1] int32 x; };')
    ])

  def testBasicIncompatibility(self):
    """Minimal smoke test to verify rejection of a simple invalid change."""
    self.assertNotBackwardCompatible([
        Change('foo/foo.mojom',
               old='[Stable] struct S {};',
               new='[Stable] struct S { int32 x; };')
    ])

  def testIgnoreIfNotStable(self):
    """We don't care about types not marked [Stable]"""
    self.assertBackwardCompatible([
        Change('foo/foo.mojom',
               old='struct S {};',
               new='struct S { int32 x; };')
    ])

  def testRename(self):
    """We can do checks for renamed types."""
    self.assertBackwardCompatible([
        Change('foo/foo.mojom',
               old='[Stable] struct S {};',
               new='[Stable, RenamedFrom="S"] struct T {};')
    ])
    self.assertNotBackwardCompatible([
        Change('foo/foo.mojom',
               old='[Stable] struct S {};',
               new='[Stable, RenamedFrom="S"] struct T { int32 x; };')
    ])
    self.assertBackwardCompatible([
        Change('foo/foo.mojom',
               old='[Stable] struct S {};',
               new="""\
               [Stable, RenamedFrom="S"]
               struct T { [MinVersion=1] int32 x; };
               """)
    ])

  def testNewlyStable(self):
    """We don't care about types newly marked as [Stable]."""
    self.assertBackwardCompatible([
        Change('foo/foo.mojom',
               old='struct S {};',
               new='[Stable] struct S { int32 x; };')
    ])

  def testFileRename(self):
    """Make sure we can still do compatibility checks after a file rename."""
    self.assertBackwardCompatible([
        Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
        Change('bar/bar.mojom',
               old=None,
               new='[Stable] struct S { [MinVersion=1] int32 x; };')
    ])
    self.assertNotBackwardCompatible([
        Change('foo/foo.mojom', old='[Stable] struct S {};', new=None),
        Change('bar/bar.mojom', old=None, new='[Stable] struct S { int32 x; };')
    ])

  def testWithImport(self):
    """Ensure that cross-module dependencies do not break the compatibility
    checking tool."""
    self.assertBackwardCompatible([
        Change('foo/foo.mojom',
               old="""\
               module foo;
               [Stable] struct S {};
               """,
               new="""\
               module foo;
               [Stable] struct S { [MinVersion=2] int32 x; };
               """),
        Change('bar/bar.mojom',
               old="""\
               module bar;
               import "foo/foo.mojom";
               [Stable] struct T { foo.S s; };
               """,
               new="""\
               module bar;
               import "foo/foo.mojom";
               [Stable] struct T { foo.S s; [MinVersion=1] int32 y; };
               """)
    ])

  def testWithMovedDefinition(self):
    """If a definition moves from one file to another, we should still be able
    to check compatibility accurately."""
    self.assertBackwardCompatible([
        Change('foo/foo.mojom',
               old="""\
               module foo;
               [Stable] struct S {};
               """,
               new="""\
               module foo;
               """),
        Change('bar/bar.mojom',
               old="""\
               module bar;
               import "foo/foo.mojom";
               [Stable] struct T { foo.S s; };
               """,
               new="""\
               module bar;
               import "foo/foo.mojom";
               [Stable, RenamedFrom="foo.S"] struct S {
                 [MinVersion=2] int32 x;
               };
               [Stable] struct T { S s; [MinVersion=1] int32 y; };
               """)
    ])

    self.assertNotBackwardCompatible([
        Change('foo/foo.mojom',
               old="""\
               module foo;
               [Stable] struct S {};
               """,
               new="""\
               module foo;
               """),
        Change('bar/bar.mojom',
               old="""\
               module bar;
               import "foo/foo.mojom";
               [Stable] struct T { foo.S s; };
               """,
               new="""\
               module bar;
               import "foo/foo.mojom";
               [Stable, RenamedFrom="foo.S"] struct S { int32 x; };
               [Stable] struct T { S s; [MinVersion=1] int32 y; };
               """)
    ])

  def testWithUnmodifiedImport(self):
    """Unchanged files in the filesystem are still parsed by the compatibility
    checking tool if they're imported by a changed file."""
    self.assertBackwardCompatible([
        UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
        Change('bar/bar.mojom',
               old="""\
               module bar;
               import "foo/foo.mojom";
               [Stable] struct T { foo.S s; };
               """,
               new="""\
               module bar;
               import "foo/foo.mojom";
               [Stable] struct T { foo.S s; [MinVersion=1] int32 x; };
               """)
    ])

    self.assertNotBackwardCompatible([
        UnchangedFile('foo/foo.mojom', 'module foo; [Stable] struct S {};'),
        Change('bar/bar.mojom',
               old="""\
               module bar;
               import "foo/foo.mojom";
               [Stable] struct T { foo.S s; };
               """,
               new="""\
               module bar;
               import "foo/foo.mojom";
               [Stable] struct T { foo.S s; int32 x; };
               """)
    ])
90
utils/ipc/mojo/public/tools/mojom/const_unittest.py
Normal file
90
utils/ipc/mojo/public/tools/mojom/const_unittest.py
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
from mojom_parser_test_case import MojomParserTestCase
|
||||||
|
from mojom.generate import module as mojom
|
||||||
|
|
||||||
|
|
||||||
|
class ConstTest(MojomParserTestCase):
  """Tests constant parsing behavior."""

  def testLiteralInt(self):
    """An integer literal constant keeps its textual value after parsing."""
    a_mojom = 'a.mojom'
    self.WriteFile(a_mojom, 'const int32 k = 42;')
    self.ParseMojoms([a_mojom])
    a = self.LoadModule(a_mojom)
    self.assertEqual(1, len(a.constants))
    self.assertEqual('k', a.constants[0].mojom_name)
    # Literal values are stored as strings, not parsed numbers.
    self.assertEqual('42', a.constants[0].value)

  def testLiteralFloat(self):
    """A float literal constant keeps its textual value after parsing."""
    a_mojom = 'a.mojom'
    self.WriteFile(a_mojom, 'const float k = 42.5;')
    self.ParseMojoms([a_mojom])
    a = self.LoadModule(a_mojom)
    self.assertEqual(1, len(a.constants))
    self.assertEqual('k', a.constants[0].mojom_name)
    self.assertEqual('42.5', a.constants[0].value)

  def testLiteralString(self):
    """A string literal constant keeps its quoted textual value."""
    a_mojom = 'a.mojom'
    self.WriteFile(a_mojom, 'const string k = "woot";')
    self.ParseMojoms([a_mojom])
    a = self.LoadModule(a_mojom)
    self.assertEqual(1, len(a.constants))
    self.assertEqual('k', a.constants[0].mojom_name)
    # The surrounding quotes are preserved in the stored value.
    self.assertEqual('"woot"', a.constants[0].value)

  def testEnumConstant(self):
    """A constant can take its value from an enum field, qualified or not."""
    a_mojom = 'a.mojom'
    self.WriteFile(a_mojom, 'module a; enum E { kA = 41, kB };')
    b_mojom = 'b.mojom'
    self.WriteFile(
        b_mojom, """\
        import "a.mojom";
        const a.E kE1 = a.E.kB;

        // We also allow value names to be unqualified, implying scope from the
        // constant's type.
        const a.E kE2 = kB;
        """)
    self.ParseMojoms([a_mojom, b_mojom])
    a = self.LoadModule(a_mojom)
    b = self.LoadModule(b_mojom)
    self.assertEqual(1, len(a.enums))
    self.assertEqual('E', a.enums[0].mojom_name)
    self.assertEqual(2, len(b.constants))
    self.assertEqual('kE1', b.constants[0].mojom_name)
    # The constant's kind resolves to the imported enum type itself.
    self.assertEqual(a.enums[0], b.constants[0].kind)
    self.assertEqual(a.enums[0].fields[1], b.constants[0].value.field)
    self.assertEqual(42, b.constants[0].value.field.numeric_value)
    self.assertEqual('kE2', b.constants[1].mojom_name)
    self.assertEqual(a.enums[0].fields[1], b.constants[1].value.field)
    self.assertEqual(42, b.constants[1].value.field.numeric_value)

  def testConstantReference(self):
    """A constant may be defined in terms of another constant's value."""
    a_mojom = 'a.mojom'
    self.WriteFile(a_mojom, 'const int32 kA = 42; const int32 kB = kA;')
    self.ParseMojoms([a_mojom])
    a = self.LoadModule(a_mojom)
    self.assertEqual(2, len(a.constants))
    self.assertEqual('kA', a.constants[0].mojom_name)
    self.assertEqual('42', a.constants[0].value)
    self.assertEqual('kB', a.constants[1].mojom_name)
    # The reference is resolved to the referenced constant's value.
    self.assertEqual('42', a.constants[1].value)

  def testImportedConstantReference(self):
    """A constant may reference a constant defined in an imported module."""
    a_mojom = 'a.mojom'
    self.WriteFile(a_mojom, 'const int32 kA = 42;')
    b_mojom = 'b.mojom'
    self.WriteFile(b_mojom, 'import "a.mojom"; const int32 kB = kA;')
    self.ParseMojoms([a_mojom, b_mojom])
    a = self.LoadModule(a_mojom)
    b = self.LoadModule(b_mojom)
    self.assertEqual(1, len(a.constants))
    self.assertEqual(1, len(b.constants))
    self.assertEqual('kA', a.constants[0].mojom_name)
    self.assertEqual('42', a.constants[0].value)
    self.assertEqual('kB', b.constants[0].mojom_name)
    self.assertEqual('42', b.constants[0].value)
92
utils/ipc/mojo/public/tools/mojom/enum_unittest.py
Normal file
92
utils/ipc/mojo/public/tools/mojom/enum_unittest.py
Normal file
|
@ -0,0 +1,92 @@
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
from mojom_parser_test_case import MojomParserTestCase
|
||||||
|
|
||||||
|
|
||||||
|
class EnumTest(MojomParserTestCase):
  """Tests enum parsing behavior."""

  def testExplicitValues(self):
    """Verifies basic parsing of assigned integral values."""
    types = self.ExtractTypes('enum E { kFoo=0, kBar=2, kBaz };')
    self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
    self.assertEqual(0, types['E'].fields[0].numeric_value)
    self.assertEqual('kBar', types['E'].fields[1].mojom_name)
    self.assertEqual(2, types['E'].fields[1].numeric_value)
    self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
    # An unassigned field continues from the previous explicit value.
    self.assertEqual(3, types['E'].fields[2].numeric_value)

  def testImplicitValues(self):
    """Verifies basic automatic assignment of integral values at parse time."""
    types = self.ExtractTypes('enum E { kFoo, kBar, kBaz };')
    self.assertEqual('kFoo', types['E'].fields[0].mojom_name)
    self.assertEqual(0, types['E'].fields[0].numeric_value)
    self.assertEqual('kBar', types['E'].fields[1].mojom_name)
    self.assertEqual(1, types['E'].fields[1].numeric_value)
    self.assertEqual('kBaz', types['E'].fields[2].mojom_name)
    self.assertEqual(2, types['E'].fields[2].numeric_value)

  def testSameEnumReference(self):
    """Verifies that an enum value can be assigned from the value of another
    field within the same enum."""
    types = self.ExtractTypes('enum E { kA, kB, kFirst=kA };')
    self.assertEqual('kA', types['E'].fields[0].mojom_name)
    self.assertEqual(0, types['E'].fields[0].numeric_value)
    self.assertEqual('kB', types['E'].fields[1].mojom_name)
    self.assertEqual(1, types['E'].fields[1].numeric_value)
    self.assertEqual('kFirst', types['E'].fields[2].mojom_name)
    # kFirst aliases kA's value.
    self.assertEqual(0, types['E'].fields[2].numeric_value)

  def testSameModuleOtherEnumReference(self):
    """Verifies that an enum value can be assigned from the value of a field
    in another enum within the same module."""
    types = self.ExtractTypes('enum E { kA, kB }; enum F { kA = E.kB };')
    self.assertEqual(1, types['F'].fields[0].numeric_value)

  def testImportedEnumReference(self):
    """Verifies that an enum value can be assigned from the value of a field
    in another enum within a different module."""
    a_mojom = 'a.mojom'
    self.WriteFile(a_mojom, 'module a; enum E { kFoo=42, kBar };')
    b_mojom = 'b.mojom'
    self.WriteFile(b_mojom,
                   'module b; import "a.mojom"; enum F { kFoo = a.E.kBar };')
    self.ParseMojoms([a_mojom, b_mojom])
    b = self.LoadModule(b_mojom)

    # a.E.kBar follows a.E.kFoo=42, hence 43.
    self.assertEqual('F', b.enums[0].mojom_name)
    self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
    self.assertEqual(43, b.enums[0].fields[0].numeric_value)

  def testConstantReference(self):
    """Verifies that an enum value can be assigned from the value of an
    integral constant within the same module."""
    types = self.ExtractTypes('const int32 kFoo = 42; enum E { kA = kFoo };')
    self.assertEqual(42, types['E'].fields[0].numeric_value)

  def testInvalidConstantReference(self):
    """Verifies that enum values cannot be assigned from the value of
    non-integral constants."""
    # assertRaisesRegexp is the Python 2 spelling of assertRaisesRegex.
    with self.assertRaisesRegexp(ValueError, 'not an integer'):
      self.ExtractTypes('const float kFoo = 1.0; enum E { kA = kFoo };')
    with self.assertRaisesRegexp(ValueError, 'not an integer'):
      self.ExtractTypes('const double kFoo = 1.0; enum E { kA = kFoo };')
    with self.assertRaisesRegexp(ValueError, 'not an integer'):
      self.ExtractTypes('const string kFoo = "lol"; enum E { kA = kFoo };')

  def testImportedConstantReference(self):
    """Verifies that an enum value can be assigned from the value of an integral
    constant within an imported module."""
    a_mojom = 'a.mojom'
    self.WriteFile(a_mojom, 'module a; const int32 kFoo = 37;')
    b_mojom = 'b.mojom'
    self.WriteFile(b_mojom,
                   'module b; import "a.mojom"; enum F { kFoo = a.kFoo };')
    self.ParseMojoms([a_mojom, b_mojom])
    b = self.LoadModule(b_mojom)

    self.assertEqual('F', b.enums[0].mojom_name)
    self.assertEqual('kFoo', b.enums[0].fields[0].mojom_name)
    self.assertEqual(37, b.enums[0].fields[0].numeric_value)
43
utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
Normal file
43
utils/ipc/mojo/public/tools/mojom/mojom/BUILD.gn
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
group("mojom") {
|
||||||
|
data = [
|
||||||
|
"__init__.py",
|
||||||
|
"error.py",
|
||||||
|
"fileutil.py",
|
||||||
|
"generate/__init__.py",
|
||||||
|
"generate/constant_resolver.py",
|
||||||
|
"generate/generator.py",
|
||||||
|
"generate/module.py",
|
||||||
|
"generate/pack.py",
|
||||||
|
"generate/template_expander.py",
|
||||||
|
"generate/translate.py",
|
||||||
|
"parse/__init__.py",
|
||||||
|
"parse/ast.py",
|
||||||
|
"parse/conditional_features.py",
|
||||||
|
"parse/lexer.py",
|
||||||
|
"parse/parser.py",
|
||||||
|
|
||||||
|
# Third-party module dependencies
|
||||||
|
"//third_party/jinja2/",
|
||||||
|
"//third_party/ply/",
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
group("tests") {
|
||||||
|
data = [
|
||||||
|
"fileutil_unittest.py",
|
||||||
|
"generate/generator_unittest.py",
|
||||||
|
"generate/module_unittest.py",
|
||||||
|
"generate/pack_unittest.py",
|
||||||
|
"generate/translate_unittest.py",
|
||||||
|
"parse/ast_unittest.py",
|
||||||
|
"parse/conditional_features_unittest.py",
|
||||||
|
"parse/lexer_unittest.py",
|
||||||
|
"parse/parser_unittest.py",
|
||||||
|
]
|
||||||
|
|
||||||
|
public_deps = [ ":mojom" ]
|
||||||
|
}
|
0
utils/ipc/mojo/public/tools/mojom/mojom/__init__.py
Normal file
0
utils/ipc/mojo/public/tools/mojom/mojom/__init__.py
Normal file
28
utils/ipc/mojo/public/tools/mojom/mojom/error.py
Normal file
28
utils/ipc/mojo/public/tools/mojom/mojom/error.py
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
|
||||||
|
class Error(Exception):
|
||||||
|
"""Base class for Mojo IDL bindings parser/generator errors."""
|
||||||
|
|
||||||
|
def __init__(self, filename, message, lineno=None, addenda=None, **kwargs):
|
||||||
|
"""|filename| is the (primary) file which caused the error, |message| is the
|
||||||
|
error message, |lineno| is the 1-based line number (or |None| if not
|
||||||
|
applicable/available), and |addenda| is a list of additional lines to append
|
||||||
|
to the final error message."""
|
||||||
|
Exception.__init__(self, **kwargs)
|
||||||
|
self.filename = filename
|
||||||
|
self.message = message
|
||||||
|
self.lineno = lineno
|
||||||
|
self.addenda = addenda
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
if self.lineno:
|
||||||
|
s = "%s:%d: Error: %s" % (self.filename, self.lineno, self.message)
|
||||||
|
else:
|
||||||
|
s = "%s: Error: %s" % (self.filename, self.message)
|
||||||
|
return "\n".join([s] + self.addenda) if self.addenda else s
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return str(self)
|
45
utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
Normal file
45
utils/ipc/mojo/public/tools/mojom/mojom/fileutil.py
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import errno
|
||||||
|
import imp
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def _GetDirAbove(dirname):
|
||||||
|
"""Returns the directory "above" this file containing |dirname| (which must
|
||||||
|
also be "above" this file)."""
|
||||||
|
path = os.path.abspath(__file__)
|
||||||
|
while True:
|
||||||
|
path, tail = os.path.split(path)
|
||||||
|
if not tail:
|
||||||
|
return None
|
||||||
|
if tail == dirname:
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def EnsureDirectoryExists(path, always_try_to_create=False):
|
||||||
|
"""A wrapper for os.makedirs that does not error if the directory already
|
||||||
|
exists. A different process could be racing to create this directory."""
|
||||||
|
|
||||||
|
if not os.path.exists(path) or always_try_to_create:
|
||||||
|
try:
|
||||||
|
os.makedirs(path)
|
||||||
|
except OSError as e:
|
||||||
|
# There may have been a race to create this directory.
|
||||||
|
if e.errno != errno.EEXIST:
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
def AddLocalRepoThirdPartyDirToModulePath():
|
||||||
|
"""Helper function to find the top-level directory of this script's repository
|
||||||
|
assuming the script falls somewhere within a 'mojo' directory, and insert the
|
||||||
|
top-level 'third_party' directory early in the module search path. Used to
|
||||||
|
ensure that third-party dependencies provided within the repository itself
|
||||||
|
(e.g. Chromium sources include snapshots of jinja2 and ply) are preferred over
|
||||||
|
locally installed system library packages."""
|
||||||
|
toplevel_dir = _GetDirAbove('mojo')
|
||||||
|
if toplevel_dir:
|
||||||
|
sys.path.insert(1, os.path.join(toplevel_dir, 'third_party'))
|
40
utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
Normal file
40
utils/ipc/mojo/public/tools/mojom/mojom/fileutil_unittest.py
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os.path
|
||||||
|
import shutil
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from mojom import fileutil
|
||||||
|
|
||||||
|
|
||||||
|
class FileUtilTest(unittest.TestCase):
|
||||||
|
def testEnsureDirectoryExists(self):
|
||||||
|
"""Test that EnsureDirectoryExists fuctions correctly."""
|
||||||
|
|
||||||
|
temp_dir = tempfile.mkdtemp()
|
||||||
|
try:
|
||||||
|
self.assertTrue(os.path.exists(temp_dir))
|
||||||
|
|
||||||
|
# Directory does not exist, yet.
|
||||||
|
full = os.path.join(temp_dir, "foo", "bar")
|
||||||
|
self.assertFalse(os.path.exists(full))
|
||||||
|
|
||||||
|
# Create the directory.
|
||||||
|
fileutil.EnsureDirectoryExists(full)
|
||||||
|
self.assertTrue(os.path.exists(full))
|
||||||
|
|
||||||
|
# Trying to create it again does not cause an error.
|
||||||
|
fileutil.EnsureDirectoryExists(full)
|
||||||
|
self.assertTrue(os.path.exists(full))
|
||||||
|
|
||||||
|
# Bypass check for directory existence to tickle error handling that
|
||||||
|
# occurs in response to a race.
|
||||||
|
fileutil.EnsureDirectoryExists(full, always_try_to_create=True)
|
||||||
|
self.assertTrue(os.path.exists(full))
|
||||||
|
finally:
|
||||||
|
shutil.rmtree(temp_dir)
|
|
@ -0,0 +1,93 @@
|
||||||
|
# Copyright 2015 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Resolves the values used for constants and enums."""
|
||||||
|
|
||||||
|
from itertools import ifilter
|
||||||
|
|
||||||
|
from mojom.generate import module as mojom
|
||||||
|
|
||||||
|
|
||||||
|
def ResolveConstants(module, expression_to_text):
|
||||||
|
in_progress = set()
|
||||||
|
computed = set()
|
||||||
|
|
||||||
|
def GetResolvedValue(named_value):
|
||||||
|
assert isinstance(named_value, (mojom.EnumValue, mojom.ConstantValue))
|
||||||
|
if isinstance(named_value, mojom.EnumValue):
|
||||||
|
field = next(
|
||||||
|
ifilter(lambda field: field.name == named_value.name,
|
||||||
|
named_value.enum.fields), None)
|
||||||
|
if not field:
|
||||||
|
raise RuntimeError(
|
||||||
|
'Unable to get computed value for field %s of enum %s' %
|
||||||
|
(named_value.name, named_value.enum.name))
|
||||||
|
if field not in computed:
|
||||||
|
ResolveEnum(named_value.enum)
|
||||||
|
return field.resolved_value
|
||||||
|
else:
|
||||||
|
ResolveConstant(named_value.constant)
|
||||||
|
named_value.resolved_value = named_value.constant.resolved_value
|
||||||
|
return named_value.resolved_value
|
||||||
|
|
||||||
|
def ResolveConstant(constant):
|
||||||
|
if constant in computed:
|
||||||
|
return
|
||||||
|
if constant in in_progress:
|
||||||
|
raise RuntimeError('Circular dependency for constant: %s' % constant.name)
|
||||||
|
in_progress.add(constant)
|
||||||
|
if isinstance(constant.value, (mojom.EnumValue, mojom.ConstantValue)):
|
||||||
|
resolved_value = GetResolvedValue(constant.value)
|
||||||
|
else:
|
||||||
|
resolved_value = expression_to_text(constant.value)
|
||||||
|
constant.resolved_value = resolved_value
|
||||||
|
in_progress.remove(constant)
|
||||||
|
computed.add(constant)
|
||||||
|
|
||||||
|
def ResolveEnum(enum):
|
||||||
|
def ResolveEnumField(enum, field, default_value):
|
||||||
|
if field in computed:
|
||||||
|
return
|
||||||
|
if field in in_progress:
|
||||||
|
raise RuntimeError('Circular dependency for enum: %s' % enum.name)
|
||||||
|
in_progress.add(field)
|
||||||
|
if field.value:
|
||||||
|
if isinstance(field.value, mojom.EnumValue):
|
||||||
|
resolved_value = GetResolvedValue(field.value)
|
||||||
|
elif isinstance(field.value, str):
|
||||||
|
resolved_value = int(field.value, 0)
|
||||||
|
else:
|
||||||
|
raise RuntimeError('Unexpected value: %s' % field.value)
|
||||||
|
else:
|
||||||
|
resolved_value = default_value
|
||||||
|
field.resolved_value = resolved_value
|
||||||
|
in_progress.remove(field)
|
||||||
|
computed.add(field)
|
||||||
|
|
||||||
|
current_value = 0
|
||||||
|
for field in enum.fields:
|
||||||
|
ResolveEnumField(enum, field, current_value)
|
||||||
|
current_value = field.resolved_value + 1
|
||||||
|
|
||||||
|
for constant in module.constants:
|
||||||
|
ResolveConstant(constant)
|
||||||
|
|
||||||
|
for enum in module.enums:
|
||||||
|
ResolveEnum(enum)
|
||||||
|
|
||||||
|
for struct in module.structs:
|
||||||
|
for constant in struct.constants:
|
||||||
|
ResolveConstant(constant)
|
||||||
|
for enum in struct.enums:
|
||||||
|
ResolveEnum(enum)
|
||||||
|
for field in struct.fields:
|
||||||
|
if isinstance(field.default, (mojom.ConstantValue, mojom.EnumValue)):
|
||||||
|
field.default.resolved_value = GetResolvedValue(field.default)
|
||||||
|
|
||||||
|
for interface in module.interfaces:
|
||||||
|
for constant in interface.constants:
|
||||||
|
ResolveConstant(constant)
|
||||||
|
for enum in interface.enums:
|
||||||
|
ResolveEnum(enum)
|
||||||
|
|
||||||
|
return module
|
325
utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
Normal file
325
utils/ipc/mojo/public/tools/mojom/mojom/generate/generator.py
Normal file
|
@ -0,0 +1,325 @@
|
||||||
|
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Code shared by the various language-specific code generators."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
from functools import partial
|
||||||
|
import os.path
|
||||||
|
import re
|
||||||
|
|
||||||
|
from mojom import fileutil
|
||||||
|
from mojom.generate import module as mojom
|
||||||
|
from mojom.generate import pack
|
||||||
|
|
||||||
|
|
||||||
|
def ExpectedArraySize(kind):
|
||||||
|
if mojom.IsArrayKind(kind):
|
||||||
|
return kind.length
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def SplitCamelCase(identifier):
|
||||||
|
"""Splits a camel-cased |identifier| and returns a list of lower-cased
|
||||||
|
strings.
|
||||||
|
"""
|
||||||
|
# Add underscores after uppercase letters when appropriate. An uppercase
|
||||||
|
# letter is considered the end of a word if it is followed by an upper and a
|
||||||
|
# lower. E.g. URLLoaderFactory -> URL_LoaderFactory
|
||||||
|
identifier = re.sub('([A-Z][0-9]*)(?=[A-Z][0-9]*[a-z])', r'\1_', identifier)
|
||||||
|
# Add underscores after lowercase letters when appropriate. A lowercase letter
|
||||||
|
# is considered the end of a word if it is followed by an upper.
|
||||||
|
# E.g. URLLoaderFactory -> URLLoader_Factory
|
||||||
|
identifier = re.sub('([a-z][0-9]*)(?=[A-Z])', r'\1_', identifier)
|
||||||
|
return [x.lower() for x in identifier.split('_')]
|
||||||
|
|
||||||
|
|
||||||
|
def ToCamel(identifier, lower_initial=False, digits_split=False, delimiter='_'):
|
||||||
|
"""Splits |identifier| using |delimiter|, makes the first character of each
|
||||||
|
word uppercased (but makes the first character of the first word lowercased
|
||||||
|
if |lower_initial| is set to True), and joins the words. Please note that for
|
||||||
|
each word, all the characters except the first one are untouched.
|
||||||
|
"""
|
||||||
|
result = ''
|
||||||
|
capitalize_next = True
|
||||||
|
for i in range(len(identifier)):
|
||||||
|
if identifier[i] == delimiter:
|
||||||
|
capitalize_next = True
|
||||||
|
elif digits_split and identifier[i].isdigit():
|
||||||
|
capitalize_next = True
|
||||||
|
result += identifier[i]
|
||||||
|
elif capitalize_next:
|
||||||
|
capitalize_next = False
|
||||||
|
result += identifier[i].upper()
|
||||||
|
else:
|
||||||
|
result += identifier[i]
|
||||||
|
|
||||||
|
if lower_initial and result:
|
||||||
|
result = result[0].lower() + result[1:]
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def _ToSnakeCase(identifier, upper=False):
|
||||||
|
"""Splits camel-cased |identifier| into lower case words, removes the first
|
||||||
|
word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
|
||||||
|
"URL_LOADER_FACTORY" if upper, otherwise "url_loader_factory".
|
||||||
|
"""
|
||||||
|
words = SplitCamelCase(identifier)
|
||||||
|
if words[0] == 'k' and len(words) > 1:
|
||||||
|
words = words[1:]
|
||||||
|
|
||||||
|
# Variables cannot start with a digit
|
||||||
|
if (words[0][0].isdigit()):
|
||||||
|
words[0] = '_' + words[0]
|
||||||
|
|
||||||
|
|
||||||
|
if upper:
|
||||||
|
words = map(lambda x: x.upper(), words)
|
||||||
|
|
||||||
|
return '_'.join(words)
|
||||||
|
|
||||||
|
|
||||||
|
def ToUpperSnakeCase(identifier):
|
||||||
|
"""Splits camel-cased |identifier| into lower case words, removes the first
|
||||||
|
word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
|
||||||
|
"URL_LOADER_FACTORY".
|
||||||
|
"""
|
||||||
|
return _ToSnakeCase(identifier, upper=True)
|
||||||
|
|
||||||
|
|
||||||
|
def ToLowerSnakeCase(identifier):
|
||||||
|
"""Splits camel-cased |identifier| into lower case words, removes the first
|
||||||
|
word if it's "k" and joins them using "_" e.g. for "URLLoaderFactory", returns
|
||||||
|
"url_loader_factory".
|
||||||
|
"""
|
||||||
|
return _ToSnakeCase(identifier, upper=False)
|
||||||
|
|
||||||
|
|
||||||
|
class Stylizer(object):
|
||||||
|
"""Stylizers specify naming rules to map mojom names to names in generated
|
||||||
|
code. For example, if you would like method_name in mojom to be mapped to
|
||||||
|
MethodName in the generated code, you need to define a subclass of Stylizer
|
||||||
|
and override StylizeMethod to do the conversion."""
|
||||||
|
|
||||||
|
def StylizeConstant(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeField(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeStruct(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeUnion(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeParameter(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeMethod(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeInterface(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeEnumField(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeEnum(self, mojom_name):
|
||||||
|
return mojom_name
|
||||||
|
|
||||||
|
def StylizeModule(self, mojom_namespace):
|
||||||
|
return mojom_namespace
|
||||||
|
|
||||||
|
|
||||||
|
def WriteFile(contents, full_path):
|
||||||
|
# If |contents| is same with the file content, we skip updating.
|
||||||
|
if os.path.isfile(full_path):
|
||||||
|
with open(full_path, 'rb') as destination_file:
|
||||||
|
if destination_file.read() == contents:
|
||||||
|
return
|
||||||
|
|
||||||
|
# Make sure the containing directory exists.
|
||||||
|
full_dir = os.path.dirname(full_path)
|
||||||
|
fileutil.EnsureDirectoryExists(full_dir)
|
||||||
|
|
||||||
|
# Dump the data to disk.
|
||||||
|
with open(full_path, "wb") as f:
|
||||||
|
if not isinstance(contents, bytes):
|
||||||
|
f.write(contents.encode('utf-8'))
|
||||||
|
else:
|
||||||
|
f.write(contents)
|
||||||
|
|
||||||
|
|
||||||
|
def AddComputedData(module):
|
||||||
|
"""Adds computed data to the given module. The data is computed once and
|
||||||
|
used repeatedly in the generation process."""
|
||||||
|
|
||||||
|
def _AddStructComputedData(exported, struct):
|
||||||
|
struct.packed = pack.PackedStruct(struct)
|
||||||
|
struct.bytes = pack.GetByteLayout(struct.packed)
|
||||||
|
struct.versions = pack.GetVersionInfo(struct.packed)
|
||||||
|
struct.exported = exported
|
||||||
|
|
||||||
|
def _AddInterfaceComputedData(interface):
|
||||||
|
interface.version = 0
|
||||||
|
for method in interface.methods:
|
||||||
|
# this field is never scrambled
|
||||||
|
method.sequential_ordinal = method.ordinal
|
||||||
|
|
||||||
|
if method.min_version is not None:
|
||||||
|
interface.version = max(interface.version, method.min_version)
|
||||||
|
|
||||||
|
method.param_struct = _GetStructFromMethod(method)
|
||||||
|
if interface.stable:
|
||||||
|
method.param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
|
||||||
|
if method.explicit_ordinal is None:
|
||||||
|
raise Exception(
|
||||||
|
'Stable interfaces must declare explicit method ordinals. The '
|
||||||
|
'method %s on stable interface %s does not declare an explicit '
|
||||||
|
'ordinal.' % (method.mojom_name, interface.qualified_name))
|
||||||
|
interface.version = max(interface.version,
|
||||||
|
method.param_struct.versions[-1].version)
|
||||||
|
|
||||||
|
if method.response_parameters is not None:
|
||||||
|
method.response_param_struct = _GetResponseStructFromMethod(method)
|
||||||
|
if interface.stable:
|
||||||
|
method.response_param_struct.attributes[mojom.ATTRIBUTE_STABLE] = True
|
||||||
|
interface.version = max(
|
||||||
|
interface.version,
|
||||||
|
method.response_param_struct.versions[-1].version)
|
||||||
|
else:
|
||||||
|
method.response_param_struct = None
|
||||||
|
|
||||||
|
def _GetStructFromMethod(method):
|
||||||
|
"""Converts a method's parameters into the fields of a struct."""
|
||||||
|
params_class = "%s_%s_Params" % (method.interface.mojom_name,
|
||||||
|
method.mojom_name)
|
||||||
|
struct = mojom.Struct(params_class,
|
||||||
|
module=method.interface.module,
|
||||||
|
attributes={})
|
||||||
|
for param in method.parameters:
|
||||||
|
struct.AddField(
|
||||||
|
param.mojom_name,
|
||||||
|
param.kind,
|
||||||
|
param.ordinal,
|
||||||
|
attributes=param.attributes)
|
||||||
|
_AddStructComputedData(False, struct)
|
||||||
|
return struct
|
||||||
|
|
||||||
|
def _GetResponseStructFromMethod(method):
|
||||||
|
"""Converts a method's response_parameters into the fields of a struct."""
|
||||||
|
params_class = "%s_%s_ResponseParams" % (method.interface.mojom_name,
|
||||||
|
method.mojom_name)
|
||||||
|
struct = mojom.Struct(params_class,
|
||||||
|
module=method.interface.module,
|
||||||
|
attributes={})
|
||||||
|
for param in method.response_parameters:
|
||||||
|
struct.AddField(
|
||||||
|
param.mojom_name,
|
||||||
|
param.kind,
|
||||||
|
param.ordinal,
|
||||||
|
attributes=param.attributes)
|
||||||
|
_AddStructComputedData(False, struct)
|
||||||
|
return struct
|
||||||
|
|
||||||
|
for struct in module.structs:
|
||||||
|
_AddStructComputedData(True, struct)
|
||||||
|
for interface in module.interfaces:
|
||||||
|
_AddInterfaceComputedData(interface)
|
||||||
|
|
||||||
|
|
||||||
|
class Generator(object):
|
||||||
|
# Pass |output_dir| to emit files to disk. Omit |output_dir| to echo all
|
||||||
|
# files to stdout.
|
||||||
|
def __init__(self,
|
||||||
|
module,
|
||||||
|
output_dir=None,
|
||||||
|
typemap=None,
|
||||||
|
variant=None,
|
||||||
|
bytecode_path=None,
|
||||||
|
for_blink=False,
|
||||||
|
js_bindings_mode="new",
|
||||||
|
js_generate_struct_deserializers=False,
|
||||||
|
export_attribute=None,
|
||||||
|
export_header=None,
|
||||||
|
generate_non_variant_code=False,
|
||||||
|
support_lazy_serialization=False,
|
||||||
|
disallow_native_types=False,
|
||||||
|
disallow_interfaces=False,
|
||||||
|
generate_message_ids=False,
|
||||||
|
generate_fuzzing=False,
|
||||||
|
enable_kythe_annotations=False,
|
||||||
|
extra_cpp_template_paths=None,
|
||||||
|
generate_extra_cpp_only=False):
|
||||||
|
self.module = module
|
||||||
|
self.output_dir = output_dir
|
||||||
|
self.typemap = typemap or {}
|
||||||
|
self.variant = variant
|
||||||
|
self.bytecode_path = bytecode_path
|
||||||
|
self.for_blink = for_blink
|
||||||
|
self.js_bindings_mode = js_bindings_mode
|
||||||
|
self.js_generate_struct_deserializers = js_generate_struct_deserializers
|
||||||
|
self.export_attribute = export_attribute
|
||||||
|
self.export_header = export_header
|
||||||
|
self.generate_non_variant_code = generate_non_variant_code
|
||||||
|
self.support_lazy_serialization = support_lazy_serialization
|
||||||
|
self.disallow_native_types = disallow_native_types
|
||||||
|
self.disallow_interfaces = disallow_interfaces
|
||||||
|
self.generate_message_ids = generate_message_ids
|
||||||
|
self.generate_fuzzing = generate_fuzzing
|
||||||
|
self.enable_kythe_annotations = enable_kythe_annotations
|
||||||
|
self.extra_cpp_template_paths = extra_cpp_template_paths
|
||||||
|
self.generate_extra_cpp_only = generate_extra_cpp_only
|
||||||
|
|
||||||
|
def Write(self, contents, filename):
|
||||||
|
if self.output_dir is None:
|
||||||
|
print(contents)
|
||||||
|
return
|
||||||
|
full_path = os.path.join(self.output_dir, filename)
|
||||||
|
WriteFile(contents, full_path)
|
||||||
|
|
||||||
|
def OptimizeEmpty(self, contents):
|
||||||
|
# Look for .cc files that contain no actual code. There are many of these
|
||||||
|
# and they collectively take a while to compile.
|
||||||
|
lines = contents.splitlines()
|
||||||
|
|
||||||
|
for line in lines:
|
||||||
|
if line.startswith('#') or line.startswith('//'):
|
||||||
|
continue
|
||||||
|
if re.match(r'namespace .* {', line) or re.match(r'}.*//.*namespace',
|
||||||
|
line):
|
||||||
|
continue
|
||||||
|
if line.strip():
|
||||||
|
# There is some actual code - return the unmodified contents.
|
||||||
|
return contents
|
||||||
|
|
||||||
|
# If we reach here then we have a .cc file with no actual code. The
|
||||||
|
# includes are therefore unneeded and can be removed.
|
||||||
|
new_lines = [line for line in lines if not line.startswith('#include')]
|
||||||
|
if len(new_lines) < len(lines):
|
||||||
|
new_lines.append('')
|
||||||
|
new_lines.append('// Includes removed due to no code being generated.')
|
||||||
|
return '\n'.join(new_lines)
|
||||||
|
|
||||||
|
def WriteWithComment(self, contents, filename):
|
||||||
|
generator_name = "mojom_bindings_generator.py"
|
||||||
|
comment = r"// %s is auto generated by %s, do not edit" % (filename,
|
||||||
|
generator_name)
|
||||||
|
contents = comment + '\n' + '\n' + contents;
|
||||||
|
if filename.endswith('.cc'):
|
||||||
|
contents = self.OptimizeEmpty(contents)
|
||||||
|
self.Write(contents, filename)
|
||||||
|
|
||||||
|
def GenerateFiles(self, args):
|
||||||
|
raise NotImplementedError("Subclasses must override/implement this method")
|
||||||
|
|
||||||
|
def GetJinjaParameters(self):
|
||||||
|
"""Returns default constructor parameters for the jinja environment."""
|
||||||
|
return {}
|
||||||
|
|
||||||
|
def GetGlobals(self):
|
||||||
|
"""Returns global mappings for the template generation."""
|
||||||
|
return {}
|
|
@ -0,0 +1,74 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
|
||||||
|
def _GetDirAbove(dirname):
|
||||||
|
"""Returns the directory "above" this file containing |dirname| (which must
|
||||||
|
also be "above" this file)."""
|
||||||
|
path = os.path.abspath(__file__)
|
||||||
|
while True:
|
||||||
|
path, tail = os.path.split(path)
|
||||||
|
assert tail
|
||||||
|
if tail == dirname:
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
imp.find_module("mojom")
|
||||||
|
except ImportError:
|
||||||
|
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
|
||||||
|
from mojom.generate import generator
|
||||||
|
|
||||||
|
|
||||||
|
class StringManipulationTest(unittest.TestCase):
|
||||||
|
"""generator contains some string utilities, this tests only those."""
|
||||||
|
|
||||||
|
def testSplitCamelCase(self):
|
||||||
|
self.assertEquals(["camel", "case"], generator.SplitCamelCase("CamelCase"))
|
||||||
|
self.assertEquals(["url", "loader", "factory"],
|
||||||
|
generator.SplitCamelCase('URLLoaderFactory'))
|
||||||
|
self.assertEquals(["get99", "entries"],
|
||||||
|
generator.SplitCamelCase('Get99Entries'))
|
||||||
|
self.assertEquals(["get99entries"],
|
||||||
|
generator.SplitCamelCase('Get99entries'))
|
||||||
|
|
||||||
|
def testToCamel(self):
|
||||||
|
self.assertEquals("CamelCase", generator.ToCamel("camel_case"))
|
||||||
|
self.assertEquals("CAMELCASE", generator.ToCamel("CAMEL_CASE"))
|
||||||
|
self.assertEquals("camelCase",
|
||||||
|
generator.ToCamel("camel_case", lower_initial=True))
|
||||||
|
self.assertEquals("CamelCase", generator.ToCamel(
|
||||||
|
"camel case", delimiter=' '))
|
||||||
|
self.assertEquals("CaMelCaSe", generator.ToCamel("caMel_caSe"))
|
||||||
|
self.assertEquals("L2Tp", generator.ToCamel("l2tp", digits_split=True))
|
||||||
|
self.assertEquals("l2tp", generator.ToCamel("l2tp", lower_initial=True))
|
||||||
|
|
||||||
|
def testToSnakeCase(self):
|
||||||
|
self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCase"))
|
||||||
|
self.assertEquals("snake_case", generator.ToLowerSnakeCase("snakeCase"))
|
||||||
|
self.assertEquals("snake_case", generator.ToLowerSnakeCase("SnakeCASE"))
|
||||||
|
self.assertEquals("snake_d3d11_case",
|
||||||
|
generator.ToLowerSnakeCase("SnakeD3D11Case"))
|
||||||
|
self.assertEquals("snake_d3d11_case",
|
||||||
|
generator.ToLowerSnakeCase("SnakeD3d11Case"))
|
||||||
|
self.assertEquals("snake_d3d11_case",
|
||||||
|
generator.ToLowerSnakeCase("snakeD3d11Case"))
|
||||||
|
self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCase"))
|
||||||
|
self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("snakeCase"))
|
||||||
|
self.assertEquals("SNAKE_CASE", generator.ToUpperSnakeCase("SnakeCASE"))
|
||||||
|
self.assertEquals("SNAKE_D3D11_CASE",
|
||||||
|
generator.ToUpperSnakeCase("SnakeD3D11Case"))
|
||||||
|
self.assertEquals("SNAKE_D3D11_CASE",
|
||||||
|
generator.ToUpperSnakeCase("SnakeD3d11Case"))
|
||||||
|
self.assertEquals("SNAKE_D3D11_CASE",
|
||||||
|
generator.ToUpperSnakeCase("snakeD3d11Case"))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main()
|
1635
utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
Normal file
1635
utils/ipc/mojo/public/tools/mojom/mojom/generate/module.py
Normal file
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,31 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from mojom.generate import module as mojom
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleTest(unittest.TestCase):
|
||||||
|
def testNonInterfaceAsInterfaceRequest(self):
|
||||||
|
"""Tests that a non-interface cannot be used for interface requests."""
|
||||||
|
module = mojom.Module('test_module', 'test_namespace')
|
||||||
|
struct = mojom.Struct('TestStruct', module=module)
|
||||||
|
with self.assertRaises(Exception) as e:
|
||||||
|
mojom.InterfaceRequest(struct)
|
||||||
|
self.assertEquals(
|
||||||
|
e.exception.__str__(),
|
||||||
|
'Interface request requires \'x:TestStruct\' to be an interface.')
|
||||||
|
|
||||||
|
def testNonInterfaceAsAssociatedInterface(self):
|
||||||
|
"""Tests that a non-interface type cannot be used for associated interfaces.
|
||||||
|
"""
|
||||||
|
module = mojom.Module('test_module', 'test_namespace')
|
||||||
|
struct = mojom.Struct('TestStruct', module=module)
|
||||||
|
with self.assertRaises(Exception) as e:
|
||||||
|
mojom.AssociatedInterface(struct)
|
||||||
|
self.assertEquals(
|
||||||
|
e.exception.__str__(),
|
||||||
|
'Associated interface requires \'x:TestStruct\' to be an interface.')
|
258
utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
Normal file
258
utils/ipc/mojo/public/tools/mojom/mojom/generate/pack.py
Normal file
|
@ -0,0 +1,258 @@
|
||||||
|
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
from mojom.generate import module as mojom
|
||||||
|
|
||||||
|
# This module provides a mechanism for determining the packed order and offsets
|
||||||
|
# of a mojom.Struct.
|
||||||
|
#
|
||||||
|
# ps = pack.PackedStruct(struct)
|
||||||
|
# ps.packed_fields will access a list of PackedField objects, each of which
|
||||||
|
# will have an offset, a size and a bit (for mojom.BOOLs).
|
||||||
|
|
||||||
|
# Size of struct header in bytes: num_bytes [4B] + version [4B].
|
||||||
|
HEADER_SIZE = 8
|
||||||
|
|
||||||
|
|
||||||
|
class PackedField(object):
  """A mojom struct field annotated with wire-format layout information:
  size, alignment, byte offset, bit position (BOOLs) and minimum version."""

  # Serialized size in bytes for each primitive mojom kind. Reference kinds
  # (arrays, maps, structs, interfaces, ...) are handled in GetSizeForKind().
  kind_to_size = {
      mojom.BOOL: 1,
      mojom.INT8: 1,
      mojom.UINT8: 1,
      mojom.INT16: 2,
      mojom.UINT16: 2,
      mojom.INT32: 4,
      mojom.UINT32: 4,
      mojom.FLOAT: 4,
      mojom.HANDLE: 4,
      mojom.MSGPIPE: 4,
      mojom.SHAREDBUFFER: 4,
      mojom.PLATFORMHANDLE: 4,
      mojom.DCPIPE: 4,
      mojom.DPPIPE: 4,
      mojom.NULLABLE_HANDLE: 4,
      mojom.NULLABLE_MSGPIPE: 4,
      mojom.NULLABLE_SHAREDBUFFER: 4,
      mojom.NULLABLE_PLATFORMHANDLE: 4,
      mojom.NULLABLE_DCPIPE: 4,
      mojom.NULLABLE_DPPIPE: 4,
      mojom.INT64: 8,
      mojom.UINT64: 8,
      mojom.DOUBLE: 8,
      mojom.STRING: 8,
      mojom.NULLABLE_STRING: 8
  }

  @classmethod
  def GetSizeForKind(cls, kind):
    """Return the serialized size in bytes of a value of |kind|.

    Raises:
      Exception: if |kind| is not a known primitive or reference kind (e.g.
          its definition was not imported).
    """
    # Pointer-like kinds occupy an 8-byte slot in the struct body.
    if isinstance(kind, (mojom.Array, mojom.Map, mojom.Struct, mojom.Interface,
                         mojom.AssociatedInterface, mojom.PendingRemote,
                         mojom.PendingAssociatedRemote)):
      return 8
    if isinstance(kind, mojom.Union):
      return 16
    # Interface requests are transmitted as a message pipe handle.
    if isinstance(kind, (mojom.InterfaceRequest, mojom.PendingReceiver)):
      kind = mojom.MSGPIPE
    if isinstance(
        kind,
        (mojom.AssociatedInterfaceRequest, mojom.PendingAssociatedReceiver)):
      return 4
    if isinstance(kind, mojom.Enum):
      # TODO(mpcomplete): what about big enums?
      return cls.kind_to_size[mojom.INT32]
    if not kind in cls.kind_to_size:
      raise Exception("Undefined type: %s. Did you forget to import the file "
                      "containing the definition?" % kind.spec)
    return cls.kind_to_size[kind]

  @classmethod
  def GetAlignmentForKind(cls, kind):
    """Return the wire alignment in bytes for |kind|. This is usually the
    same as the size, except interface kinds align to 4 and unions to 8."""
    if isinstance(kind, (mojom.Interface, mojom.AssociatedInterface,
                         mojom.PendingRemote, mojom.PendingAssociatedRemote)):
      return 4
    if isinstance(kind, mojom.Union):
      return 8
    return cls.GetSizeForKind(kind)

  def __init__(self, field, index, ordinal):
    """
    Args:
      field: the original field.
      index: the position of the original field in the struct.
      ordinal: the ordinal of the field for serialization.
    """
    self.field = field
    self.index = index
    self.ordinal = ordinal
    self.size = self.GetSizeForKind(field.kind)
    self.alignment = self.GetAlignmentForKind(field.kind)
    # Layout slots filled in later by PackedStruct: byte offset within the
    # payload, bit position (meaningful for BOOLs only) and minimum version.
    self.offset = None
    self.bit = None
    self.min_version = None
|
||||||
|
|
||||||
|
|
||||||
|
def GetPad(offset, alignment):
  """Return the number of padding bytes needed to round |offset| up to the
  next multiple of |alignment| (zero if it is already aligned)."""
  # Python's modulo on a negated offset yields the distance to the next
  # multiple directly, equivalent to (alignment - offset % alignment) % alignment.
  return -offset % alignment
|
||||||
|
|
||||||
|
|
||||||
|
def GetFieldOffset(field, last_field):
  """Return a 2-tuple (offset, bit) placing |field| directly after
  |last_field|.

  Consecutive BOOL fields share a byte: when |last_field| is a BOOL with a
  free bit left (bit < 7), |field| reuses the same offset at the next bit.
  Otherwise |field| starts on the next byte aligned for its kind, at bit 0.
  """
  both_bools = (field.field.kind == mojom.BOOL
                and last_field.field.kind == mojom.BOOL)
  if both_bools and last_field.bit < 7:
    return (last_field.offset, last_field.bit + 1)

  unaligned_end = last_field.offset + last_field.size
  return (unaligned_end + GetPad(unaligned_end, field.alignment), 0)
|
||||||
|
|
||||||
|
|
||||||
|
def GetPayloadSizeUpToField(field):
  """Return the payload size (struct header excluded) assuming |field| is the
  final packed field, rounded up to an 8-byte boundary; 0 when |field| is
  falsy (no fields)."""
  if not field:
    return 0
  field_end = field.offset + field.size
  return field_end + GetPad(field_end, 8)
|
||||||
|
|
||||||
|
|
||||||
|
class PackedStruct(object):
  """Computes and stores the wire layout of a mojom struct's fields.

  Packing is greedy: fields are sorted by ordinal, then each field is placed
  into the first alignment-compatible hole between already-placed fields
  (consecutive BOOLs share bits of one byte), or appended at the end.
  """

  def __init__(self, struct):
    self.struct = struct
    # |packed_fields| contains all the fields, in increasing offset order.
    self.packed_fields = []
    # |packed_fields_in_ordinal_order| refers to the same fields as
    # |packed_fields|, but in ordinal order.
    self.packed_fields_in_ordinal_order = []

    # No fields.
    if (len(struct.fields) == 0):
      return

    # Start by sorting by ordinal. Fields without an explicit ordinal
    # continue counting from the previous field's ordinal.
    src_fields = self.packed_fields_in_ordinal_order
    ordinal = 0
    for index, field in enumerate(struct.fields):
      if field.ordinal is not None:
        ordinal = field.ordinal
      src_fields.append(PackedField(field, index, ordinal))
      ordinal += 1
    src_fields.sort(key=lambda field: field.ordinal)

    # Set |min_version| for each field. A field without an explicit
    # min_version inherits the largest version seen so far (min_version is
    # asserted to be non-decreasing in ordinal order).
    next_min_version = 0
    for packed_field in src_fields:
      if packed_field.field.min_version is None:
        assert next_min_version == 0
      else:
        assert packed_field.field.min_version >= next_min_version
        next_min_version = packed_field.field.min_version
      packed_field.min_version = next_min_version

      if (packed_field.min_version != 0
          and mojom.IsReferenceKind(packed_field.field.kind)
          and not packed_field.field.kind.is_nullable):
        raise Exception("Non-nullable fields are only allowed in version 0 of "
                        "a struct. %s.%s is defined with [MinVersion=%d]." %
                        (self.struct.name, packed_field.field.name,
                         packed_field.min_version))

    # The first field always starts the payload at offset 0, bit 0.
    src_field = src_fields[0]
    src_field.offset = 0
    src_field.bit = 0
    dst_fields = self.packed_fields
    dst_fields.append(src_field)

    # Then find first slot that each field will fit.
    for src_field in src_fields[1:]:
      last_field = dst_fields[0]
      for i in range(1, len(dst_fields)):
        next_field = dst_fields[i]
        offset, bit = GetFieldOffset(src_field, last_field)
        if offset + src_field.size <= next_field.offset:
          # Found hole.
          src_field.offset = offset
          src_field.bit = bit
          dst_fields.insert(i, src_field)
          break
        last_field = next_field
      if src_field.offset is None:
        # Add to end
        src_field.offset, src_field.bit = GetFieldOffset(src_field, last_field)
        dst_fields.append(src_field)
|
||||||
|
|
||||||
|
|
||||||
|
class ByteInfo(object):
  """Mutable record describing one byte of a packed struct payload: either a
  padding byte, or the (possibly empty) list of fields starting there."""

  def __init__(self):
    # A byte is either padding or occupied by fields, never both
    # (GetByteLayout asserts this invariant).
    self.is_padding = False
    self.packed_fields = []
|
||||||
|
|
||||||
|
|
||||||
|
def GetByteLayout(packed_struct):
  """Return a list with one ByteInfo per payload byte of |packed_struct|,
  marking padding bytes and recording which fields start at each byte."""
  packed_fields = packed_struct.packed_fields
  final_field = packed_fields[-1] if packed_fields else None
  byte_info = [ByteInfo() for _ in range(GetPayloadSizeUpToField(final_field))]

  covered_up_to = 0
  for packed_field in packed_fields:
    # Bytes between the previous field's end and this field's start are pad.
    for i in range(covered_up_to, packed_field.offset):
      byte_info[i].is_padding = True
    byte_info[packed_field.offset].packed_fields.append(packed_field)
    covered_up_to = packed_field.offset + packed_field.size

  # Trailing bytes up to the 8-byte-aligned payload end are also padding.
  for i in range(covered_up_to, len(byte_info)):
    byte_info[i].is_padding = True

  for byte in byte_info:
    # A given byte cannot both be padding and have fields packed into it.
    assert not (byte.is_padding and byte.packed_fields)

  return byte_info
|
||||||
|
|
||||||
|
|
||||||
|
class VersionInfo(object):
  """Layout facts for one struct version: the version number, how many
  fields that version contains, and its total serialized size in bytes
  (header included)."""

  def __init__(self, version, num_fields, num_bytes):
    self.version = version
    self.num_fields = num_fields
    self.num_bytes = num_bytes
|
||||||
|
|
||||||
|
|
||||||
|
def GetVersionInfo(packed_struct):
  """Get version information for a struct.

  Args:
    packed_struct: A PackedStruct instance.

  Returns:
    A non-empty list of VersionInfo instances, sorted by version in increasing
    order.
    Note: The version numbers may not be consecutive.
  """
  versions = []
  last_version = 0
  last_num_fields = 0
  last_payload_size = 0

  for packed_field in packed_struct.packed_fields_in_ordinal_order:
    if packed_field.min_version != last_version:
      # A higher version begins here: snapshot the totals accumulated for the
      # previous version before counting this field.
      versions.append(
          VersionInfo(last_version, last_num_fields,
                      last_payload_size + HEADER_SIZE))
      last_version = packed_field.min_version

    last_num_fields += 1
    # The fields are iterated in ordinal order here. However, the size of a
    # version is determined by the last field of that version in pack order,
    # instead of ordinal order. Therefore, we need to calculate the max value.
    last_payload_size = max(
        GetPayloadSizeUpToField(packed_field), last_payload_size)

  # The loop only snapshots on version changes, so the final version (which
  # is version 0 for an empty struct) is always appended here.
  assert len(versions) == 0 or last_num_fields != versions[-1].num_fields
  versions.append(
      VersionInfo(last_version, last_num_fields,
                  last_payload_size + HEADER_SIZE))
  return versions
|
|
@ -0,0 +1,225 @@
|
||||||
|
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from mojom.generate import module as mojom
|
||||||
|
from mojom.generate import pack
|
||||||
|
|
||||||
|
|
||||||
|
class PackTest(unittest.TestCase):
  """Unit tests for pack.PackedStruct layout and pack.GetVersionInfo."""

  def testOrdinalOrder(self):
    """Fields are packed by ordinal, not by declaration order."""
    struct = mojom.Struct('test')
    struct.AddField('testfield1', mojom.INT32, 2)
    struct.AddField('testfield2', mojom.INT32, 1)
    ps = pack.PackedStruct(struct)

    self.assertEqual(2, len(ps.packed_fields))
    self.assertEqual('testfield2', ps.packed_fields[0].field.mojom_name)
    self.assertEqual('testfield1', ps.packed_fields[1].field.mojom_name)

  def testZeroFields(self):
    """An empty struct produces no packed fields."""
    struct = mojom.Struct('test')
    ps = pack.PackedStruct(struct)
    self.assertEqual(0, len(ps.packed_fields))

  def testOneField(self):
    """A single field packs trivially."""
    struct = mojom.Struct('test')
    struct.AddField('testfield1', mojom.INT8)
    ps = pack.PackedStruct(struct)
    self.assertEqual(1, len(ps.packed_fields))

  def _CheckPackSequence(self, kinds, fields, offsets):
    """Checks the pack order and offsets of a sequence of mojom.Kinds.

    Args:
      kinds: A sequence of mojom.Kinds that specify the fields that are to be
      created.
      fields: The expected order of the resulting fields, with the integer "1"
      first.
      offsets: The expected order of offsets, with the integer "0" first.
    """
    struct = mojom.Struct('test')
    index = 1
    for kind in kinds:
      struct.AddField('%d' % index, kind)
      index += 1
    ps = pack.PackedStruct(struct)
    num_fields = len(ps.packed_fields)
    self.assertEqual(len(kinds), num_fields)
    for i in range(num_fields):
      self.assertEqual('%d' % fields[i], ps.packed_fields[i].field.mojom_name)
      self.assertEqual(offsets[i], ps.packed_fields[i].offset)

  def testPaddingPackedInOrder(self):
    """Fields already in packing order keep their declaration order."""
    return self._CheckPackSequence((mojom.INT8, mojom.UINT8, mojom.INT32),
                                   (1, 2, 3), (0, 1, 4))

  def testPaddingPackedOutOfOrder(self):
    """A later small field is pulled forward into a padding hole."""
    return self._CheckPackSequence((mojom.INT8, mojom.INT32, mojom.UINT8),
                                   (1, 3, 2), (0, 1, 4))

  def testPaddingPackedOverflow(self):
    kinds = (mojom.INT8, mojom.INT32, mojom.INT16, mojom.INT8, mojom.INT8)
    # 2 bytes should be packed together first, followed by short, then by int.
    fields = (1, 4, 3, 2, 5)
    offsets = (0, 1, 2, 4, 8)
    return self._CheckPackSequence(kinds, fields, offsets)

  def testNullableTypes(self):
    """Nullable kinds pack with the same sizes/alignments as non-nullable."""
    kinds = (mojom.STRING.MakeNullableKind(), mojom.HANDLE.MakeNullableKind(),
             mojom.Struct('test_struct').MakeNullableKind(),
             mojom.DCPIPE.MakeNullableKind(), mojom.Array().MakeNullableKind(),
             mojom.DPPIPE.MakeNullableKind(),
             mojom.Array(length=5).MakeNullableKind(),
             mojom.MSGPIPE.MakeNullableKind(),
             mojom.Interface('test_interface').MakeNullableKind(),
             mojom.SHAREDBUFFER.MakeNullableKind(),
             mojom.InterfaceRequest().MakeNullableKind())
    fields = (1, 2, 4, 3, 5, 6, 8, 7, 9, 10, 11)
    offsets = (0, 8, 12, 16, 24, 32, 36, 40, 48, 56, 60)
    return self._CheckPackSequence(kinds, fields, offsets)

  def testAllTypes(self):
    return self._CheckPackSequence(
        (mojom.BOOL, mojom.INT8, mojom.STRING, mojom.UINT8, mojom.INT16,
         mojom.DOUBLE, mojom.UINT16, mojom.INT32, mojom.UINT32, mojom.INT64,
         mojom.FLOAT, mojom.STRING, mojom.HANDLE, mojom.UINT64,
         mojom.Struct('test'), mojom.Array(), mojom.STRING.MakeNullableKind()),
        (1, 2, 4, 5, 7, 3, 6, 8, 9, 10, 11, 13, 12, 14, 15, 16, 17, 18),
        (0, 1, 2, 4, 6, 8, 16, 24, 28, 32, 40, 44, 48, 56, 64, 72, 80, 88))

  def testPaddingPackedOutOfOrderByOrdinal(self):
    """Explicit ordinals still allow a small field to fill an earlier hole."""
    struct = mojom.Struct('test')
    struct.AddField('testfield1', mojom.INT8)
    struct.AddField('testfield3', mojom.UINT8, 3)
    struct.AddField('testfield2', mojom.INT32, 2)
    ps = pack.PackedStruct(struct)
    self.assertEqual(3, len(ps.packed_fields))

    # Second byte should be packed in behind first, altering order.
    self.assertEqual('testfield1', ps.packed_fields[0].field.mojom_name)
    self.assertEqual('testfield3', ps.packed_fields[1].field.mojom_name)
    self.assertEqual('testfield2', ps.packed_fields[2].field.mojom_name)

    # Second byte should be packed with first.
    self.assertEqual(0, ps.packed_fields[0].offset)
    self.assertEqual(1, ps.packed_fields[1].offset)
    self.assertEqual(4, ps.packed_fields[2].offset)

  def testBools(self):
    """Up to eight BOOLs share one byte; the ninth starts a new byte."""
    struct = mojom.Struct('test')
    struct.AddField('bit0', mojom.BOOL)
    struct.AddField('bit1', mojom.BOOL)
    struct.AddField('int', mojom.INT32)
    struct.AddField('bit2', mojom.BOOL)
    struct.AddField('bit3', mojom.BOOL)
    struct.AddField('bit4', mojom.BOOL)
    struct.AddField('bit5', mojom.BOOL)
    struct.AddField('bit6', mojom.BOOL)
    struct.AddField('bit7', mojom.BOOL)
    struct.AddField('bit8', mojom.BOOL)
    ps = pack.PackedStruct(struct)
    self.assertEqual(10, len(ps.packed_fields))

    # First 8 bits packed together.
    for i in range(8):
      pf = ps.packed_fields[i]
      self.assertEqual(0, pf.offset)
      self.assertEqual("bit%d" % i, pf.field.mojom_name)
      self.assertEqual(i, pf.bit)

    # Ninth bit goes into second byte.
    self.assertEqual("bit8", ps.packed_fields[8].field.mojom_name)
    self.assertEqual(1, ps.packed_fields[8].offset)
    self.assertEqual(0, ps.packed_fields[8].bit)

    # int comes last.
    self.assertEqual("int", ps.packed_fields[9].field.mojom_name)
    self.assertEqual(4, ps.packed_fields[9].offset)

  def testMinVersion(self):
    """Tests that |min_version| is properly set for packed fields."""
    struct = mojom.Struct('test')
    struct.AddField('field_2', mojom.BOOL, 2)
    struct.AddField('field_0', mojom.INT32, 0)
    struct.AddField('field_1', mojom.INT64, 1)
    ps = pack.PackedStruct(struct)

    self.assertEqual('field_0', ps.packed_fields[0].field.mojom_name)
    self.assertEqual('field_2', ps.packed_fields[1].field.mojom_name)
    self.assertEqual('field_1', ps.packed_fields[2].field.mojom_name)

    self.assertEqual(0, ps.packed_fields[0].min_version)
    self.assertEqual(0, ps.packed_fields[1].min_version)
    self.assertEqual(0, ps.packed_fields[2].min_version)

    struct.fields[0].attributes = {'MinVersion': 1}
    ps = pack.PackedStruct(struct)

    self.assertEqual(0, ps.packed_fields[0].min_version)
    self.assertEqual(1, ps.packed_fields[1].min_version)
    self.assertEqual(0, ps.packed_fields[2].min_version)

  def testGetVersionInfoEmptyStruct(self):
    """Tests that pack.GetVersionInfo() never returns an empty list, even for
    empty structs.
    """
    struct = mojom.Struct('test')
    ps = pack.PackedStruct(struct)

    versions = pack.GetVersionInfo(ps)
    self.assertEqual(1, len(versions))
    self.assertEqual(0, versions[0].version)
    self.assertEqual(0, versions[0].num_fields)
    self.assertEqual(8, versions[0].num_bytes)

  def testGetVersionInfoComplexOrder(self):
    """Tests pack.GetVersionInfo() using a struct whose definition order,
    ordinal order and pack order for fields are all different.
    """
    struct = mojom.Struct('test')
    struct.AddField(
        'field_3', mojom.BOOL, ordinal=3, attributes={'MinVersion': 3})
    struct.AddField('field_0', mojom.INT32, ordinal=0)
    struct.AddField(
        'field_1', mojom.INT64, ordinal=1, attributes={'MinVersion': 2})
    struct.AddField(
        'field_2', mojom.INT64, ordinal=2, attributes={'MinVersion': 3})
    ps = pack.PackedStruct(struct)

    versions = pack.GetVersionInfo(ps)
    self.assertEqual(3, len(versions))

    self.assertEqual(0, versions[0].version)
    self.assertEqual(1, versions[0].num_fields)
    self.assertEqual(16, versions[0].num_bytes)

    self.assertEqual(2, versions[1].version)
    self.assertEqual(2, versions[1].num_fields)
    self.assertEqual(24, versions[1].num_bytes)

    self.assertEqual(3, versions[2].version)
    self.assertEqual(4, versions[2].num_fields)
    self.assertEqual(32, versions[2].num_bytes)

  def testInterfaceAlignment(self):
    """Tests that interfaces are aligned on 4-byte boundaries, although the size
    of an interface is 8 bytes.
    """
    kinds = (mojom.INT32, mojom.Interface('test_interface'))
    fields = (1, 2)
    offsets = (0, 4)
    self._CheckPackSequence(kinds, fields, offsets)

  def testAssociatedInterfaceAlignment(self):
    """Tests that associated interfaces are aligned on 4-byte boundaries,
    although the size of an associated interface is 8 bytes.
    """
    kinds = (mojom.INT32,
             mojom.AssociatedInterface(mojom.Interface('test_interface')))
    fields = (1, 2)
    offsets = (0, 4)
    self._CheckPackSequence(kinds, fields, offsets)
|
|
@ -0,0 +1,83 @@
|
||||||
|
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
# Based on third_party/WebKit/Source/build/scripts/template_expander.py.
|
||||||
|
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from mojom import fileutil
|
||||||
|
|
||||||
|
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||||
|
import jinja2
|
||||||
|
|
||||||
|
|
||||||
|
def ApplyTemplate(mojo_generator, path_to_template, params, **kwargs):
  """Render |path_to_template| from the generator's precompiled template zip,
  using the generator's Jinja parameters, globals and filters."""
  bytecode_zip = os.path.join(mojo_generator.bytecode_path,
                              "%s.zip" % mojo_generator.GetTemplatePrefix())
  env_kwargs = dict(mojo_generator.GetJinjaParameters())
  env_kwargs.update(kwargs)

  env = jinja2.Environment(
      loader=jinja2.ModuleLoader(bytecode_zip),
      keep_trailing_newline=True,
      **env_kwargs)
  env.globals.update(mojo_generator.GetGlobals())
  env.filters.update(mojo_generator.GetFilters())
  return env.get_template(path_to_template).render(params)
|
||||||
|
|
||||||
|
|
||||||
|
def UseJinja(path_to_template, **extra):
  """Decorator factory: the wrapped generator method returns template
  parameters, which are rendered through |path_to_template| via
  ApplyTemplate. Extra keyword args are forwarded to the Jinja environment."""
  def decorate(generator):
    def render_output(*args, **generator_kwargs):
      parameters = generator(*args, **generator_kwargs)
      # args[0] is the generator instance the decorated method belongs to.
      return ApplyTemplate(args[0], path_to_template, parameters, **extra)

    render_output.__name__ = generator.__name__
    return render_output

  return decorate
|
||||||
|
|
||||||
|
|
||||||
|
def ApplyImportedTemplate(mojo_generator, path_to_template, filename, params,
                          **kwargs):
  """Render template file |filename| found under the |path_to_template|
  directory, using the generator's Jinja parameters, globals and filters."""
  env_kwargs = dict(mojo_generator.GetJinjaParameters())
  env_kwargs.update(kwargs)

  env = jinja2.Environment(
      loader=jinja2.FileSystemLoader(searchpath=path_to_template),
      keep_trailing_newline=True,
      **env_kwargs)
  env.globals.update(mojo_generator.GetGlobals())
  env.filters.update(mojo_generator.GetFilters())
  return env.get_template(filename).render(params)
|
||||||
|
|
||||||
|
|
||||||
|
def UseJinjaForImportedTemplate(func):
  """Decorator for generators whose template lives on disk: |func| returns
  the template parameters; positional args[1] and args[2] name the template
  directory and template filename, respectively."""
  def wrapper(*args, **kwargs):
    parameters = func(*args, **kwargs)
    template_dir, template_file = args[1], args[2]
    return ApplyImportedTemplate(args[0], template_dir, template_file,
                                 parameters)

  wrapper.__name__ = func.__name__
  return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def PrecompileTemplates(generator_modules, output_dir):
  """Compile every generator's *.tmpl templates into a per-generator bytecode
  zip ("<prefix>.zip") under |output_dir| for later use by ApplyTemplate."""
  for module in generator_modules.values():
    generator = module.Generator(None)
    template_dir = os.path.join(
        os.path.dirname(module.__file__), generator.GetTemplatePrefix())
    env = jinja2.Environment(loader=jinja2.FileSystemLoader([template_dir]))
    env.filters.update(generator.GetFilters())
    env.compile_templates(
        os.path.join(output_dir, "%s.zip" % generator.GetTemplatePrefix()),
        extensions=["tmpl"],
        zip="stored",
        py_compile=True,
        ignore_errors=False)
|
854
utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
Normal file
854
utils/ipc/mojo/public/tools/mojom/mojom/generate/translate.py
Normal file
|
@ -0,0 +1,854 @@
|
||||||
|
# Copyright 2013 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Convert parse tree to AST.
|
||||||
|
|
||||||
|
This module converts the parse tree to the AST we use for code generation. The
|
||||||
|
main entry point is OrderedModule, which gets passed the parser
|
||||||
|
representation of a mojom file. When called it's assumed that all imports have
|
||||||
|
already been parsed and converted to ASTs before.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import itertools
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from mojom.generate import generator
|
||||||
|
from mojom.generate import module as mojom
|
||||||
|
from mojom.parse import ast
|
||||||
|
|
||||||
|
|
||||||
|
def _IsStrOrUnicode(x):
|
||||||
|
if sys.version_info[0] < 3:
|
||||||
|
return isinstance(x, (unicode, str))
|
||||||
|
return isinstance(x, str)
|
||||||
|
|
||||||
|
|
||||||
|
def _DuplicateName(values):
|
||||||
|
"""Returns the 'mojom_name' of the first entry in |values| whose 'mojom_name'
|
||||||
|
has already been encountered. If there are no duplicates, returns None."""
|
||||||
|
names = set()
|
||||||
|
for value in values:
|
||||||
|
if value.mojom_name in names:
|
||||||
|
return value.mojom_name
|
||||||
|
names.add(value.mojom_name)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _ElemsOfType(elems, elem_type, scope):
  """Find all elements of the given type, rejecting duplicate names.

  Args:
    elems: {Sequence[Any]} Sequence of elems.
    elem_type: {Type[C]} Extract all elems of this type.
    scope: {str} The name of the surrounding scope (e.g. struct
        definition). Used in error messages.

  Returns:
    {List[C]} All elems of matching type.

  Raises:
    Exception: if two matching elements share a mojom_name.
  """
  assert isinstance(elem_type, type)
  matching = [elem for elem in elems if isinstance(elem, elem_type)]
  clashing_name = _DuplicateName(matching)
  if clashing_name:
    raise Exception('Names in mojom must be unique within a scope. The name '
                    '"%s" is used more than once within the scope "%s".' %
                    (clashing_name, scope))
  return matching
|
||||||
|
|
||||||
|
|
||||||
|
def _ProcessElements(scope, elements, operations_by_type):
|
||||||
|
"""Iterates over the given elements, running a function from
|
||||||
|
operations_by_type for any element that matches a key in that dict. The scope
|
||||||
|
is the name of the surrounding scope, such as a filename or struct name, used
|
||||||
|
only in error messages."""
|
||||||
|
names_in_this_scope = set()
|
||||||
|
for element in elements:
|
||||||
|
# pylint: disable=unidiomatic-typecheck
|
||||||
|
element_type = type(element)
|
||||||
|
if element_type in operations_by_type:
|
||||||
|
if element.mojom_name in names_in_this_scope:
|
||||||
|
raise Exception('Names must be unique within a scope. The name "%s" is '
|
||||||
|
'used more than once within the scope "%s".' %
|
||||||
|
(duplicate_name, scope))
|
||||||
|
operations_by_type[element_type](element)
|
||||||
|
|
||||||
|
|
||||||
|
def _MapKind(kind):
|
||||||
|
map_to_kind = {
|
||||||
|
'bool': 'b',
|
||||||
|
'int8': 'i8',
|
||||||
|
'int16': 'i16',
|
||||||
|
'int32': 'i32',
|
||||||
|
'int64': 'i64',
|
||||||
|
'uint8': 'u8',
|
||||||
|
'uint16': 'u16',
|
||||||
|
'uint32': 'u32',
|
||||||
|
'uint64': 'u64',
|
||||||
|
'float': 'f',
|
||||||
|
'double': 'd',
|
||||||
|
'string': 's',
|
||||||
|
'handle': 'h',
|
||||||
|
'handle<data_pipe_consumer>': 'h:d:c',
|
||||||
|
'handle<data_pipe_producer>': 'h:d:p',
|
||||||
|
'handle<message_pipe>': 'h:m',
|
||||||
|
'handle<shared_buffer>': 'h:s',
|
||||||
|
'handle<platform>': 'h:p'
|
||||||
|
}
|
||||||
|
if kind.endswith('?'):
|
||||||
|
base_kind = _MapKind(kind[0:-1])
|
||||||
|
# NOTE: This doesn't rule out enum types. Those will be detected later, when
|
||||||
|
# cross-reference is established.
|
||||||
|
reference_kinds = ('m', 's', 'h', 'a', 'r', 'x', 'asso', 'rmt', 'rcv',
|
||||||
|
'rma', 'rca')
|
||||||
|
if re.split('[^a-z]', base_kind, 1)[0] not in reference_kinds:
|
||||||
|
raise Exception('A type (spec "%s") cannot be made nullable' % base_kind)
|
||||||
|
return '?' + base_kind
|
||||||
|
if kind.endswith('}'):
|
||||||
|
lbracket = kind.rfind('{')
|
||||||
|
value = kind[0:lbracket]
|
||||||
|
return 'm[' + _MapKind(kind[lbracket + 1:-1]) + '][' + _MapKind(value) + ']'
|
||||||
|
if kind.endswith(']'):
|
||||||
|
lbracket = kind.rfind('[')
|
||||||
|
typename = kind[0:lbracket]
|
||||||
|
return 'a' + kind[lbracket + 1:-1] + ':' + _MapKind(typename)
|
||||||
|
if kind.endswith('&'):
|
||||||
|
return 'r:' + _MapKind(kind[0:-1])
|
||||||
|
if kind.startswith('asso<'):
|
||||||
|
assert kind.endswith('>')
|
||||||
|
return 'asso:' + _MapKind(kind[5:-1])
|
||||||
|
if kind.startswith('rmt<'):
|
||||||
|
assert kind.endswith('>')
|
||||||
|
return 'rmt:' + _MapKind(kind[4:-1])
|
||||||
|
if kind.startswith('rcv<'):
|
||||||
|
assert kind.endswith('>')
|
||||||
|
return 'rcv:' + _MapKind(kind[4:-1])
|
||||||
|
if kind.startswith('rma<'):
|
||||||
|
assert kind.endswith('>')
|
||||||
|
return 'rma:' + _MapKind(kind[4:-1])
|
||||||
|
if kind.startswith('rca<'):
|
||||||
|
assert kind.endswith('>')
|
||||||
|
return 'rca:' + _MapKind(kind[4:-1])
|
||||||
|
if kind in map_to_kind:
|
||||||
|
return map_to_kind[kind]
|
||||||
|
return 'x:' + kind
|
||||||
|
|
||||||
|
|
||||||
|
def _AttributeListToDict(attribute_list):
  """Flatten an ast.AttributeList into a {key: value} dict, passing through
  None unchanged."""
  if attribute_list is None:
    return None
  assert isinstance(attribute_list, ast.AttributeList)
  # TODO(vtl): Check for duplicate keys here.
  return {attribute.key: attribute.value for attribute in attribute_list}
|
||||||
|
|
||||||
|
|
||||||
|
builtin_values = frozenset([
|
||||||
|
"double.INFINITY", "double.NEGATIVE_INFINITY", "double.NAN",
|
||||||
|
"float.INFINITY", "float.NEGATIVE_INFINITY", "float.NAN"
|
||||||
|
])
|
||||||
|
|
||||||
|
|
||||||
|
def _IsBuiltinValue(value):
|
||||||
|
return value in builtin_values
|
||||||
|
|
||||||
|
|
||||||
|
def _LookupKind(kinds, spec, scope):
|
||||||
|
"""Tries to find which Kind a spec refers to, given the scope in which its
|
||||||
|
referenced. Starts checking from the narrowest scope to most general. For
|
||||||
|
example, given a struct field like
|
||||||
|
Foo.Bar x;
|
||||||
|
Foo.Bar could refer to the type 'Bar' in the 'Foo' namespace, or an inner
|
||||||
|
type 'Bar' in the struct 'Foo' in the current namespace.
|
||||||
|
|
||||||
|
|scope| is a tuple that looks like (namespace, struct/interface), referring
|
||||||
|
to the location where the type is referenced."""
|
||||||
|
if spec.startswith('x:'):
|
||||||
|
mojom_name = spec[2:]
|
||||||
|
for i in range(len(scope), -1, -1):
|
||||||
|
test_spec = 'x:'
|
||||||
|
if i > 0:
|
||||||
|
test_spec += '.'.join(scope[:i]) + '.'
|
||||||
|
test_spec += mojom_name
|
||||||
|
kind = kinds.get(test_spec)
|
||||||
|
if kind:
|
||||||
|
return kind
|
||||||
|
|
||||||
|
return kinds.get(spec)
|
||||||
|
|
||||||
|
|
||||||
|
def _GetScopeForKind(module, kind):
  """For a given kind, returns a tuple of progressively more specific names
  used to qualify the kind. For example if kind is an enum named Bar nested in
  a struct Foo within module 'foo', this would return ('foo', 'Foo', 'Bar')."""
  if isinstance(kind, mojom.Enum) and kind.parent_kind:
    # Nested enums are qualified by their enclosing kind, recursively.
    return _GetScopeForKind(module, kind.parent_kind) + (kind.mojom_name, )

  scope = ()
  if module.mojom_namespace:
    scope += (module.mojom_namespace, )
  if kind:
    scope += (kind.mojom_name, )
  return scope
|
||||||
|
|
||||||
|
|
||||||
|
def _LookupValueInScope(module, kind, identifier):
  """Resolve |identifier| to a concrete NamedValue in |module|, searching from
  the scope of |kind| outwards to the module scope. Returns None on failure."""
  scope = _GetScopeForKind(module, kind)
  for depth in reversed(range(len(scope) + 1)):
    qualified_name = '.'.join(scope[:depth] + (identifier, ))
    match = module.values.get(qualified_name)
    if match:
      return match
  return None
|
||||||
|
|
||||||
|
|
||||||
|
def _LookupValue(module, parent_kind, implied_kind, ast_leaf_node):
  """Resolve a parse-tree leaf ('IDENTIFIER', 'x') to a concrete value.

  Non-IDENTIFIER leaves (already-concrete constant values) pass through
  unchanged. Resolution order: the enclosing scope of |parent_kind|, then the
  scope of |implied_kind| if provided (useful for concise enum value
  references when assigning enum-typed constants or field values), and
  finally mojom's builtin symbols.

  Raises:
    ValueError: if the identifier cannot be resolved anywhere.
  """
  is_identifier = (isinstance(ast_leaf_node, tuple)
                   and ast_leaf_node[0] == 'IDENTIFIER')
  if not is_identifier:
    return ast_leaf_node

  identifier = ast_leaf_node[1]

  # First look for a known user-defined identifier within the enclosing scope.
  resolved = _LookupValueInScope(module, parent_kind, identifier)

  # Next look in the scope of implied_kind, if provided.
  if not resolved and implied_kind and implied_kind.module:
    resolved = _LookupValueInScope(implied_kind.module, implied_kind,
                                   identifier)
  if resolved:
    return resolved

  # Fall back on defined builtin symbols.
  if _IsBuiltinValue(identifier):
    return mojom.BuiltinValue(identifier)

  raise ValueError('Unknown identifier %s' % identifier)
|
||||||
|
|
||||||
|
|
||||||
|
def _Kind(kinds, spec, scope):
  """Convert a type name into a mojom.Kind object.

  As a side-effect this function adds the result to 'kinds'.

  Args:
    kinds: {Dict[str, mojom.Kind]} All known kinds up to this point, indexed by
        their names.
    spec: {str} A name uniquely identifying a type.
    scope: {Tuple[str, str]} A tuple that looks like (namespace,
        struct/interface), referring to the location where the type is
        referenced.

  Returns:
    {mojom.Kind} The type corresponding to 'spec'.
  """
  kind = _LookupKind(kinds, spec, scope)
  if kind:
    return kind

  # Dispatch on the spec's prefix, recursing on the inner type spec. Branch
  # order matters: 'a:' and 'asso:' must be tested before the bare 'a'
  # (fixed-length array) prefix, which they would also match.
  if spec.startswith('?'):
    # '?' marks a nullable kind.
    kind = _Kind(kinds, spec[1:], scope).MakeNullableKind()
  elif spec.startswith('a:'):
    # Unsized array, e.g. 'a:i32'.
    kind = mojom.Array(_Kind(kinds, spec[2:], scope))
  elif spec.startswith('asso:'):
    # Associated interface; wraps a request if the inner kind is one.
    inner_kind = _Kind(kinds, spec[5:], scope)
    if isinstance(inner_kind, mojom.InterfaceRequest):
      kind = mojom.AssociatedInterfaceRequest(inner_kind)
    else:
      kind = mojom.AssociatedInterface(inner_kind)
  elif spec.startswith('a'):
    # Fixed-length array: the digits between 'a' and ':' are the length.
    colon = spec.find(':')
    length = int(spec[1:colon])
    kind = mojom.Array(_Kind(kinds, spec[colon + 1:], scope), length)
  elif spec.startswith('r:'):
    kind = mojom.InterfaceRequest(_Kind(kinds, spec[2:], scope))
  elif spec.startswith('rmt:'):
    kind = mojom.PendingRemote(_Kind(kinds, spec[4:], scope))
  elif spec.startswith('rcv:'):
    kind = mojom.PendingReceiver(_Kind(kinds, spec[4:], scope))
  elif spec.startswith('rma:'):
    kind = mojom.PendingAssociatedRemote(_Kind(kinds, spec[4:], scope))
  elif spec.startswith('rca:'):
    kind = mojom.PendingAssociatedReceiver(_Kind(kinds, spec[4:], scope))
  elif spec.startswith('m['):
    # Map spec, e.g. 'm[s][u8]'. Isolate the two types from their brackets.

    # It is not allowed to use map as key, so there shouldn't be nested ']'s
    # inside the key type spec.
    key_end = spec.find(']')
    assert key_end != -1 and key_end < len(spec) - 1
    assert spec[key_end + 1] == '[' and spec[-1] == ']'

    first_kind = spec[2:key_end]
    second_kind = spec[key_end + 2:-1]

    kind = mojom.Map(
        _Kind(kinds, first_kind, scope), _Kind(kinds, second_kind, scope))
  else:
    # Unrecognized spec: record a plain Kind placeholder under this spec.
    kind = mojom.Kind(spec)

  # Memoize so later lookups of the same spec reuse this object.
  kinds[spec] = kind
  return kind
|
||||||
|
|
||||||
|
|
||||||
|
def _Import(module, import_module):
  """Merges the importable kinds and values declared directly in
  import_module into module's lookup tables.

  Args:
    module: {mojom.Module} Module currently being constructed.
    import_module: {mojom.Module} Already-translated imported module.

  Returns:
    {mojom.Module} import_module, unchanged.
  """
  importable = (mojom.Struct, mojom.Union, mojom.Enum, mojom.Interface)
  # Copy the struct kinds from our imports into the current module. Only
  # kinds declared in the imported file itself (not its own imports) qualify.
  module.kinds.update(
      (kind.spec, kind) for kind in import_module.kinds.values()
      if isinstance(kind, importable)
      and kind.module.path == import_module.path)
  # Ditto for values.
  module.values.update(
      (value.GetSpec(), value) for value in import_module.values.values()
      if value.module.path == import_module.path)

  return import_module
|
||||||
|
|
||||||
|
|
||||||
|
def _Struct(module, parsed_struct):
  """
  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_struct: {ast.Struct} Parsed struct.

  Returns:
    {mojom.Struct} AST struct.
  """
  struct = mojom.Struct(module=module)
  struct.mojom_name = parsed_struct.mojom_name
  # A struct with no body is a native-only declaration.
  struct.native_only = parsed_struct.body is None
  struct.spec = 'x:' + module.GetNamespacePrefix() + struct.mojom_name
  # Register eagerly so nested/recursive references to this struct resolve.
  module.kinds[struct.spec] = struct
  struct.enums = []
  struct.constants = []
  # Fields are stashed as raw AST nodes here; a later pass (see _Module)
  # translates them into mojom.StructField objects.
  struct.fields_data = []
  if not struct.native_only:
    _ProcessElements(
        parsed_struct.mojom_name, parsed_struct.body, {
            ast.Enum:
            lambda enum: struct.enums.append(_Enum(module, enum, struct)),
            ast.Const:
            lambda const: struct.constants.append(
                _Constant(module, const, struct)),
            ast.StructField:
            struct.fields_data.append,
        })

  struct.attributes = _AttributeListToDict(parsed_struct.attribute_list)

  # Enforce that a [Native] attribute is set to make native-only struct
  # declarations more explicit.
  if struct.native_only:
    if not struct.attributes or not struct.attributes.get('Native', False):
      raise Exception("Native-only struct declarations must include a " +
                      "Native attribute.")

  if struct.attributes and struct.attributes.get('CustomSerializer', False):
    struct.custom_serializer = True

  return struct
|
||||||
|
|
||||||
|
|
||||||
|
def _Union(module, parsed_union):
  """Translates a parsed union into a mojom.Union.

  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_union: {ast.Union} Parsed union.

  Returns:
    {mojom.Union} AST union.
  """
  union = mojom.Union(module=module)
  union.mojom_name = parsed_union.mojom_name
  union.spec = 'x:' + module.GetNamespacePrefix() + union.mojom_name
  module.kinds[union.spec] = union

  # Stash raw field AST nodes temporarily; a later pass (see _Module) turns
  # them into mojom.UnionField objects.
  fields_data = []
  union.fields_data = fields_data
  _ProcessElements(parsed_union.mojom_name, parsed_union.body,
                   {ast.UnionField: fields_data.append})

  union.attributes = _AttributeListToDict(parsed_union.attribute_list)
  return union
|
||||||
|
|
||||||
|
|
||||||
|
def _StructField(module, parsed_field, struct):
  """Translates one parsed struct field into a mojom.StructField.

  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_field: {ast.StructField} Parsed struct field.
    struct: {mojom.Struct} Struct this field belongs to.

  Returns:
    {mojom.StructField} AST struct field.
  """
  scope = (module.mojom_namespace, struct.mojom_name)
  field = mojom.StructField()
  field.mojom_name = parsed_field.mojom_name
  field.kind = _Kind(module.kinds, _MapKind(parsed_field.typename), scope)
  if parsed_field.ordinal:
    field.ordinal = parsed_field.ordinal.value
  else:
    field.ordinal = None
  # A default may reference a constant or enum value; resolve it now.
  field.default = _LookupValue(module, struct, field.kind,
                               parsed_field.default_value)
  field.attributes = _AttributeListToDict(parsed_field.attribute_list)
  return field
|
||||||
|
|
||||||
|
|
||||||
|
def _UnionField(module, parsed_field, union):
  """Translates one parsed union field into a mojom.UnionField.

  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_field: {ast.UnionField} Parsed union field.
    union: {mojom.Union} Union this field belongs to.

  Returns:
    {mojom.UnionField} AST union field.
  """
  scope = (module.mojom_namespace, union.mojom_name)
  field = mojom.UnionField()
  field.mojom_name = parsed_field.mojom_name
  field.kind = _Kind(module.kinds, _MapKind(parsed_field.typename), scope)
  if parsed_field.ordinal:
    field.ordinal = parsed_field.ordinal.value
  else:
    field.ordinal = None
  # Union fields never carry default values.
  field.default = None
  field.attributes = _AttributeListToDict(parsed_field.attribute_list)
  return field
|
||||||
|
|
||||||
|
|
||||||
|
def _Parameter(module, parsed_param, interface):
  """Translates one parsed method parameter into a mojom.Parameter.

  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_param: {ast.Parameter} Parsed parameter.
    interface: {mojom.Interface} Interface this parameter belongs to.

  Returns:
    {mojom.Parameter} AST parameter.
  """
  scope = (module.mojom_namespace, interface.mojom_name)
  parameter = mojom.Parameter()
  parameter.mojom_name = parsed_param.mojom_name
  parameter.kind = _Kind(module.kinds, _MapKind(parsed_param.typename), scope)
  if parsed_param.ordinal:
    parameter.ordinal = parsed_param.ordinal.value
  else:
    parameter.ordinal = None
  parameter.default = None  # TODO(tibell): We never have these. Remove field?
  parameter.attributes = _AttributeListToDict(parsed_param.attribute_list)
  return parameter
|
||||||
|
|
||||||
|
|
||||||
|
def _Method(module, parsed_method, interface):
  """
  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_method: {ast.Method} Parsed method.
    interface: {mojom.Interface} Interface this method belongs to.

  Returns:
    {mojom.Method} AST method.
  """
  method = mojom.Method(
      interface,
      parsed_method.mojom_name,
      ordinal=parsed_method.ordinal.value if parsed_method.ordinal else None)
  method.parameters = list(
      map(lambda parameter: _Parameter(module, parameter, interface),
          parsed_method.parameter_list))
  # A missing response parameter list (None) is distinct from an empty one:
  # only the former means the method has no response at all.
  if parsed_method.response_parameter_list is not None:
    method.response_parameters = list(
        map(lambda parameter: _Parameter(module, parameter, interface),
            parsed_method.response_parameter_list))
  # NOTE: attributes must be assigned before checking method.sync below,
  # which presumably derives from them — confirm against mojom.Method.
  method.attributes = _AttributeListToDict(parsed_method.attribute_list)

  # Enforce that only methods with response can have a [Sync] attribute.
  if method.sync and method.response_parameters is None:
    raise Exception("Only methods with response can include a [Sync] "
                    "attribute. If no response parameters are needed, you "
                    "could use an empty response parameter list, i.e., "
                    "\"=> ()\".")

  return method
|
||||||
|
|
||||||
|
|
||||||
|
def _Interface(module, parsed_iface):
  """
  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_iface: {ast.Interface} Parsed interface.

  Returns:
    {mojom.Interface} AST interface.
  """
  interface = mojom.Interface(module=module)
  interface.mojom_name = parsed_iface.mojom_name
  interface.spec = 'x:' + module.GetNamespacePrefix() + interface.mojom_name
  # Register eagerly so method signatures may refer back to this interface.
  module.kinds[interface.spec] = interface
  interface.attributes = _AttributeListToDict(parsed_iface.attribute_list)
  interface.enums = []
  interface.constants = []
  # Methods are stashed as raw AST nodes here; a later pass (see _Module)
  # translates them into mojom.Method objects.
  interface.methods_data = []
  _ProcessElements(
      parsed_iface.mojom_name, parsed_iface.body, {
          ast.Enum:
          lambda enum: interface.enums.append(_Enum(module, enum, interface)),
          ast.Const:
          lambda const: interface.constants.append(
              _Constant(module, const, interface)),
          ast.Method:
          interface.methods_data.append,
      })
  return interface
|
||||||
|
|
||||||
|
|
||||||
|
def _EnumField(module, enum, parsed_field):
  """Translates one parsed enum value into a mojom.EnumField and registers a
  matching mojom.EnumValue in the module's value namespace.

  Args:
    module: {mojom.Module} Module currently being constructed.
    enum: {mojom.Enum} Enum this field belongs to.
    parsed_field: {ast.EnumValue} Parsed enum value.

  Returns:
    {mojom.EnumField} AST enum field.
  """
  field = mojom.EnumField()
  field.mojom_name = parsed_field.mojom_name
  # The assigned value may reference another enum value or a constant.
  field.value = _LookupValue(module, enum, None, parsed_field.value)
  field.attributes = _AttributeListToDict(parsed_field.attribute_list)

  # Make the field addressable by name elsewhere in the module.
  enum_value = mojom.EnumValue(module, enum, field)
  module.values[enum_value.GetSpec()] = enum_value
  return field
|
||||||
|
|
||||||
|
|
||||||
|
def _ResolveNumericEnumValues(enum):
  """
  Given a reference to a mojom.Enum, resolves and assigns the numeric value of
  each field, and also computes the min_value and max_value of the enum.
  """
  # Fields without an explicit value get previous value + 1, so the first
  # implicit field gets 0.
  prev_value = -1
  min_value = None
  max_value = None
  for field in enum.fields:
    # This enum value is +1 the previous enum value (e.g: BEGIN).
    if field.value is None:
      prev_value += 1

    # Integral value (e.g: BEGIN = -0x1). Base 0 lets int() accept decimal,
    # hex, octal and binary literals alike.
    elif _IsStrOrUnicode(field.value):
      prev_value = int(field.value, 0)

    # Reference to a previous enum value (e.g: INIT = BEGIN).
    elif isinstance(field.value, mojom.EnumValue):
      prev_value = field.value.field.numeric_value
    elif isinstance(field.value, mojom.ConstantValue):
      constant = field.value.constant
      kind = constant.kind
      # bool is integral in some type systems, so exclude it explicitly.
      if not mojom.IsIntegralKind(kind) or mojom.IsBoolKind(kind):
        raise ValueError('Enum values must be integers. %s is not an integer.' %
                         constant.mojom_name)
      prev_value = int(constant.value, 0)
    else:
      raise Exception('Unresolved enum value for %s' % field.value.GetSpec())

    field.numeric_value = prev_value
    if min_value is None or prev_value < min_value:
      min_value = prev_value
    if max_value is None or prev_value > max_value:
      max_value = prev_value

  enum.min_value = min_value
  enum.max_value = max_value
|
||||||
|
|
||||||
|
|
||||||
|
def _Enum(module, parsed_enum, parent_kind):
  """
  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_enum: {ast.Enum} Parsed enum.
    parent_kind: Enclosing struct/interface kind, or None for a top-level
        enum.

  Returns:
    {mojom.Enum} AST enum.
  """
  enum = mojom.Enum(module=module)
  enum.mojom_name = parsed_enum.mojom_name
  # An enum with no value list is a native-only declaration.
  enum.native_only = parsed_enum.enum_value_list is None
  mojom_name = enum.mojom_name
  if parent_kind:
    # Nested enums are qualified by their enclosing struct/interface name.
    mojom_name = parent_kind.mojom_name + '.' + mojom_name
  enum.spec = 'x:%s.%s' % (module.mojom_namespace, mojom_name)
  enum.parent_kind = parent_kind
  enum.attributes = _AttributeListToDict(parsed_enum.attribute_list)

  if not enum.native_only:
    enum.fields = list(
        map(lambda field: _EnumField(module, enum, field),
            parsed_enum.enum_value_list))
    _ResolveNumericEnumValues(enum)

  module.kinds[enum.spec] = enum

  # Enforce that a [Native] attribute is set to make native-only enum
  # declarations more explicit.
  if enum.native_only:
    if not enum.attributes or not enum.attributes.get('Native', False):
      raise Exception("Native-only enum declarations must include a " +
                      "Native attribute.")

  return enum
|
||||||
|
|
||||||
|
|
||||||
|
def _Constant(module, parsed_const, parent_kind):
  """
  Args:
    module: {mojom.Module} Module currently being constructed.
    parsed_const: {ast.Const} Parsed constant.
    parent_kind: Enclosing struct/interface kind, or None for a top-level
        constant.

  Returns:
    {mojom.Constant} AST constant.
  """
  constant = mojom.Constant()
  constant.mojom_name = parsed_const.mojom_name
  if parent_kind:
    scope = (module.mojom_namespace, parent_kind.mojom_name)
  else:
    scope = (module.mojom_namespace, )
  # TODO(mpcomplete): maybe we should only support POD kinds.
  constant.kind = _Kind(module.kinds, _MapKind(parsed_const.typename), scope)
  constant.parent_kind = parent_kind
  constant.value = _LookupValue(module, parent_kind, constant.kind,
                                parsed_const.value)

  # Iteratively resolve this constant reference to a concrete value, in case
  # it is defined in terms of other constants.
  while isinstance(constant.value, mojom.ConstantValue):
    constant.value = constant.value.constant.value

  # Make the constant addressable by name in the module's value namespace.
  value = mojom.ConstantValue(module, parent_kind, constant)
  module.values[value.GetSpec()] = value
  return constant
|
||||||
|
|
||||||
|
|
||||||
|
def _CollectReferencedKinds(module, all_defined_kinds):
  """
  Takes a {mojom.Module} object and a list of all defined kinds within that
  module, and enumerates the complete dict of user-defined mojom types
  (as {mojom.Kind} objects) referenced by the module's own defined kinds (i.e.
  as types of struct or union or interface parameters. The returned dict is
  keyed by kind spec.
  """

  def extract_referenced_user_kinds(kind):
    # Recurse through container kinds; only user-defined leaf kinds (or the
    # targets of interface requests / associated kinds) are returned.
    if mojom.IsArrayKind(kind):
      return extract_referenced_user_kinds(kind.kind)
    if mojom.IsMapKind(kind):
      return (extract_referenced_user_kinds(kind.key_kind) +
              extract_referenced_user_kinds(kind.value_kind))
    if mojom.IsInterfaceRequestKind(kind) or mojom.IsAssociatedKind(kind):
      return [kind.kind]
    if mojom.IsStructKind(kind):
      return [kind]
    if (mojom.IsInterfaceKind(kind) or mojom.IsEnumKind(kind)
        or mojom.IsUnionKind(kind)):
      return [kind]
    return []

  def sanitize_kind(kind):
    """Removes nullability from a kind"""
    if kind.spec.startswith('?'):
      return _Kind(module.kinds, kind.spec[1:], (module.mojom_namespace, ''))
    return kind

  referenced_user_kinds = {}
  # Scan struct and union field types.
  for defined_kind in all_defined_kinds:
    if mojom.IsStructKind(defined_kind) or mojom.IsUnionKind(defined_kind):
      for field in defined_kind.fields:
        for referenced_kind in extract_referenced_user_kinds(field.kind):
          sanitized_kind = sanitize_kind(referenced_kind)
          referenced_user_kinds[sanitized_kind.spec] = sanitized_kind

  # Also scan for references in parameter lists
  for interface in module.interfaces:
    for method in interface.methods:
      for param in itertools.chain(method.parameters or [],
                                   method.response_parameters or []):
        if (mojom.IsStructKind(param.kind) or mojom.IsUnionKind(param.kind)
            or mojom.IsEnumKind(param.kind)
            or mojom.IsAnyInterfaceKind(param.kind)):
          for referenced_kind in extract_referenced_user_kinds(param.kind):
            sanitized_kind = sanitize_kind(referenced_kind)
            referenced_user_kinds[sanitized_kind.spec] = sanitized_kind

  return referenced_user_kinds
|
||||||
|
|
||||||
|
|
||||||
|
def _AssignDefaultOrdinals(items):
|
||||||
|
"""Assigns default ordinal values to a sequence of items if necessary."""
|
||||||
|
next_ordinal = 0
|
||||||
|
for item in items:
|
||||||
|
if item.ordinal is not None:
|
||||||
|
next_ordinal = item.ordinal + 1
|
||||||
|
else:
|
||||||
|
item.ordinal = next_ordinal
|
||||||
|
next_ordinal += 1
|
||||||
|
|
||||||
|
|
||||||
|
def _AssertTypeIsStable(kind):
  """Raises an error if a type is not stable, meaning it is composed of at least
  one type that is not marked [Stable]."""

  def assertDependencyIsStable(dependency):
    # User-defined leaf kinds must themselves be stable; container kinds
    # recurse into their element/key/value types.
    if (mojom.IsEnumKind(dependency) or mojom.IsStructKind(dependency)
        or mojom.IsUnionKind(dependency) or mojom.IsInterfaceKind(dependency)):
      if not dependency.stable:
        raise Exception(
            '%s is marked [Stable] but cannot be stable because it depends on '
            '%s, which is not marked [Stable].' %
            (kind.mojom_name, dependency.mojom_name))
    elif mojom.IsArrayKind(dependency) or mojom.IsAnyInterfaceKind(dependency):
      assertDependencyIsStable(dependency.kind)
    elif mojom.IsMapKind(dependency):
      assertDependencyIsStable(dependency.key_kind)
      assertDependencyIsStable(dependency.value_kind)

  # Check every type referenced by this kind's fields or method parameters.
  if mojom.IsStructKind(kind) or mojom.IsUnionKind(kind):
    for field in kind.fields:
      assertDependencyIsStable(field.kind)
  elif mojom.IsInterfaceKind(kind):
    for method in kind.methods:
      for param in method.param_struct.fields:
        assertDependencyIsStable(param.kind)
      if method.response_param_struct:
        for response_param in method.response_param_struct.fields:
          assertDependencyIsStable(response_param.kind)
|
||||||
|
|
||||||
|
|
||||||
|
def _Module(tree, path, imports):
  """
  Args:
    tree: {ast.Mojom} The parse tree.
    path: {str} The path to the mojom file.
    imports: {Dict[str, mojom.Module]} Mapping from filenames, as they appear in
        the import list, to already processed modules. Used to process imports.

  Returns:
    {mojom.Module} An AST for the mojom.
  """
  module = mojom.Module(path=path)
  module.kinds = {}
  # Seed the kind table with the primitive types.
  for kind in mojom.PRIMITIVES:
    module.kinds[kind.spec] = kind

  module.values = {}

  module.mojom_namespace = tree.module.mojom_namespace[1] if tree.module else ''
  # Imports must come first, because they add to module.kinds which is used
  # by the others.
  module.imports = [
      _Import(module, imports[imp.import_filename]) for imp in tree.import_list
  ]
  if tree.module and tree.module.attribute_list:
    assert isinstance(tree.module.attribute_list, ast.AttributeList)
    # TODO(vtl): Check for duplicate keys here.
    module.attributes = dict((attribute.key, attribute.value)
                             for attribute in tree.module.attribute_list)

  filename = os.path.basename(path)
  # First pass collects kinds.
  module.constants = []
  module.enums = []
  module.structs = []
  module.unions = []
  module.interfaces = []
  _ProcessElements(
      filename, tree.definition_list, {
          ast.Const:
          lambda const: module.constants.append(_Constant(module, const, None)),
          ast.Enum:
          lambda enum: module.enums.append(_Enum(module, enum, None)),
          ast.Struct:
          lambda struct: module.structs.append(_Struct(module, struct)),
          ast.Union:
          lambda union: module.unions.append(_Union(module, union)),
          ast.Interface:
          lambda interface: module.interfaces.append(
              _Interface(module, interface)),
      })

  # Second pass expands fields and methods. This allows fields and parameters
  # to refer to kinds defined anywhere in the mojom.
  all_defined_kinds = {}
  for struct in module.structs:
    struct.fields = list(
        map(lambda field: _StructField(module, field, struct),
            struct.fields_data))
    _AssignDefaultOrdinals(struct.fields)
    # The raw AST nodes stashed by _Struct are no longer needed.
    del struct.fields_data
    all_defined_kinds[struct.spec] = struct
    for enum in struct.enums:
      all_defined_kinds[enum.spec] = enum

  for union in module.unions:
    union.fields = list(
        map(lambda field: _UnionField(module, field, union), union.fields_data))
    _AssignDefaultOrdinals(union.fields)
    del union.fields_data
    all_defined_kinds[union.spec] = union

  for interface in module.interfaces:
    interface.methods = list(
        map(lambda method: _Method(module, method, interface),
            interface.methods_data))
    _AssignDefaultOrdinals(interface.methods)
    del interface.methods_data
    all_defined_kinds[interface.spec] = interface
    for enum in interface.enums:
      all_defined_kinds[enum.spec] = enum
  for enum in module.enums:
    all_defined_kinds[enum.spec] = enum

  # Anything referenced but not defined in this module must have come from an
  # import.
  all_referenced_kinds = _CollectReferencedKinds(module,
                                                 all_defined_kinds.values())
  imported_kind_specs = set(all_referenced_kinds.keys()).difference(
      set(all_defined_kinds.keys()))
  module.imported_kinds = dict(
      (spec, all_referenced_kinds[spec]) for spec in imported_kind_specs)

  generator.AddComputedData(module)
  # NOTE(review): the per-method param structs are only available after
  # AddComputedData — presumably it synthesizes them; confirm in generator.
  for iface in module.interfaces:
    for method in iface.methods:
      if method.param_struct:
        _AssignDefaultOrdinals(method.param_struct.fields)
      if method.response_param_struct:
        _AssignDefaultOrdinals(method.response_param_struct.fields)

  # Ensure that all types marked [Stable] are actually stable. Enums are
  # automatically OK since they don't depend on other definitions.
  for kinds in (module.structs, module.unions, module.interfaces):
    for kind in kinds:
      if kind.stable:
        _AssertTypeIsStable(kind)

  return module
|
||||||
|
|
||||||
|
|
||||||
|
def OrderedModule(tree, path, imports):
  """Convert parse tree to AST module.

  Args:
    tree: {ast.Mojom} The parse tree.
    path: {str} The path to the mojom file.
    imports: {Dict[str, mojom.Module]} Mapping from filenames, as they appear in
        the import list, to already processed modules. Used to process imports.

  Returns:
    {mojom.Module} An AST for the mojom.
  """
  # All the work is done by _Module; this is the public entry point.
  return _Module(tree, path, imports)
|
|
@ -0,0 +1,73 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from mojom.generate import module as mojom
|
||||||
|
from mojom.generate import translate
|
||||||
|
from mojom.parse import ast
|
||||||
|
|
||||||
|
|
||||||
|
class TranslateTest(unittest.TestCase):
  """Tests |parser.Parse()|."""

  # Fix: assertEquals is a deprecated alias of assertEqual and was removed in
  # Python 3.12; use assertEqual throughout.

  def testSimpleArray(self):
    """Tests a simple int32[]."""
    # pylint: disable=W0212
    self.assertEqual(translate._MapKind("int32[]"), "a:i32")

  def testAssociativeArray(self):
    """Tests a simple uint8{string}."""
    # pylint: disable=W0212
    self.assertEqual(translate._MapKind("uint8{string}"), "m[s][u8]")

  def testLeftToRightAssociativeArray(self):
    """Makes sure that parsing is done from right to left on the internal kinds
    in the presence of an associative array."""
    # pylint: disable=W0212
    self.assertEqual(translate._MapKind("uint8[]{string}"), "m[s][a:u8]")

  def testTranslateSimpleUnions(self):
    """Makes sure that a simple union is translated correctly."""
    tree = ast.Mojom(None, ast.ImportList(), [
        ast.Union(
            "SomeUnion", None,
            ast.UnionBody([
                ast.UnionField("a", None, None, "int32"),
                ast.UnionField("b", None, None, "string")
            ]))
    ])

    translation = translate.OrderedModule(tree, "mojom_tree", [])
    self.assertEqual(1, len(translation.unions))

    union = translation.unions[0]
    self.assertTrue(isinstance(union, mojom.Union))
    self.assertEqual("SomeUnion", union.mojom_name)
    self.assertEqual(2, len(union.fields))
    self.assertEqual("a", union.fields[0].mojom_name)
    self.assertEqual(mojom.INT32.spec, union.fields[0].kind.spec)
    self.assertEqual("b", union.fields[1].mojom_name)
    self.assertEqual(mojom.STRING.spec, union.fields[1].kind.spec)

  def testMapKindRaisesWithDuplicate(self):
    """Verifies _MapTreeForType() raises when passed two values with the same
    name."""
    # NOTE(review): docstring mentions _MapTreeForType but the call below is
    # to _ElemsOfType — confirm which helper translate currently exposes.
    methods = [
        ast.Method('dup', None, None, ast.ParameterList(), None),
        ast.Method('dup', None, None, ast.ParameterList(), None)
    ]
    with self.assertRaises(Exception):
      translate._ElemsOfType(methods, ast.Method, 'scope')

  def testAssociatedKinds(self):
    """Tests type spec translation of associated interfaces and requests."""
    # pylint: disable=W0212
    self.assertEqual(
        translate._MapKind("asso<SomeInterface>?"), "?asso:x:SomeInterface")
    self.assertEqual(
        translate._MapKind("asso<SomeInterface&>?"), "?asso:r:x:SomeInterface")
|
427
utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
Normal file
427
utils/ipc/mojo/public/tools/mojom/mojom/parse/ast.py
Normal file
|
@ -0,0 +1,427 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Node classes for the AST for a Mojo IDL file."""
|
||||||
|
|
||||||
|
# Note: For convenience of testing, you probably want to define __eq__() methods
|
||||||
|
# for all node types; it's okay to be slightly lax (e.g., not compare filename
|
||||||
|
# and lineno). You may also define __repr__() to help with analyzing test
|
||||||
|
# failures, especially for more complex types.
|
||||||
|
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def _IsStrOrUnicode(x):
|
||||||
|
if sys.version_info[0] < 3:
|
||||||
|
return isinstance(x, (unicode, str))
|
||||||
|
return isinstance(x, str)
|
||||||
|
|
||||||
|
|
||||||
|
class NodeBase(object):
  """Common base for every node in the AST.

  Records where the node came from (filename/lineno), and defines equality
  as exact-class identity so subclasses only need to refine __eq__ with
  their own field comparisons.
  """

  def __init__(self, filename=None, lineno=None):
    # Source location; may be absent for synthesized nodes.
    self.filename = filename
    self.lineno = lineno

  def __eq__(self, other):
    # Deliberately an exact type() comparison rather than isinstance(), so a
    # subclass never compares equal to its parent class.
    # pylint: disable=unidiomatic-typecheck
    return type(self) == type(other)

  # __ne__ is derived from __eq__ so subclasses need not override it.
  def __ne__(self, other):
    return not self == other
|
||||||
|
|
||||||
|
|
||||||
|
# TODO(vtl): Some of this is complicated enough that it should be tested.
|
||||||
|
class NodeListBase(NodeBase):
  """Represents a list of other nodes, all having the same type. (This is meant
  to be subclassed, with subclasses defining _list_item_type to be the class (or
  classes, in a tuple) of the members of the list.)"""

  def __init__(self, item_or_items=None, **kwargs):
    super(NodeListBase, self).__init__(**kwargs)
    # The contained nodes, in order. _list_item_type is supplied by the
    # subclass and may be a single class or a tuple of classes.
    self.items = []
    if item_or_items is None:
      pass
    elif isinstance(item_or_items, list):
      for item in item_or_items:
        assert isinstance(item, self._list_item_type)
        self.Append(item)
    else:
      # A single item is accepted as a convenience.
      assert isinstance(item_or_items, self._list_item_type)
      self.Append(item_or_items)

  # Support iteration. For everything else, users should just access |items|
  # directly. (We intentionally do NOT supply |__len__()| or |__nonzero__()|, so
  # |bool(NodeListBase())| is true.)
  def __iter__(self):
    return self.items.__iter__()

  def __eq__(self, other):
    return super(NodeListBase, self).__eq__(other) and \
           self.items == other.items

  # Implement this so that on failure, we get slightly more sensible output.
  def __repr__(self):
    return self.__class__.__name__ + "([" + \
           ", ".join([repr(elem) for elem in self.items]) + "])"

  def Insert(self, item):
    """Inserts item at the front of the list."""

    assert isinstance(item, self._list_item_type)
    self.items.insert(0, item)
    self._UpdateFilenameAndLineno()

  def Append(self, item):
    """Appends item to the end of the list."""

    assert isinstance(item, self._list_item_type)
    self.items.append(item)
    self._UpdateFilenameAndLineno()

  def _UpdateFilenameAndLineno(self):
    # The list's own location tracks its first element's location.
    if self.items:
      self.filename = self.items[0].filename
      self.lineno = self.items[0].lineno
|
||||||
|
|
||||||
|
|
||||||
|
class Definition(NodeBase):
  """A construct with a global name (enum, enum value, const, struct, struct
  field, interface, ...). Parameter definitions are excluded. Meant to be
  subclassed."""

  def __init__(self, mojom_name, **kwargs):
    assert _IsStrOrUnicode(mojom_name)
    super(Definition, self).__init__(**kwargs)
    self.mojom_name = mojom_name
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
|
||||||
|
|
||||||
|
class Attribute(NodeBase):
  """A single key/value attribute attached to a definition."""

  def __init__(self, key, value, **kwargs):
    assert _IsStrOrUnicode(key)
    super(Attribute, self).__init__(**kwargs)
    self.key = key
    self.value = value

  def __eq__(self, other):
    return (super(Attribute, self).__eq__(other)
            and self.key == other.key
            and self.value == other.value)
|
||||||
|
|
||||||
|
|
||||||
|
class AttributeList(NodeListBase):
  """Represents a list of attributes."""

  _list_item_type = Attribute
|
||||||
|
|
||||||
|
|
||||||
|
class Const(Definition):
  """Represents a const definition.

  |typename| is carried as a plain string at this stage; |value| is either a
  literal (also a string at this stage) or a "wrapped identifier" tuple.
  """

  def __init__(self, mojom_name, attribute_list, typename, value, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    # The typename is currently passed through as a string.
    assert _IsStrOrUnicode(typename)
    # The value is either a literal (currently passed through as a string) or
    # a "wrapped identifier".
    # Fix: the original asserted on the function object |_IsStrOrUnicode|
    # itself (always truthy), which made this check a no-op; it must be
    # called with |value|.
    assert _IsStrOrUnicode(value) or isinstance(value, tuple)
    super(Const, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.typename = typename
    self.value = value

  def __eq__(self, other):
    return (super(Const, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.typename == other.typename
            and self.value == other.value)
|
||||||
|
|
||||||
|
|
||||||
|
class Enum(Definition):
  """An enum definition: optional attributes plus an optional value list."""

  def __init__(self, mojom_name, attribute_list, enum_value_list, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert enum_value_list is None or isinstance(enum_value_list, EnumValueList)
    super(Enum, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.enum_value_list = enum_value_list

  def __eq__(self, other):
    return (super(Enum, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.enum_value_list == other.enum_value_list)
|
||||||
|
|
||||||
|
|
||||||
|
class EnumValue(Definition):
  """A single named value inside an enum."""

  def __init__(self, mojom_name, attribute_list, value, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    # The optional |value| is either an int (currently carried as a string)
    # or a "wrapped identifier" tuple.
    assert value is None or _IsStrOrUnicode(value) or isinstance(value, tuple)
    super(EnumValue, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.value = value

  def __eq__(self, other):
    return (super(EnumValue, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.value == other.value)
|
||||||
|
|
||||||
|
|
||||||
|
class EnumValueList(NodeListBase):
  """Represents a list of enum value definitions (i.e., the "body" of an enum
  definition)."""

  # Each element is one |EnumValue| of the enclosing enum.
  _list_item_type = EnumValue
|
||||||
|
|
||||||
|
|
||||||
|
class Import(NodeBase):
  """An import statement naming another mojom file."""

  def __init__(self, attribute_list, import_filename, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert _IsStrOrUnicode(import_filename)
    super(Import, self).__init__(**kwargs)
    self.attribute_list = attribute_list
    self.import_filename = import_filename

  def __eq__(self, other):
    return (super(Import, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.import_filename == other.import_filename)
|
||||||
|
|
||||||
|
|
||||||
|
class ImportList(NodeListBase):
  """Represents a list (i.e., sequence) of import statements."""

  # Each element is one |Import| node.
  _list_item_type = Import
|
||||||
|
|
||||||
|
|
||||||
|
class Interface(Definition):
  """An interface definition: optional attributes plus an |InterfaceBody|."""

  def __init__(self, mojom_name, attribute_list, body, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert isinstance(body, InterfaceBody)
    super(Interface, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.body = body

  def __eq__(self, other):
    return (super(Interface, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.body == other.body)
|
||||||
|
|
||||||
|
|
||||||
|
class Method(Definition):
  """A method definition inside an interface.

  |response_parameter_list| may be None (the parser distinguishes a method
  with no response from one with an empty response parameter list).
  """

  def __init__(self, mojom_name, attribute_list, ordinal, parameter_list,
               response_parameter_list, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert isinstance(parameter_list, ParameterList)
    assert (response_parameter_list is None
            or isinstance(response_parameter_list, ParameterList))
    super(Method, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.parameter_list = parameter_list
    self.response_parameter_list = response_parameter_list

  def __eq__(self, other):
    return (super(Method, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.ordinal == other.ordinal
            and self.parameter_list == other.parameter_list
            and self.response_parameter_list == other.response_parameter_list)
|
||||||
|
|
||||||
|
|
||||||
|
# This needs to be declared after |Method|.
class InterfaceBody(NodeListBase):
  """Represents the body of (i.e., list of definitions inside) an interface."""

  # Interfaces may nest consts and enums alongside their methods.
  _list_item_type = (Const, Enum, Method)
|
||||||
|
|
||||||
|
|
||||||
|
class Module(NodeBase):
  """A module statement: an optional namespace plus optional attributes."""

  def __init__(self, mojom_namespace, attribute_list, **kwargs):
    # |mojom_namespace| is either None or a "wrapped identifier" tuple.
    assert mojom_namespace is None or isinstance(mojom_namespace, tuple)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    super(Module, self).__init__(**kwargs)
    self.mojom_namespace = mojom_namespace
    self.attribute_list = attribute_list

  def __eq__(self, other):
    return (super(Module, self).__eq__(other)
            and self.mojom_namespace == other.mojom_namespace
            and self.attribute_list == other.attribute_list)
|
||||||
|
|
||||||
|
|
||||||
|
class Mojom(NodeBase):
  """The root node: an entire .mojom file (module statement, imports, and
  top-level definitions)."""

  def __init__(self, module, import_list, definition_list, **kwargs):
    assert module is None or isinstance(module, Module)
    assert isinstance(import_list, ImportList)
    assert isinstance(definition_list, list)
    super(Mojom, self).__init__(**kwargs)
    self.module = module
    self.import_list = import_list
    self.definition_list = definition_list

  def __eq__(self, other):
    return (super(Mojom, self).__eq__(other)
            and self.module == other.module
            and self.import_list == other.import_list
            and self.definition_list == other.definition_list)

  def __repr__(self):
    return "%s(%r, %r, %r)" % (self.__class__.__name__, self.module,
                               self.import_list, self.definition_list)
|
||||||
|
|
||||||
|
|
||||||
|
class Ordinal(NodeBase):
  """An explicit ordinal value labeling, e.g., a struct field."""

  def __init__(self, value, **kwargs):
    assert isinstance(value, int)
    super(Ordinal, self).__init__(**kwargs)
    self.value = value

  def __eq__(self, other):
    return (super(Ordinal, self).__eq__(other)
            and self.value == other.value)
|
||||||
|
|
||||||
|
|
||||||
|
class Parameter(NodeBase):
  """A single method request or response parameter."""

  def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
    assert _IsStrOrUnicode(mojom_name)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert _IsStrOrUnicode(typename)
    super(Parameter, self).__init__(**kwargs)
    self.mojom_name = mojom_name
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.typename = typename

  def __eq__(self, other):
    return (super(Parameter, self).__eq__(other)
            and self.mojom_name == other.mojom_name
            and self.attribute_list == other.attribute_list
            and self.ordinal == other.ordinal
            and self.typename == other.typename)
|
||||||
|
|
||||||
|
|
||||||
|
class ParameterList(NodeListBase):
  """Represents a list of (method request or response) parameters."""

  # Each element is one |Parameter| node.
  _list_item_type = Parameter
|
||||||
|
|
||||||
|
|
||||||
|
class Struct(Definition):
  """A struct definition: optional attributes plus an optional |StructBody|.

  NOTE(review): |body| is permitted to be None by the assert below —
  presumably for struct declarations without a body; confirm against the
  parser.
  """

  def __init__(self, mojom_name, attribute_list, body, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert body is None or isinstance(body, StructBody)
    super(Struct, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.body = body

  def __eq__(self, other):
    return (super(Struct, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.body == other.body)
|
||||||
|
|
||||||
|
|
||||||
|
class StructField(Definition):
  """A single field definition inside a struct."""

  def __init__(self, mojom_name, attribute_list, ordinal, typename,
               default_value, **kwargs):
    assert _IsStrOrUnicode(mojom_name)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert _IsStrOrUnicode(typename)
    # The optional default value is currently either a value carried as a
    # string or a "wrapped identifier" tuple.
    assert (default_value is None or _IsStrOrUnicode(default_value)
            or isinstance(default_value, tuple))
    super(StructField, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.typename = typename
    self.default_value = default_value

  def __eq__(self, other):
    return (super(StructField, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.ordinal == other.ordinal
            and self.typename == other.typename
            and self.default_value == other.default_value)
|
||||||
|
|
||||||
|
|
||||||
|
# This needs to be declared after |StructField|.
class StructBody(NodeListBase):
  """Represents the body of (i.e., list of definitions inside) a struct."""

  # Structs may nest consts and enums alongside their fields.
  _list_item_type = (Const, Enum, StructField)
|
||||||
|
|
||||||
|
|
||||||
|
class Union(Definition):
  """A union definition: optional attributes plus a |UnionBody|."""

  def __init__(self, mojom_name, attribute_list, body, **kwargs):
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert isinstance(body, UnionBody)
    super(Union, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.body = body

  def __eq__(self, other):
    return (super(Union, self).__eq__(other)
            and self.attribute_list == other.attribute_list
            and self.body == other.body)
|
||||||
|
|
||||||
|
|
||||||
|
class UnionField(Definition):
  """Represents a single field definition inside a union."""

  def __init__(self, mojom_name, attribute_list, ordinal, typename, **kwargs):
    assert _IsStrOrUnicode(mojom_name)
    assert attribute_list is None or isinstance(attribute_list, AttributeList)
    assert ordinal is None or isinstance(ordinal, Ordinal)
    assert _IsStrOrUnicode(typename)
    super(UnionField, self).__init__(mojom_name, **kwargs)
    self.attribute_list = attribute_list
    self.ordinal = ordinal
    self.typename = typename

  def __eq__(self, other):
    return super(UnionField, self).__eq__(other) and \
           self.attribute_list == other.attribute_list and \
           self.ordinal == other.ordinal and \
           self.typename == other.typename
|
||||||
|
|
||||||
|
|
||||||
|
class UnionBody(NodeListBase):
  """Represents the body of (i.e., list of fields inside) a union."""

  _list_item_type = UnionField
|
121
utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
Normal file
121
utils/ipc/mojo/public/tools/mojom/mojom/parse/ast_unittest.py
Normal file
|
@ -0,0 +1,121 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from mojom.parse import ast
|
||||||
|
|
||||||
|
|
||||||
|
class _TestNode(ast.NodeBase):
  """Node type for tests; carries a single comparable |value|."""

  def __init__(self, value, **kwargs):
    super(_TestNode, self).__init__(**kwargs)
    self.value = value

  def __eq__(self, other):
    if not super(_TestNode, self).__eq__(other):
      return False
    return self.value == other.value
|
||||||
|
|
||||||
|
|
||||||
|
class _TestNodeList(ast.NodeListBase):
  """Node list type for tests."""

  # Test lists hold |_TestNode| items only.
  _list_item_type = _TestNode
|
||||||
|
|
||||||
|
|
||||||
|
class ASTTest(unittest.TestCase):
  """Tests various AST classes.

  Fix: the deprecated assertEquals/assertNotEquals aliases (removed in
  Python 3.12) are replaced with assertEqual/assertNotEqual.
  """

  def testNodeBase(self):
    # Test |__eq__()|; this is only used for testing, where we want to do
    # comparison by value and ignore filenames/line numbers (for convenience).
    node1 = ast.NodeBase(filename="hello.mojom", lineno=123)
    node2 = ast.NodeBase()
    self.assertEqual(node1, node2)
    self.assertEqual(node2, node1)

    # Check that |__ne__()| just defers to |__eq__()| properly.
    self.assertFalse(node1 != node2)
    self.assertFalse(node2 != node1)

    # Check that |filename| and |lineno| are set properly (and are None by
    # default).
    self.assertEqual(node1.filename, "hello.mojom")
    self.assertEqual(node1.lineno, 123)
    self.assertIsNone(node2.filename)
    self.assertIsNone(node2.lineno)

    # |NodeBase|'s |__eq__()| should compare types (and a subclass's
    # |__eq__()| should first defer to its superclass's).
    node3 = _TestNode(123)
    self.assertNotEqual(node1, node3)
    self.assertNotEqual(node3, node1)
    # Also test |__eq__()| directly.
    self.assertFalse(node1 == node3)
    self.assertFalse(node3 == node1)

    node4 = _TestNode(123, filename="world.mojom", lineno=123)
    self.assertEqual(node4, node3)
    node5 = _TestNode(456)
    self.assertNotEqual(node5, node4)

  def testNodeListBase(self):
    node1 = _TestNode(1, filename="foo.mojom", lineno=1)
    # Equal to, but not the same as, |node1|:
    node1b = _TestNode(1, filename="foo.mojom", lineno=1)
    node2 = _TestNode(2, filename="foo.mojom", lineno=2)

    nodelist1 = _TestNodeList()  # Contains: (empty).
    self.assertEqual(nodelist1, nodelist1)
    self.assertEqual(nodelist1.items, [])
    self.assertIsNone(nodelist1.filename)
    self.assertIsNone(nodelist1.lineno)

    nodelist2 = _TestNodeList(node1)  # Contains: 1.
    self.assertEqual(nodelist2, nodelist2)
    self.assertEqual(nodelist2.items, [node1])
    self.assertNotEqual(nodelist2, nodelist1)
    self.assertEqual(nodelist2.filename, "foo.mojom")
    self.assertEqual(nodelist2.lineno, 1)

    nodelist3 = _TestNodeList([node2])  # Contains: 2.
    self.assertEqual(nodelist3.items, [node2])
    self.assertNotEqual(nodelist3, nodelist1)
    self.assertNotEqual(nodelist3, nodelist2)
    self.assertEqual(nodelist3.filename, "foo.mojom")
    self.assertEqual(nodelist3.lineno, 2)

    nodelist1.Append(node1b)  # Contains: 1.
    self.assertEqual(nodelist1.items, [node1])
    self.assertEqual(nodelist1, nodelist2)
    self.assertNotEqual(nodelist1, nodelist3)
    self.assertEqual(nodelist1.filename, "foo.mojom")
    self.assertEqual(nodelist1.lineno, 1)

    nodelist1.Append(node2)  # Contains: 1, 2.
    self.assertEqual(nodelist1.items, [node1, node2])
    self.assertNotEqual(nodelist1, nodelist2)
    self.assertNotEqual(nodelist1, nodelist3)
    self.assertEqual(nodelist1.lineno, 1)

    nodelist2.Append(node2)  # Contains: 1, 2.
    self.assertEqual(nodelist2.items, [node1, node2])
    self.assertEqual(nodelist2, nodelist1)
    self.assertNotEqual(nodelist2, nodelist3)
    self.assertEqual(nodelist2.lineno, 1)

    nodelist3.Insert(node1)  # Contains: 1, 2.
    self.assertEqual(nodelist3.items, [node1, node2])
    self.assertEqual(nodelist3, nodelist1)
    self.assertEqual(nodelist3, nodelist2)
    self.assertEqual(nodelist3.lineno, 1)

    # Test iteration:
    i = 1
    for item in nodelist1:
      self.assertEqual(item.value, i)
      i += 1
|
|
@ -0,0 +1,82 @@
|
||||||
|
# Copyright 2018 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Helpers for processing conditionally enabled features in a mojom."""
|
||||||
|
|
||||||
|
from mojom.error import Error
|
||||||
|
from mojom.parse import ast
|
||||||
|
|
||||||
|
|
||||||
|
class EnableIfError(Error):
  """Error raised for invalid use of the EnableIf attribute (e.g. when it is
  defined more than once on a single definition)."""

  def __init__(self, filename, message, lineno=None):
    Error.__init__(self, filename, message, lineno=lineno, addenda=None)
|
||||||
|
|
||||||
|
|
||||||
|
def _IsEnabled(definition, enabled_features):
|
||||||
|
"""Returns true if a definition is enabled.
|
||||||
|
|
||||||
|
A definition is enabled if it has no EnableIf attribute, or if the value of
|
||||||
|
the EnableIf attribute is in enabled_features.
|
||||||
|
"""
|
||||||
|
if not hasattr(definition, "attribute_list"):
|
||||||
|
return True
|
||||||
|
if not definition.attribute_list:
|
||||||
|
return True
|
||||||
|
|
||||||
|
already_defined = False
|
||||||
|
for a in definition.attribute_list:
|
||||||
|
if a.key == 'EnableIf':
|
||||||
|
if already_defined:
|
||||||
|
raise EnableIfError(
|
||||||
|
definition.filename,
|
||||||
|
"EnableIf attribute may only be defined once per field.",
|
||||||
|
definition.lineno)
|
||||||
|
already_defined = True
|
||||||
|
|
||||||
|
for attribute in definition.attribute_list:
|
||||||
|
if attribute.key == 'EnableIf' and attribute.value not in enabled_features:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _FilterDisabledFromNodeList(node_list, enabled_features):
  """In place, drops disabled items from |node_list| and recurses into the
  survivors. A None |node_list| is a no-op."""
  if not node_list:
    return
  assert isinstance(node_list, ast.NodeListBase)
  enabled_items = [
      item for item in node_list.items if _IsEnabled(item, enabled_features)
  ]
  node_list.items = enabled_items
  for item in enabled_items:
    _FilterDefinition(item, enabled_features)
|
||||||
|
|
||||||
|
|
||||||
|
def _FilterDefinition(definition, enabled_features):
  """Recursively filters the sub-lists of any definition that has a body."""
  if isinstance(definition, ast.Enum):
    sub_lists = [definition.enum_value_list]
  elif isinstance(definition, ast.Method):
    sub_lists = [definition.parameter_list, definition.response_parameter_list]
  elif isinstance(definition, (ast.Interface, ast.Struct, ast.Union)):
    sub_lists = [definition.body]
  else:
    # Definitions without a nested body need no filtering.
    sub_lists = []
  for sub_list in sub_lists:
    _FilterDisabledFromNodeList(sub_list, enabled_features)
|
||||||
|
|
||||||
|
|
||||||
|
def RemoveDisabledDefinitions(mojom, enabled_features):
  """Removes conditionally disabled definitions from a Mojom node, in place."""
  mojom.import_list = ast.ImportList([
      imported_file for imported_file in mojom.import_list
      if _IsEnabled(imported_file, enabled_features)
  ])
  enabled_definitions = [
      definition for definition in mojom.definition_list
      if _IsEnabled(definition, enabled_features)
  ]
  mojom.definition_list = enabled_definitions
  for definition in enabled_definitions:
    _FilterDefinition(definition, enabled_features)
|
|
@ -0,0 +1,233 @@
|
||||||
|
# Copyright 2018 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
|
||||||
|
def _GetDirAbove(dirname):
|
||||||
|
"""Returns the directory "above" this file containing |dirname| (which must
|
||||||
|
also be "above" this file)."""
|
||||||
|
path = os.path.abspath(__file__)
|
||||||
|
while True:
|
||||||
|
path, tail = os.path.split(path)
|
||||||
|
assert tail
|
||||||
|
if tail == dirname:
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
imp.find_module('mojom')
|
||||||
|
except ImportError:
|
||||||
|
sys.path.append(os.path.join(_GetDirAbove('pylib'), 'pylib'))
|
||||||
|
import mojom.parse.ast as ast
|
||||||
|
import mojom.parse.conditional_features as conditional_features
|
||||||
|
import mojom.parse.parser as parser
|
||||||
|
|
||||||
|
ENABLED_FEATURES = frozenset({'red', 'green', 'blue'})
|
||||||
|
|
||||||
|
|
||||||
|
class ConditionalFeaturesTest(unittest.TestCase):
  """Tests |mojom.parse.conditional_features|.

  Fix: the deprecated assertEquals alias (removed in Python 3.12) is replaced
  with assertEqual.
  """

  def parseAndAssertEqual(self, source, expected_source):
    """Parses |source|, strips disabled definitions, and verifies the result
    equals the parse of |expected_source|."""
    definition = parser.Parse(source, "my_file.mojom")
    conditional_features.RemoveDisabledDefinitions(definition, ENABLED_FEATURES)
    expected = parser.Parse(expected_source, "my_file.mojom")
    self.assertEqual(definition, expected)

  def testFilterConst(self):
    """Test that Consts are correctly filtered."""
    const_source = """
      [EnableIf=blue]
      const int kMyConst1 = 1;
      [EnableIf=orange]
      const double kMyConst2 = 2;
      const int kMyConst3 = 3;
    """
    expected_source = """
      [EnableIf=blue]
      const int kMyConst1 = 1;
      const int kMyConst3 = 3;
    """
    self.parseAndAssertEqual(const_source, expected_source)

  def testFilterEnum(self):
    """Test that EnumValues are correctly filtered from an Enum."""
    enum_source = """
      enum MyEnum {
        [EnableIf=purple]
        VALUE1,
        [EnableIf=blue]
        VALUE2,
        VALUE3,
      };
    """
    expected_source = """
      enum MyEnum {
        [EnableIf=blue]
        VALUE2,
        VALUE3
      };
    """
    self.parseAndAssertEqual(enum_source, expected_source)

  def testFilterImport(self):
    """Test that imports are correctly filtered from a Mojom."""
    import_source = """
      [EnableIf=blue]
      import "foo.mojom";
      import "bar.mojom";
      [EnableIf=purple]
      import "baz.mojom";
    """
    expected_source = """
      [EnableIf=blue]
      import "foo.mojom";
      import "bar.mojom";
    """
    self.parseAndAssertEqual(import_source, expected_source)

  def testFilterInterface(self):
    """Test that definitions are correctly filtered from an Interface."""
    interface_source = """
      interface MyInterface {
        [EnableIf=blue]
        enum MyEnum {
          [EnableIf=purple]
          VALUE1,
          VALUE2,
        };
        [EnableIf=blue]
        const int32 kMyConst = 123;
        [EnableIf=purple]
        MyMethod();
      };
    """
    expected_source = """
      interface MyInterface {
        [EnableIf=blue]
        enum MyEnum {
          VALUE2,
        };
        [EnableIf=blue]
        const int32 kMyConst = 123;
      };
    """
    self.parseAndAssertEqual(interface_source, expected_source)

  def testFilterMethod(self):
    """Test that Parameters are correctly filtered from a Method."""
    method_source = """
      interface MyInterface {
        [EnableIf=blue]
        MyMethod([EnableIf=purple] int32 a) => ([EnableIf=red] int32 b);
      };
    """
    expected_source = """
      interface MyInterface {
        [EnableIf=blue]
        MyMethod() => ([EnableIf=red] int32 b);
      };
    """
    self.parseAndAssertEqual(method_source, expected_source)

  def testFilterStruct(self):
    """Test that definitions are correctly filtered from a Struct."""
    struct_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
          [EnableIf=purple]
          VALUE2,
        };
        [EnableIf=yellow]
        const double kMyConst = 1.23;
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIf=purple]
        int32 c;
        [EnableIf=blue]
        double d;
        int32 e;
        [EnableIf=orange]
        double f;
      };
    """
    expected_source = """
      struct MyStruct {
        [EnableIf=blue]
        enum MyEnum {
          VALUE1,
        };
        [EnableIf=green]
        int32 a;
        double b;
        [EnableIf=blue]
        double d;
        int32 e;
      };
    """
    self.parseAndAssertEqual(struct_source, expected_source)

  def testFilterUnion(self):
    """Test that UnionFields are correctly filtered from a Union."""
    union_source = """
      union MyUnion {
        [EnableIf=yellow]
        int32 a;
        [EnableIf=red]
        bool b;
      };
    """
    expected_source = """
      union MyUnion {
        [EnableIf=red]
        bool b;
      };
    """
    self.parseAndAssertEqual(union_source, expected_source)

  def testSameNameFields(self):
    """Conflicting same-name definitions survive when only one is enabled."""
    mojom_source = """
      enum Foo {
        [EnableIf=red]
        VALUE1 = 5,
        [EnableIf=yellow]
        VALUE1 = 6,
      };
      [EnableIf=red]
      const double kMyConst = 1.23;
      [EnableIf=yellow]
      const double kMyConst = 4.56;
    """
    expected_source = """
      enum Foo {
        [EnableIf=red]
        VALUE1 = 5,
      };
      [EnableIf=red]
      const double kMyConst = 1.23;
    """
    self.parseAndAssertEqual(mojom_source, expected_source)

  def testMultipleEnableIfs(self):
    """A definition carrying two EnableIf attributes must raise."""
    source = """
      enum Foo {
        [EnableIf=red,EnableIf=yellow]
        kBarValue = 5,
      };
    """
    definition = parser.Parse(source, "my_file.mojom")
    self.assertRaises(conditional_features.EnableIfError,
                      conditional_features.RemoveDisabledDefinitions,
                      definition, ENABLED_FEATURES)
|
||||||
|
|
||||||
|
|
||||||
|
# Support running this test file directly.
if __name__ == '__main__':
  unittest.main()
|
251
utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
Normal file
251
utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer.py
Normal file
|
@ -0,0 +1,251 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from mojom import fileutil
|
||||||
|
from mojom.error import Error
|
||||||
|
|
||||||
|
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||||
|
from ply.lex import TOKEN
|
||||||
|
|
||||||
|
|
||||||
|
class LexError(Error):
  """Error subclass raised for problems encountered while lexing."""

  def __init__(self, filename, message, lineno):
    # Forward everything to the shared mojom Error base; lineno is required
    # here (every lex failure has a definite line).
    Error.__init__(self, filename, message, lineno=lineno)
|
||||||
|
|
||||||
|
|
||||||
|
# We have methods which look like they could be functions:
# pylint: disable=R0201
class Lexer(object):
  """Token definitions for .mojom files, consumed by ply.lex.

  NOTE: ply builds the lexer by introspecting this class: docstrings of t_*
  methods and the string values of t_* attributes ARE the token regexes, and
  function-rule definition order fixes matching priority. Do not edit
  docstrings/regex strings or reorder the t_* functions.
  """

  def __init__(self, filename):
    # Kept only so error messages can name the file being lexed.
    self.filename = filename

  ######################--   PRIVATE   --######################

  ##
  ## Internal auxiliary methods
  ##
  def _error(self, msg, token):
    # Single funnel for all lex failures, so they share one error format.
    raise LexError(self.filename, msg, token.lineno)

  ##
  ## Reserved keywords
  ##
  keywords = (
      'HANDLE',
      'IMPORT',
      'MODULE',
      'STRUCT',
      'UNION',
      'INTERFACE',
      'ENUM',
      'CONST',
      'TRUE',
      'FALSE',
      'DEFAULT',
      'ARRAY',
      'MAP',
      'ASSOCIATED',
      'PENDING_REMOTE',
      'PENDING_RECEIVER',
      'PENDING_ASSOCIATED_REMOTE',
      'PENDING_ASSOCIATED_RECEIVER',
  )

  # Maps lowercase source spelling -> token type (used by t_NAME below to
  # promote identifiers that are actually keywords).
  keyword_map = {}
  for keyword in keywords:
    keyword_map[keyword.lower()] = keyword

  ##
  ## All the tokens recognized by the lexer
  ##
  tokens = keywords + (
      # Identifiers
      'NAME',

      # Constants
      'ORDINAL',
      'INT_CONST_DEC',
      'INT_CONST_HEX',
      'FLOAT_CONST',

      # String literals
      'STRING_LITERAL',

      # Operators
      'MINUS',
      'PLUS',
      'AMP',
      'QSTN',

      # Assignment
      'EQUALS',

      # Request / response
      'RESPONSE',

      # Delimiters
      'LPAREN',
      'RPAREN',  # ( )
      'LBRACKET',
      'RBRACKET',  # [ ]
      'LBRACE',
      'RBRACE',  # { }
      'LANGLE',
      'RANGLE',  # < >
      'SEMI',  # ;
      'COMMA',
      'DOT'  # , .
  )

  ##
  ## Regexes for use in tokens
  ##

  # valid C identifiers (K&R2: A.2.3)
  identifier = r'[a-zA-Z_][0-9a-zA-Z_]*'

  hex_prefix = '0[xX]'
  hex_digits = '[0-9a-fA-F]+'

  # integer constants (K&R2: A.2.5.1)
  decimal_constant = '0|([1-9][0-9]*)'
  hex_constant = hex_prefix + hex_digits
  # Don't allow octal constants (even invalid octal).
  octal_constant_disallowed = '0[0-9]+'

  # character constants (K&R2: A.2.5.2)
  # Note: a-zA-Z and '.-~^_!=&;,' are allowed as escape chars to support #line
  # directives with Windows paths as filenames (..\..\dir\file)
  # For the same reason, decimal_escape allows all digit sequences. We want to
  # parse all correct code, even if it means to sometimes parse incorrect
  # code.
  #
  simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
  decimal_escape = r"""(\d+)"""
  hex_escape = r"""(x[0-9a-fA-F]+)"""
  bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""

  escape_sequence = \
      r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'

  # string literals (K&R2: A.2.6)
  string_char = r"""([^"\\\n]|""" + escape_sequence + ')'
  string_literal = '"' + string_char + '*"'
  bad_string_literal = '"' + string_char + '*' + bad_escape + string_char + '*"'

  # floating constants (K&R2: A.2.5.3)
  exponent_part = r"""([eE][-+]?[0-9]+)"""
  fractional_constant = r"""([0-9]*\.[0-9]+)|([0-9]+\.)"""
  floating_constant = \
      '(((('+fractional_constant+')'+ \
      exponent_part+'?)|([0-9]+'+exponent_part+')))'

  # Ordinals
  ordinal = r'@[0-9]+'
  missing_ordinal_value = r'@'
  # Don't allow ordinal values in octal (even invalid octal, like 09) or
  # hexadecimal.
  octal_or_hex_ordinal_disallowed = (
      r'@((0[0-9]+)|(' + hex_prefix + hex_digits + '))')

  ##
  ## Rules for the normal state
  ##
  t_ignore = ' \t\r'

  # Newlines (not a token: only updates the line counter for error reporting).
  def t_NEWLINE(self, t):
    r'\n+'
    t.lexer.lineno += len(t.value)

  # Operators
  t_MINUS = r'-'
  t_PLUS = r'\+'
  t_AMP = r'&'
  t_QSTN = r'\?'

  # =
  t_EQUALS = r'='

  # =>
  t_RESPONSE = r'=>'

  # Delimiters
  t_LPAREN = r'\('
  t_RPAREN = r'\)'
  t_LBRACKET = r'\['
  t_RBRACKET = r'\]'
  t_LBRACE = r'\{'
  t_RBRACE = r'\}'
  t_LANGLE = r'<'
  t_RANGLE = r'>'
  t_COMMA = r','
  t_DOT = r'\.'
  t_SEMI = r';'

  t_STRING_LITERAL = string_literal

  # The following floating and integer constants are defined as
  # functions to impose a strict order (otherwise, decimal
  # is placed before the others because its regex is longer,
  # and this is bad)
  #
  @TOKEN(floating_constant)
  def t_FLOAT_CONST(self, t):
    return t

  @TOKEN(hex_constant)
  def t_INT_CONST_HEX(self, t):
    return t

  @TOKEN(octal_constant_disallowed)
  def t_OCTAL_CONSTANT_DISALLOWED(self, t):
    msg = "Octal values not allowed"
    self._error(msg, t)

  @TOKEN(decimal_constant)
  def t_INT_CONST_DEC(self, t):
    return t

  # unmatched string literals are caught by the preprocessor

  @TOKEN(bad_string_literal)
  def t_BAD_STRING_LITERAL(self, t):
    msg = "String contains invalid escape code"
    self._error(msg, t)

  # Handle ordinal-related tokens in the right order:
  @TOKEN(octal_or_hex_ordinal_disallowed)
  def t_OCTAL_OR_HEX_ORDINAL_DISALLOWED(self, t):
    msg = "Octal and hexadecimal ordinal values not allowed"
    self._error(msg, t)

  @TOKEN(ordinal)
  def t_ORDINAL(self, t):
    return t

  @TOKEN(missing_ordinal_value)
  def t_BAD_ORDINAL(self, t):
    msg = "Missing ordinal value"
    self._error(msg, t)

  @TOKEN(identifier)
  def t_NAME(self, t):
    # Identifiers that spell a reserved word become that keyword token.
    t.type = self.keyword_map.get(t.value, "NAME")
    return t

  # Ignore C and C++ style comments
  def t_COMMENT(self, t):
    r'(/\*(.|\n)*?\*/)|(//.*(\n[ \t]*//.*)*)'
    t.lexer.lineno += t.value.count("\n")

  def t_error(self, t):
    msg = "Illegal character %s" % repr(t.value[0])
    self._error(msg, t)
|
198
utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
Normal file
198
utils/ipc/mojo/public/tools/mojom/mojom/parse/lexer_unittest.py
Normal file
|
@ -0,0 +1,198 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import imp
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
|
||||||
|
def _GetDirAbove(dirname):
  """Walk upward from this file and return the directory that contains the
  ancestor directory named |dirname| (which must exist above this file)."""
  remaining = os.path.abspath(__file__)
  while True:
    remaining, component = os.path.split(remaining)
    # If we hit the filesystem root without finding |dirname|, split yields an
    # empty tail and the assert fires, just as in the original loop.
    assert component
    if component == dirname:
      return remaining
|
||||||
|
|
||||||
|
|
||||||
|
sys.path.insert(1, os.path.join(_GetDirAbove("mojo"), "third_party"))
|
||||||
|
from ply import lex
|
||||||
|
|
||||||
|
try:
|
||||||
|
imp.find_module("mojom")
|
||||||
|
except ImportError:
|
||||||
|
sys.path.append(os.path.join(_GetDirAbove("pylib"), "pylib"))
|
||||||
|
import mojom.parse.lexer
|
||||||
|
|
||||||
|
|
||||||
|
# This (monkey-patching LexToken to make comparison value-based) is evil, but
|
||||||
|
# we'll do it anyway. (I'm pretty sure ply's lexer never cares about comparing
|
||||||
|
# for object identity.)
|
||||||
|
def _LexTokenEq(self, other):
|
||||||
|
return self.type == other.type and self.value == other.value and \
|
||||||
|
self.lineno == other.lineno and self.lexpos == other.lexpos
|
||||||
|
|
||||||
|
|
||||||
|
# Install the value-based __eq__ on ply's LexToken so the assertEquals calls
# below can compare lexer output against hand-built expected tokens.
setattr(lex.LexToken, '__eq__', _LexTokenEq)
|
||||||
|
|
||||||
|
|
||||||
|
def _MakeLexToken(token_type, value, lineno=1, lexpos=0):
  """Makes a LexToken with the given parameters. (Note that lineno is 1-based,
  but lexpos is 0-based.)"""
  token = lex.LexToken()
  token.type = token_type
  token.value = value
  token.lineno = lineno
  token.lexpos = lexpos
  return token
|
||||||
|
|
||||||
|
|
||||||
|
def _MakeLexTokenForKeyword(keyword, **kwargs):
  """Makes the LexToken corresponding to |keyword| (type is the uppercase
  spelling, value the lowercase one)."""
  token_type = keyword.upper()
  return _MakeLexToken(token_type, keyword.lower(), **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class LexerTest(unittest.TestCase):
  """Tests |mojom.parse.lexer.Lexer|."""

  def __init__(self, *args, **kwargs):
    unittest.TestCase.__init__(self, *args, **kwargs)
    # Clone all lexer instances from this one, since making a lexer is slow.
    self._zygote_lexer = lex.lex(mojom.parse.lexer.Lexer("my_file.mojom"))

  def testValidKeywords(self):
    """Tests valid keywords."""
    for keyword in ("handle", "import", "module", "struct", "union",
                    "interface", "enum", "const", "true", "false",
                    "default", "array", "map", "associated"):
      self.assertEquals(
          self._SingleTokenForInput(keyword),
          _MakeLexTokenForKeyword(keyword))

  def testValidIdentifiers(self):
    """Tests identifiers."""
    for name in ("abcd", "AbC_d012_", "_0123"):
      self.assertEquals(
          self._SingleTokenForInput(name), _MakeLexToken("NAME", name))

  def testInvalidIdentifiers(self):
    # '$' is illegal anywhere in an identifier, leading or embedded.
    for bad_input in ("$abc", "a$bc"):
      with self.assertRaisesRegexp(
          mojom.parse.lexer.LexError,
          r"^my_file\.mojom:1: Error: Illegal character '\$'$"):
        self._TokensForInput(bad_input)

  def testDecimalIntegerConstants(self):
    for text in ("0", "1", "123", "10"):
      self.assertEquals(
          self._SingleTokenForInput(text),
          _MakeLexToken("INT_CONST_DEC", text))

  def testValidTokens(self):
    """Tests valid tokens (which aren't tested elsewhere)."""
    # Keywords tested in |testValidKeywords|.
    # NAME tested in |testValidIdentifiers|.
    cases = (
        ("@123", "ORDINAL"),
        ("456", "INT_CONST_DEC"),
        ("0x01aB2eF3", "INT_CONST_HEX"),
        ("123.456", "FLOAT_CONST"),
        ("\"hello\"", "STRING_LITERAL"),
        ("+", "PLUS"),
        ("-", "MINUS"),
        ("&", "AMP"),
        ("?", "QSTN"),
        ("=", "EQUALS"),
        ("=>", "RESPONSE"),
        ("(", "LPAREN"),
        (")", "RPAREN"),
        ("[", "LBRACKET"),
        ("]", "RBRACKET"),
        ("{", "LBRACE"),
        ("}", "RBRACE"),
        ("<", "LANGLE"),
        (">", "RANGLE"),
        (";", "SEMI"),
        (",", "COMMA"),
        (".", "DOT"),
    )
    for text, token_type in cases:
      self.assertEquals(
          self._SingleTokenForInput(text), _MakeLexToken(token_type, text))

  def _TokensForInput(self, input_string):
    """Gets a list of tokens for the given input string."""
    lexer = self._zygote_lexer.clone()
    lexer.input(input_string)
    collected = []
    while True:
      token = lexer.token()
      if not token:
        return collected
      collected.append(token)

  def _SingleTokenForInput(self, input_string):
    """Gets the single token for the given input string. (Raises an exception
    if the input string does not result in exactly one token.)"""
    tokens = self._TokensForInput(input_string)
    assert len(tokens) == 1
    return tokens[0]
|
||||||
|
|
||||||
|
|
||||||
|
# Allow this test module to be executed directly.
if __name__ == "__main__":
  unittest.main()
|
488
utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
Normal file
488
utils/ipc/mojo/public/tools/mojom/mojom/parse/parser.py
Normal file
|
@ -0,0 +1,488 @@
|
||||||
|
# Copyright 2014 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Generates a syntax tree from a Mojo IDL file."""
|
||||||
|
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from mojom import fileutil
|
||||||
|
from mojom.error import Error
|
||||||
|
from mojom.parse import ast
|
||||||
|
from mojom.parse.lexer import Lexer
|
||||||
|
|
||||||
|
fileutil.AddLocalRepoThirdPartyDirToModulePath()
|
||||||
|
from ply import lex
|
||||||
|
from ply import yacc
|
||||||
|
|
||||||
|
_MAX_ORDINAL_VALUE = 0xffffffff
|
||||||
|
_MAX_ARRAY_SIZE = 0xffffffff
|
||||||
|
|
||||||
|
|
||||||
|
class ParseError(Error):
  """Error subclass raised for problems encountered while parsing."""

  def __init__(self, filename, message, lineno=None, snippet=None):
    # The offending source line, when provided, is forwarded to the base
    # class as a single addendum.
    addenda = [snippet] if snippet else None
    Error.__init__(self, filename, message, lineno=lineno, addenda=addenda)
|
||||||
|
|
||||||
|
|
||||||
|
# We have methods which look like they could be functions:
|
||||||
|
# pylint: disable=R0201
|
||||||
|
class Parser(object):
|
||||||
|
  def __init__(self, lexer, source, filename):
    # source/filename are retained for error snippets and messages; the token
    # list comes from the lexer so ply.yacc can see it.
    self.tokens = lexer.tokens
    self.source = source
    self.filename = filename

  # NOTE: the docstrings of the p_* methods below ARE the yacc grammar rules;
  # do not reword them.
  #
  # Names of functions
  #
  # In general, we name functions after the left-hand-side of the rule(s) that
  # they handle. E.g., |p_foo_bar| for a rule |foo_bar : ...|.
  #
  # There may be multiple functions handling rules for the same left-hand-side;
  # then we name the functions |p_foo_bar_N| (for left-hand-side |foo_bar|),
  # where N is a number (numbered starting from 1). Note that using multiple
  # functions is actually more efficient than having single functions handle
  # multiple rules (and, e.g., distinguishing them by examining |len(p)|).
  #
  # It's also possible to have a function handling multiple rules with different
  # left-hand-sides. We do not do this.
  #
  # See http://www.dabeaz.com/ply/ply.html#ply_nn25 for more details.

  # TODO(vtl): Get rid of the braces in the module "statement". (Consider
  # renaming "module" -> "package".) Then we'll be able to have a single rule
  # for root (by making module "optional").
  def p_root_1(self, p):
    """root : """
    p[0] = ast.Mojom(None, ast.ImportList(), [])

  def p_root_2(self, p):
    """root : root module"""
    # At most one "module" statement, and it must come first.
    if p[1].module is not None:
      raise ParseError(
          self.filename,
          "Multiple \"module\" statements not allowed:",
          p[2].lineno,
          snippet=self._GetSnippet(p[2].lineno))
    if p[1].import_list.items or p[1].definition_list:
      raise ParseError(
          self.filename,
          "\"module\" statements must precede imports and definitions:",
          p[2].lineno,
          snippet=self._GetSnippet(p[2].lineno))
    p[0] = p[1]
    p[0].module = p[2]

  def p_root_3(self, p):
    """root : root import"""
    # Imports must all come before any definition.
    if p[1].definition_list:
      raise ParseError(
          self.filename,
          "\"import\" statements must precede definitions:",
          p[2].lineno,
          snippet=self._GetSnippet(p[2].lineno))
    p[0] = p[1]
    p[0].import_list.Append(p[2])

  def p_root_4(self, p):
    """root : root definition"""
    p[0] = p[1]
    p[0].definition_list.append(p[2])

  def p_import(self, p):
    """import : attribute_section IMPORT STRING_LITERAL SEMI"""
    # 'eval' the literal to strip the quotes.
    # TODO(vtl): This eval is dubious. We should unquote/unescape ourselves.
    p[0] = ast.Import(
        p[1], eval(p[3]), filename=self.filename, lineno=p.lineno(2))

  def p_module(self, p):
    """module : attribute_section MODULE identifier_wrapped SEMI"""
    p[0] = ast.Module(p[3], p[1], filename=self.filename, lineno=p.lineno(2))

  def p_definition(self, p):
    """definition : struct
                  | union
                  | interface
                  | enum
                  | const"""
    p[0] = p[1]
|
||||||
|
|
||||||
|
  # NOTE: the docstrings below ARE the yacc grammar rules; do not reword them.

  def p_attribute_section_1(self, p):
    """attribute_section : """
    p[0] = None

  def p_attribute_section_2(self, p):
    """attribute_section : LBRACKET attribute_list RBRACKET"""
    p[0] = p[2]

  def p_attribute_list_1(self, p):
    """attribute_list : """
    p[0] = ast.AttributeList()

  def p_attribute_list_2(self, p):
    """attribute_list : nonempty_attribute_list"""
    p[0] = p[1]

  def p_nonempty_attribute_list_1(self, p):
    """nonempty_attribute_list : attribute"""
    p[0] = ast.AttributeList(p[1])

  def p_nonempty_attribute_list_2(self, p):
    """nonempty_attribute_list : nonempty_attribute_list COMMA attribute"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_attribute_1(self, p):
    """attribute : NAME EQUALS evaled_literal
                 | NAME EQUALS NAME"""
    p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))

  def p_attribute_2(self, p):
    """attribute : NAME"""
    # A bare attribute name means the boolean value True.
    p[0] = ast.Attribute(p[1], True, filename=self.filename, lineno=p.lineno(1))

  def p_evaled_literal(self, p):
    """evaled_literal : literal"""
    # 'eval' the literal to strip the quotes. Handle keywords "true" and "false"
    # specially since they cannot directly be evaluated to python boolean
    # values.
    if p[1] == "true":
      p[0] = True
    elif p[1] == "false":
      p[0] = False
    else:
      p[0] = eval(p[1])

  def p_struct_1(self, p):
    """struct : attribute_section STRUCT NAME LBRACE struct_body RBRACE SEMI"""
    p[0] = ast.Struct(p[3], p[1], p[5])

  def p_struct_2(self, p):
    """struct : attribute_section STRUCT NAME SEMI"""
    # Forward declaration: a struct with no body (None, not an empty body).
    p[0] = ast.Struct(p[3], p[1], None)

  def p_struct_body_1(self, p):
    """struct_body : """
    p[0] = ast.StructBody()

  def p_struct_body_2(self, p):
    """struct_body : struct_body const
                   | struct_body enum
                   | struct_body struct_field"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_struct_field(self, p):
    """struct_field : attribute_section typename NAME ordinal default SEMI"""
    p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])

  def p_union(self, p):
    """union : attribute_section UNION NAME LBRACE union_body RBRACE SEMI"""
    p[0] = ast.Union(p[3], p[1], p[5])

  def p_union_body_1(self, p):
    """union_body : """
    p[0] = ast.UnionBody()

  def p_union_body_2(self, p):
    """union_body : union_body union_field"""
    p[0] = p[1]
    p[1].Append(p[2])

  def p_union_field(self, p):
    """union_field : attribute_section typename NAME ordinal SEMI"""
    p[0] = ast.UnionField(p[3], p[1], p[4], p[2])

  def p_default_1(self, p):
    """default : """
    p[0] = None

  def p_default_2(self, p):
    """default : EQUALS constant"""
    p[0] = p[2]

  def p_interface(self, p):
    """interface : attribute_section INTERFACE NAME LBRACE interface_body \
RBRACE SEMI"""
    p[0] = ast.Interface(p[3], p[1], p[5])

  def p_interface_body_1(self, p):
    """interface_body : """
    p[0] = ast.InterfaceBody()

  def p_interface_body_2(self, p):
    """interface_body : interface_body const
                      | interface_body enum
                      | interface_body method"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_response_1(self, p):
    """response : """
    p[0] = None

  def p_response_2(self, p):
    """response : RESPONSE LPAREN parameter_list RPAREN"""
    p[0] = p[3]

  def p_method(self, p):
    """method : attribute_section NAME ordinal LPAREN parameter_list RPAREN \
response SEMI"""
    p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])

  def p_parameter_list_1(self, p):
    """parameter_list : """
    p[0] = ast.ParameterList()

  def p_parameter_list_2(self, p):
    """parameter_list : nonempty_parameter_list"""
    p[0] = p[1]

  def p_nonempty_parameter_list_1(self, p):
    """nonempty_parameter_list : parameter"""
    p[0] = ast.ParameterList(p[1])

  def p_nonempty_parameter_list_2(self, p):
    """nonempty_parameter_list : nonempty_parameter_list COMMA parameter"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_parameter(self, p):
    """parameter : attribute_section typename NAME ordinal"""
    p[0] = ast.Parameter(
        p[3], p[1], p[4], p[2], filename=self.filename, lineno=p.lineno(3))
|
||||||
|
|
||||||
|
def p_typename(self, p):
|
||||||
|
"""typename : nonnullable_typename QSTN
|
||||||
|
| nonnullable_typename"""
|
||||||
|
if len(p) == 2:
|
||||||
|
p[0] = p[1]
|
||||||
|
else:
|
||||||
|
p[0] = p[1] + "?"
|
||||||
|
|
||||||
|
def p_nonnullable_typename(self, p):
|
||||||
|
"""nonnullable_typename : basictypename
|
||||||
|
| array
|
||||||
|
| fixed_array
|
||||||
|
| associative_array
|
||||||
|
| interfacerequest"""
|
||||||
|
p[0] = p[1]
|
||||||
|
|
||||||
|
def p_basictypename(self, p):
|
||||||
|
"""basictypename : remotetype
|
||||||
|
| receivertype
|
||||||
|
| associatedremotetype
|
||||||
|
| associatedreceivertype
|
||||||
|
| identifier
|
||||||
|
| ASSOCIATED identifier
|
||||||
|
| handletype"""
|
||||||
|
if len(p) == 2:
|
||||||
|
p[0] = p[1]
|
||||||
|
else:
|
||||||
|
p[0] = "asso<" + p[2] + ">"
|
||||||
|
|
||||||
|
def p_remotetype(self, p):
|
||||||
|
"""remotetype : PENDING_REMOTE LANGLE identifier RANGLE"""
|
||||||
|
p[0] = "rmt<%s>" % p[3]
|
||||||
|
|
||||||
|
def p_receivertype(self, p):
|
||||||
|
"""receivertype : PENDING_RECEIVER LANGLE identifier RANGLE"""
|
||||||
|
p[0] = "rcv<%s>" % p[3]
|
||||||
|
|
||||||
|
def p_associatedremotetype(self, p):
|
||||||
|
"""associatedremotetype : PENDING_ASSOCIATED_REMOTE LANGLE identifier \
|
||||||
|
RANGLE"""
|
||||||
|
p[0] = "rma<%s>" % p[3]
|
||||||
|
|
||||||
|
def p_associatedreceivertype(self, p):
|
||||||
|
"""associatedreceivertype : PENDING_ASSOCIATED_RECEIVER LANGLE identifier \
|
||||||
|
RANGLE"""
|
||||||
|
p[0] = "rca<%s>" % p[3]
|
||||||
|
|
||||||
|
def p_handletype(self, p):
|
||||||
|
"""handletype : HANDLE
|
||||||
|
| HANDLE LANGLE NAME RANGLE"""
|
||||||
|
if len(p) == 2:
|
||||||
|
p[0] = p[1]
|
||||||
|
else:
|
||||||
|
if p[3] not in ('data_pipe_consumer', 'data_pipe_producer',
|
||||||
|
'message_pipe', 'shared_buffer', 'platform'):
|
||||||
|
# Note: We don't enable tracking of line numbers for everything, so we
|
||||||
|
# can't use |p.lineno(3)|.
|
||||||
|
raise ParseError(
|
||||||
|
self.filename,
|
||||||
|
"Invalid handle type %r:" % p[3],
|
||||||
|
lineno=p.lineno(1),
|
||||||
|
snippet=self._GetSnippet(p.lineno(1)))
|
||||||
|
p[0] = "handle<" + p[3] + ">"
|
||||||
|
|
||||||
|
def p_array(self, p):
|
||||||
|
"""array : ARRAY LANGLE typename RANGLE"""
|
||||||
|
p[0] = p[3] + "[]"
|
||||||
|
|
||||||
|
def p_fixed_array(self, p):
|
||||||
|
"""fixed_array : ARRAY LANGLE typename COMMA INT_CONST_DEC RANGLE"""
|
||||||
|
value = int(p[5])
|
||||||
|
if value == 0 or value > _MAX_ARRAY_SIZE:
|
||||||
|
raise ParseError(
|
||||||
|
self.filename,
|
||||||
|
"Fixed array size %d invalid:" % value,
|
||||||
|
lineno=p.lineno(5),
|
||||||
|
snippet=self._GetSnippet(p.lineno(5)))
|
||||||
|
p[0] = p[3] + "[" + p[5] + "]"
|
||||||
|
|
||||||
|
def p_associative_array(self, p):
|
||||||
|
"""associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
|
||||||
|
p[0] = p[5] + "{" + p[3] + "}"
|
||||||
|
|
||||||
|
def p_interfacerequest(self, p):
|
||||||
|
"""interfacerequest : identifier AMP
|
||||||
|
| ASSOCIATED identifier AMP"""
|
||||||
|
if len(p) == 3:
|
||||||
|
p[0] = p[1] + "&"
|
||||||
|
else:
|
||||||
|
p[0] = "asso<" + p[2] + "&>"
|
||||||
|
|
||||||
|
def p_ordinal_1(self, p):
|
||||||
|
"""ordinal : """
|
||||||
|
p[0] = None
|
||||||
|
|
||||||
|
def p_ordinal_2(self, p):
|
||||||
|
"""ordinal : ORDINAL"""
|
||||||
|
value = int(p[1][1:])
|
||||||
|
if value > _MAX_ORDINAL_VALUE:
|
||||||
|
raise ParseError(
|
||||||
|
self.filename,
|
||||||
|
"Ordinal value %d too large:" % value,
|
||||||
|
lineno=p.lineno(1),
|
||||||
|
snippet=self._GetSnippet(p.lineno(1)))
|
||||||
|
p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))
|
||||||
|
|
||||||
|
def p_enum_1(self, p):
|
||||||
|
"""enum : attribute_section ENUM NAME LBRACE enum_value_list \
|
||||||
|
RBRACE SEMI
|
||||||
|
| attribute_section ENUM NAME LBRACE nonempty_enum_value_list \
|
||||||
|
COMMA RBRACE SEMI"""
|
||||||
|
p[0] = ast.Enum(
|
||||||
|
p[3], p[1], p[5], filename=self.filename, lineno=p.lineno(2))
|
||||||
|
|
||||||
|
def p_enum_2(self, p):
|
||||||
|
"""enum : attribute_section ENUM NAME SEMI"""
|
||||||
|
p[0] = ast.Enum(
|
||||||
|
p[3], p[1], None, filename=self.filename, lineno=p.lineno(2))
|
||||||
|
|
||||||
|
def p_enum_value_list_1(self, p):
|
||||||
|
"""enum_value_list : """
|
||||||
|
p[0] = ast.EnumValueList()
|
||||||
|
|
||||||
|
def p_enum_value_list_2(self, p):
|
||||||
|
"""enum_value_list : nonempty_enum_value_list"""
|
||||||
|
p[0] = p[1]
|
||||||
|
|
||||||
|
def p_nonempty_enum_value_list_1(self, p):
|
||||||
|
"""nonempty_enum_value_list : enum_value"""
|
||||||
|
p[0] = ast.EnumValueList(p[1])
|
||||||
|
|
||||||
|
def p_nonempty_enum_value_list_2(self, p):
|
||||||
|
"""nonempty_enum_value_list : nonempty_enum_value_list COMMA enum_value"""
|
||||||
|
p[0] = p[1]
|
||||||
|
p[0].Append(p[3])
|
||||||
|
|
||||||
|
def p_enum_value(self, p):
    """enum_value : attribute_section NAME
                  | attribute_section NAME EQUALS int
                  | attribute_section NAME EQUALS identifier_wrapped"""
    # An explicit value (p[4]) is only present for the EQUALS productions;
    # otherwise None is stored.
    p[0] = ast.EnumValue(
        p[2],
        p[1],
        p[4] if len(p) == 5 else None,
        filename=self.filename,
        lineno=p.lineno(2))
def p_const(self, p):
    """const : attribute_section CONST typename NAME EQUALS constant SEMI"""
    # Argument order: name (p[4]), attributes (p[1]), typename (p[3]),
    # value (p[6]).
    p[0] = ast.Const(p[4], p[1], p[3], p[6])
def p_constant(self, p):
    """constant : literal
                | identifier_wrapped"""
    # Either alternative already produced the final value; pass it through.
    p[0] = p[1]
def p_identifier_wrapped(self, p):
    """identifier_wrapped : identifier"""
    # Wrap the identifier string in an ('IDENTIFIER', name) tuple.
    p[0] = ('IDENTIFIER', p[1])
# TODO(vtl): Make this produce a "wrapped" identifier (probably as an
# |ast.Identifier|, to be added) and get rid of identifier_wrapped.
def p_identifier(self, p):
    """identifier : NAME
                  | NAME DOT identifier"""
    # Reassemble a dotted name such as "a.b.c" into a single string.
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = p[1] + p[2] + p[3]
def p_literal(self, p):
    """literal : int
               | float
               | TRUE
               | FALSE
               | DEFAULT
               | STRING_LITERAL"""
    # Every alternative reduces to a single value; pass it through unchanged.
    p[0] = p[1]
def p_int(self, p):
    """int : int_const
           | PLUS int_const
           | MINUS int_const"""
    # Fold an optional sign back onto the constant's source text.
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = p[1] + p[2]
def p_int_const(self, p):
    """int_const : INT_CONST_DEC
                 | INT_CONST_HEX"""
    # The integer is kept as its source text (decimal or hex spelling).
    p[0] = p[1]
def p_float(self, p):
    """float : FLOAT_CONST
             | PLUS FLOAT_CONST
             | MINUS FLOAT_CONST"""
    # Reattach an optional sign to the float's source text.
    p[0] = p[1] if len(p) == 2 else p[1] + p[2]
def p_error(self, e):
    # PLY invokes this with e == None on an unexpected end of input, and with
    # the offending token otherwise.
    if e is None:
        # Unexpected EOF.
        # TODO(vtl): Can we figure out what's missing?
        raise ParseError(self.filename, "Unexpected end of file")

    lineno = e.lineno
    raise ParseError(
        self.filename,
        "Unexpected %r:" % e.value,
        lineno=lineno,
        snippet=self._GetSnippet(lineno))
def _GetSnippet(self, lineno):
|
||||||
|
return self.source.split('\n')[lineno - 1]
|
||||||
|
|
||||||
|
|
||||||
|
def Parse(source, filename):
    """Parse source file to AST.

    Args:
      source: The source text as a str (Python 2 or 3) or unicode (Python 2).
      filename: The filename that |source| originates from.

    Returns:
      The AST as a mojom.parse.ast.Mojom object.
    """
    lexer = Lexer(filename)
    parser = Parser(lexer, source, filename)

    # Use the lexer/parser objects PLY returns rather than relying on the
    # implicit module-level "most recently built" globals (yacc.parse would
    # use whichever parser was constructed last). This keeps the function
    # self-contained and avoids cross-talk if anything else in the process
    # builds its own PLY lexer or parser.
    lexer_obj = lex.lex(object=lexer)
    parser_obj = yacc.yacc(module=parser, debug=0, write_tables=0)

    return parser_obj.parse(source, lexer=lexer_obj)
1390
utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
Normal file
1390
utils/ipc/mojo/public/tools/mojom/mojom/parse/parser_unittest.py
Normal file
File diff suppressed because it is too large
Load diff
361
utils/ipc/mojo/public/tools/mojom/mojom_parser.py
Executable file
361
utils/ipc/mojo/public/tools/mojom/mojom_parser.py
Executable file
|
@ -0,0 +1,361 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
"""Parses mojom IDL files.
|
||||||
|
|
||||||
|
This script parses one or more input mojom files and produces corresponding
|
||||||
|
module files fully describing the definitions contained within each mojom. The
|
||||||
|
module data is pickled and can be easily consumed by other tools to, e.g.,
|
||||||
|
generate usable language bindings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import codecs
|
||||||
|
import errno
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from mojom.generate import module
|
||||||
|
from mojom.generate import translate
|
||||||
|
from mojom.parse import parser
|
||||||
|
from mojom.parse import conditional_features
|
||||||
|
|
||||||
|
|
||||||
|
def _ResolveRelativeImportPath(path, roots):
|
||||||
|
"""Attempts to resolve a relative import path against a set of possible roots.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path: The relative import path to resolve.
|
||||||
|
roots: A list of absolute paths which will be checked in descending length
|
||||||
|
order for a match against path.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A normalized absolute path combining one of the roots with the input path if
|
||||||
|
and only if such a file exists.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: The path could not be resolved against any of the given roots.
|
||||||
|
"""
|
||||||
|
for root in reversed(sorted(roots, key=len)):
|
||||||
|
abs_path = os.path.join(root, path)
|
||||||
|
if os.path.isfile(abs_path):
|
||||||
|
return os.path.normcase(os.path.normpath(abs_path))
|
||||||
|
|
||||||
|
raise ValueError('"%s" does not exist in any of %s' % (path, roots))
|
||||||
|
|
||||||
|
|
||||||
|
def _RebaseAbsolutePath(path, roots):
|
||||||
|
"""Rewrites an absolute file path as relative to an absolute directory path in
|
||||||
|
roots.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path: The absolute path of an existing file.
|
||||||
|
roots: A list of absolute directory paths. The given path argument must fall
|
||||||
|
within one of these directories.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A path equivalent to the input path, but relative to one of the provided
|
||||||
|
roots. If the input path falls within multiple roots, the longest root is
|
||||||
|
chosen (and thus the shortest relative path is returned).
|
||||||
|
|
||||||
|
Paths returned by this method always use forward slashes as a separator to
|
||||||
|
mirror mojom import syntax.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError if the given path does not fall within any of the listed roots.
|
||||||
|
"""
|
||||||
|
assert os.path.isabs(path)
|
||||||
|
assert os.path.isfile(path)
|
||||||
|
assert all(map(os.path.isabs, roots))
|
||||||
|
|
||||||
|
sorted_roots = list(reversed(sorted(roots, key=len)))
|
||||||
|
|
||||||
|
def try_rebase_path(path, root):
|
||||||
|
head, rebased_path = os.path.split(path)
|
||||||
|
while head != root:
|
||||||
|
head, tail = os.path.split(head)
|
||||||
|
if not tail:
|
||||||
|
return None
|
||||||
|
rebased_path = os.path.join(tail, rebased_path)
|
||||||
|
return rebased_path
|
||||||
|
|
||||||
|
for root in sorted_roots:
|
||||||
|
relative_path = try_rebase_path(path, root)
|
||||||
|
if relative_path:
|
||||||
|
# TODO(crbug.com/953884): Use pathlib for this kind of thing once we're
|
||||||
|
# fully migrated to Python 3.
|
||||||
|
return relative_path.replace('\\', '/')
|
||||||
|
|
||||||
|
raise ValueError('%s does not fall within any of %s' % (path, sorted_roots))
|
||||||
|
|
||||||
|
|
||||||
|
def _GetModuleFilename(mojom_filename):
|
||||||
|
return mojom_filename + '-module'
|
||||||
|
|
||||||
|
|
||||||
|
def _EnsureInputLoaded(mojom_abspath, module_path, abs_paths, asts,
                       dependencies, loaded_modules):
    """Recursively ensures that a module and its dependencies are loaded.

    Args:
      mojom_abspath: An absolute file path pointing to a mojom file to load.
      module_path: The relative path used to identify mojom_abspath.
      abs_paths: A mapping from module paths to absolute file paths for all
        inputs given to this execution of the script.
      asts: A map from each input mojom's absolute path to its parsed AST.
      dependencies: A mapping of which input mojoms depend on each other,
        indexed by absolute file path.
      loaded_modules: A mapping of all modules loaded so far, including
        non-input modules that were pulled in as transitive dependencies of
        the inputs.

    Returns:
      None

    On return, loaded_modules will be populated with the loaded input mojom's
    Module as well as the Modules of all of its transitive dependencies."""

    if mojom_abspath in loaded_modules:
        # Already done.
        return

    # Depth-first: translate every dependency before this module itself.
    for dep_abspath, dep_path in dependencies[mojom_abspath]:
        if dep_abspath not in loaded_modules:
            _EnsureInputLoaded(dep_abspath, dep_path, abs_paths, asts, dependencies,
                               loaded_modules)

    # Map each import's relative filename to its (now loaded) Module so that
    # translation can resolve cross-module references.
    imports = {}
    for imp in asts[mojom_abspath].import_list:
        path = imp.import_filename
        imports[path] = loaded_modules[abs_paths[path]]
    loaded_modules[mojom_abspath] = translate.OrderedModule(
        asts[mojom_abspath], module_path, imports)
def _CollectAllowedImportsFromBuildMetadata(build_metadata_filename):
|
||||||
|
allowed_imports = set()
|
||||||
|
processed_deps = set()
|
||||||
|
|
||||||
|
def collect(metadata_filename):
|
||||||
|
processed_deps.add(metadata_filename)
|
||||||
|
with open(metadata_filename) as f:
|
||||||
|
metadata = json.load(f)
|
||||||
|
allowed_imports.update(
|
||||||
|
map(os.path.normcase, map(os.path.normpath, metadata['sources'])))
|
||||||
|
for dep_metadata in metadata['deps']:
|
||||||
|
if dep_metadata not in processed_deps:
|
||||||
|
collect(dep_metadata)
|
||||||
|
|
||||||
|
collect(build_metadata_filename)
|
||||||
|
return allowed_imports
|
||||||
|
|
||||||
|
|
||||||
|
def _ParseMojoms(mojom_files,
                 input_root_paths,
                 output_root_path,
                 enabled_features,
                 allowed_imports=None):
    """Parses a set of mojom files and produces serialized module outputs.

    Args:
      mojom_files: A list of mojom files to process. Paths must be absolute paths
        which fall within one of the input or output root paths.
      input_root_paths: A list of absolute filesystem paths which may be used to
        resolve relative mojom file paths.
      output_root_path: An absolute filesystem path which will service as the root
        for all emitted artifacts. Artifacts produced from a given mojom file
        are based on the mojom's relative path, rebased onto this path.
        Additionally, the script expects this root to contain already-generated
        modules for any transitive dependencies not listed in mojom_files.
      enabled_features: A list of enabled feature names, controlling which AST
        nodes are filtered by [EnableIf] attributes.
      allowed_imports: Optional set of normalized absolute paths; if given,
        any import resolving to a path outside this set raises ValueError.

    Returns:
      None.

    Upon completion, a mojom-module file will be saved for each input mojom.
    """
    assert input_root_paths
    assert output_root_path

    loaded_mojom_asts = {}
    loaded_modules = {}
    input_dependencies = defaultdict(set)
    # Map each input's normalized absolute path to its root-relative path,
    # plus the reverse mapping for import resolution below.
    mojom_files_to_parse = dict((os.path.normcase(abs_path),
                                 _RebaseAbsolutePath(abs_path, input_root_paths))
                                for abs_path in mojom_files)
    abs_paths = dict(
        (path, abs_path) for abs_path, path in mojom_files_to_parse.items())
    for mojom_abspath, _ in mojom_files_to_parse.items():
        with codecs.open(mojom_abspath, encoding='utf-8') as f:
            ast = parser.Parse(''.join(f.readlines()), mojom_abspath)
            conditional_features.RemoveDisabledDefinitions(ast, enabled_features)
            loaded_mojom_asts[mojom_abspath] = ast
            invalid_imports = []
            for imp in ast.import_list:
                import_abspath = _ResolveRelativeImportPath(imp.import_filename,
                                                            input_root_paths)
                if allowed_imports and import_abspath not in allowed_imports:
                    invalid_imports.append(imp.import_filename)

                abs_paths[imp.import_filename] = import_abspath
                if import_abspath in mojom_files_to_parse:
                    # This import is in the input list, so we're going to translate it
                    # into a module below; however it's also a dependency of another
                    # input module. We retain record of dependencies to help with
                    # input processing later.
                    input_dependencies[mojom_abspath].add((import_abspath,
                                                           imp.import_filename))
                else:
                    # We have an import that isn't being parsed right now. It must
                    # already be parsed and have a module file sitting in a
                    # corresponding output location.
                    module_path = _GetModuleFilename(imp.import_filename)
                    module_abspath = _ResolveRelativeImportPath(module_path,
                                                                [output_root_path])
                    with open(module_abspath, 'rb') as module_file:
                        loaded_modules[import_abspath] = module.Module.Load(module_file)

            if invalid_imports:
                raise ValueError(
                    '\nThe file %s imports the following files not allowed by build '
                    'dependencies:\n\n%s\n' % (mojom_abspath,
                                               '\n'.join(invalid_imports)))

    # At this point all transitive imports not listed as inputs have been loaded
    # and we have a complete dependency tree of the unprocessed inputs. Now we can
    # load all the inputs, resolving dependencies among them recursively as we go.
    num_existing_modules_loaded = len(loaded_modules)
    for mojom_abspath, mojom_path in mojom_files_to_parse.items():
        _EnsureInputLoaded(mojom_abspath, mojom_path, abs_paths, loaded_mojom_asts,
                           input_dependencies, loaded_modules)
    assert (num_existing_modules_loaded +
            len(mojom_files_to_parse) == len(loaded_modules))

    # Now we have fully translated modules for every input and every transitive
    # dependency. We can dump the modules to disk for other tools to use.
    for mojom_abspath, mojom_path in mojom_files_to_parse.items():
        module_path = os.path.join(output_root_path, _GetModuleFilename(mojom_path))
        module_dir = os.path.dirname(module_path)
        if not os.path.exists(module_dir):
            try:
                # Python 2 doesn't support exist_ok on makedirs(), so we just ignore
                # that failure if it happens. It's possible during build due to races
                # among build steps with module outputs in the same directory.
                os.makedirs(module_dir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        with open(module_path, 'wb') as f:
            loaded_modules[mojom_abspath].Dump(f)
|
def Run(command_line):
    """Parses command-line arguments and runs the mojom parser.

    Args:
      command_line: A list of argument strings (excluding the program name),
        e.g. sys.argv[1:].

    Raises:
      ValueError: If no input mojom files were specified.
    """
    arg_parser = argparse.ArgumentParser(
        description="""
Parses one or more mojom files and produces corresponding module outputs fully
describing the definitions therein. The output is exhaustive, stable, and
sufficient for another tool to consume and emit e.g. usable language
bindings based on the original mojoms.""",
        epilog="""
Note that each transitive import dependency reachable from the input mojoms must
either also be listed as an input or must have its corresponding compiled module
already present in the provided output root.""")

    arg_parser.add_argument(
        '--input-root',
        default=[],
        action='append',
        metavar='ROOT',
        dest='input_root_paths',
        help='Adds ROOT to the set of root paths against which relative input '
        'paths should be resolved. Provided root paths are always searched '
        'in order from longest absolute path to shortest.')
    arg_parser.add_argument(
        '--output-root',
        action='store',
        required=True,
        dest='output_root_path',
        metavar='ROOT',
        help='Use ROOT as the root path in which the parser should emit compiled '
        'modules for each processed input mojom. The path of emitted module is '
        'based on the relative input path, rebased onto this root. Note that '
        'ROOT is also searched for existing modules of any transitive imports '
        'which were not included in the set of inputs.')
    arg_parser.add_argument(
        '--mojoms',
        nargs='+',
        dest='mojom_files',
        default=[],
        metavar='MOJOM_FILE',
        help='Input mojom filename(s). Each filename must be either an absolute '
        'path which falls within one of the given input or output roots, or a '
        'relative path the parser will attempt to resolve using each of those '
        'roots in unspecified order.')
    arg_parser.add_argument(
        '--mojom-file-list',
        action='store',
        metavar='LIST_FILENAME',
        # Typo fixed: "limtations" -> "limitations".
        help='Input file whose contents are a list of mojoms to process. This '
        'may be provided in lieu of --mojoms to avoid hitting command line '
        'length limitations')
    arg_parser.add_argument(
        '--enable-feature',
        dest='enabled_features',
        default=[],
        action='append',
        metavar='FEATURE',
        help='Enables a named feature when parsing the given mojoms. Features '
        'are identified by arbitrary string values. Specifying this flag with a '
        'given FEATURE name will cause the parser to process any syntax elements '
        'tagged with an [EnableIf=FEATURE] attribute. If this flag is not '
        'provided for a given FEATURE, such tagged elements are discarded by the '
        'parser and will not be present in the compiled output.')
    arg_parser.add_argument(
        '--check-imports',
        dest='build_metadata_filename',
        action='store',
        metavar='METADATA_FILENAME',
        help='Instructs the parser to check imports against a set of allowed '
        'imports. Allowed imports are based on build metadata within '
        'METADATA_FILENAME. This is a JSON file with a `sources` key listing '
        'paths to the set of input mojom files being processed by this parser '
        'run, and a `deps` key listing paths to metadata files for any '
        'dependencies of these inputs. This feature can be used to implement '
        'build-time dependency checking for mojom imports, where each build '
        'metadata file corresponds to a build target in the dependency graph of '
        'a typical build system.')

    # parse_known_args: unknown flags are deliberately tolerated (and ignored).
    args, _ = arg_parser.parse_known_args(command_line)
    if args.mojom_file_list:
        with open(args.mojom_file_list) as f:
            args.mojom_files.extend(f.read().split())

    if not args.mojom_files:
        raise ValueError(
            'Must list at least one mojom file via --mojoms or --mojom-file-list')

    mojom_files = list(map(os.path.abspath, args.mojom_files))
    input_roots = list(map(os.path.abspath, args.input_root_paths))
    output_root = os.path.abspath(args.output_root_path)

    if args.build_metadata_filename:
        allowed_imports = _CollectAllowedImportsFromBuildMetadata(
            args.build_metadata_filename)
    else:
        allowed_imports = None

    _ParseMojoms(mojom_files, input_roots, output_root, args.enabled_features,
                 allowed_imports)
# Script entry point: forward the command-line arguments (sans program name)
# to Run().
if __name__ == '__main__':
    Run(sys.argv[1:])
73
utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
Normal file
73
utils/ipc/mojo/public/tools/mojom/mojom_parser_test_case.py
Normal file
|
@ -0,0 +1,73 @@
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import mojom_parser
|
||||||
|
|
||||||
|
from mojom.generate import module
|
||||||
|
|
||||||
|
|
||||||
|
class MojomParserTestCase(unittest.TestCase):
    """Base fixture for mojom_parser.py tests. Manages a per-test scratch
    directory and provides helpers for writing mojom files, invoking the
    parser, and loading the serialized module outputs."""

    def __init__(self, method_name):
        super(MojomParserTestCase, self).__init__(method_name)
        self._temp_dir = None

    def setUp(self):
        # Fresh scratch directory per test; removed again in tearDown().
        self._temp_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self._temp_dir)
        self._temp_dir = None

    def GetPath(self, path):
        """Returns the absolute path of |path| inside the scratch directory."""
        assert not os.path.isabs(path)
        return os.path.join(self._temp_dir, path)

    def GetModulePath(self, path):
        """Returns where the serialized module for |path| is emitted."""
        assert not os.path.isabs(path)
        return os.path.join(self.GetPath('out'), path) + '-module'

    def WriteFile(self, path, contents):
        """Writes |contents| to |path| in the scratch dir, creating parents."""
        full_path = self.GetPath(path)
        dirname = os.path.dirname(full_path)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
        with open(full_path, 'w') as f:
            f.write(contents)

    def LoadModule(self, mojom_path):
        """Deserializes the module previously emitted for |mojom_path|."""
        with open(self.GetModulePath(mojom_path), 'rb') as f:
            return module.Module.Load(f)

    def ParseMojoms(self, mojoms, metadata=None):
        """Parse all input mojoms relative the temp dir."""
        out_dir = self.GetPath('out')
        args = [
            '--input-root', self._temp_dir, '--input-root', out_dir,
            '--output-root', out_dir, '--mojoms'
        ] + list(map(lambda mojom: os.path.join(self._temp_dir, mojom), mojoms))
        if metadata:
            args.extend(['--check-imports', self.GetPath(metadata)])
        mojom_parser.Run(args)

    def ExtractTypes(self, mojom):
        """Parses |mojom| source text and returns its definitions keyed by
        mojom name (enums, structs, unions and interfaces)."""
        filename = 'test.mojom'
        self.WriteFile(filename, mojom)
        self.ParseMojoms([filename])
        m = self.LoadModule(filename)
        definitions = {}
        for kinds in (m.enums, m.structs, m.unions, m.interfaces):
            for kind in kinds:
                definitions[kind.mojom_name] = kind
        return definitions
171
utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
Normal file
171
utils/ipc/mojo/public/tools/mojom/mojom_parser_unittest.py
Normal file
|
@ -0,0 +1,171 @@
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
from mojom_parser_test_case import MojomParserTestCase
|
||||||
|
|
||||||
|
|
||||||
|
class MojomParserTest(MojomParserTestCase):
    """Tests covering the behavior defined by the main mojom_parser.py script.
    This includes behavior around input and output path manipulation, dependency
    resolution, and module serialization and deserialization."""

    def testBasicParse(self):
        """Basic test to verify that we can parse a mojom file and get a module."""
        mojom = 'foo/bar.mojom'
        self.WriteFile(
            mojom, """\
        module test;
        enum TestEnum { kFoo };
        """)
        self.ParseMojoms([mojom])

        m = self.LoadModule(mojom)
        self.assertEqual('foo/bar.mojom', m.path)
        self.assertEqual('test', m.mojom_namespace)
        self.assertEqual(1, len(m.enums))

    def testBasicParseWithAbsolutePaths(self):
        """Verifies that we can parse a mojom file given an absolute path input."""
        mojom = 'foo/bar.mojom'
        self.WriteFile(
            mojom, """\
        module test;
        enum TestEnum { kFoo };
        """)
        self.ParseMojoms([self.GetPath(mojom)])

        m = self.LoadModule(mojom)
        self.assertEqual('foo/bar.mojom', m.path)
        self.assertEqual('test', m.mojom_namespace)
        self.assertEqual(1, len(m.enums))

    def testImport(self):
        """Verify imports within the same set of mojom inputs."""
        a = 'a.mojom'
        b = 'b.mojom'
        self.WriteFile(
            a, """\
        module a;
        import "b.mojom";
        struct Foo { b.Bar bar; };""")
        self.WriteFile(b, """\
        module b;
        struct Bar {};""")
        self.ParseMojoms([a, b])

        ma = self.LoadModule(a)
        mb = self.LoadModule(b)
        self.assertEqual('a.mojom', ma.path)
        self.assertEqual('b.mojom', mb.path)
        self.assertEqual(1, len(ma.imports))
        self.assertEqual(mb, ma.imports[0])

    def testPreProcessedImport(self):
        """Verify imports processed by a previous parser execution can be loaded
        properly when parsing a dependent mojom."""
        a = 'a.mojom'
        self.WriteFile(a, """\
        module a;
        struct Bar {};""")
        self.ParseMojoms([a])

        # Second parser run: a.mojom is only available as a serialized module.
        b = 'b.mojom'
        self.WriteFile(
            b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")
        self.ParseMojoms([b])

    def testMissingImport(self):
        """Verify that an import fails if the imported mojom does not exist."""
        a = 'a.mojom'
        self.WriteFile(
            a, """\
        module a;
        import "non-existent.mojom";
        struct Bar {};""")
        with self.assertRaisesRegexp(ValueError, "does not exist"):
            self.ParseMojoms([a])

    def testUnparsedImport(self):
        """Verify that an import fails if the imported mojom is not in the set of
        mojoms provided to the parser on this execution AND there is no
        pre-existing parsed output module already on disk for it."""
        a = 'a.mojom'
        b = 'b.mojom'
        self.WriteFile(a, """\
        module a;
        struct Bar {};""")
        self.WriteFile(
            b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")

        # a.mojom has not been parsed yet, so its import will fail when processing
        # b.mojom here.
        with self.assertRaisesRegexp(ValueError, "does not exist"):
            self.ParseMojoms([b])

    def testCheckImportsBasic(self):
        """Verify that the parser can handle --check-imports with a valid set of
        inputs, including support for transitive dependency resolution."""
        # Metadata chain: c depends on b, which depends on a.
        a = 'a.mojom'
        a_metadata = 'out/a.build_metadata'
        b = 'b.mojom'
        b_metadata = 'out/b.build_metadata'
        c = 'c.mojom'
        c_metadata = 'out/c.build_metadata'
        self.WriteFile(a_metadata,
                       '{"sources": ["%s"], "deps": []}\n' % self.GetPath(a))
        self.WriteFile(
            b_metadata,
            '{"sources": ["%s"], "deps": ["%s"]}\n' % (self.GetPath(b),
                                                       self.GetPath(a_metadata)))
        self.WriteFile(
            c_metadata,
            '{"sources": ["%s"], "deps": ["%s"]}\n' % (self.GetPath(c),
                                                       self.GetPath(b_metadata)))
        self.WriteFile(a, """\
        module a;
        struct Bar {};""")
        self.WriteFile(
            b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")
        self.WriteFile(
            c, """\
        module c;
        import "a.mojom";
        import "b.mojom";
        struct Baz { b.Foo foo; };""")
        self.ParseMojoms([a], metadata=a_metadata)
        self.ParseMojoms([b], metadata=b_metadata)
        self.ParseMojoms([c], metadata=c_metadata)

    def testCheckImportsMissing(self):
        """Verify that the parser rejects valid input mojoms when imports don't
        agree with build metadata given via --check-imports."""
        # b imports a, but b's metadata declares no deps, so the import must be
        # rejected.
        a = 'a.mojom'
        a_metadata = 'out/a.build_metadata'
        b = 'b.mojom'
        b_metadata = 'out/b.build_metadata'
        self.WriteFile(a_metadata,
                       '{"sources": ["%s"], "deps": []}\n' % self.GetPath(a))
        self.WriteFile(b_metadata,
                       '{"sources": ["%s"], "deps": []}\n' % self.GetPath(b))
        self.WriteFile(a, """\
        module a;
        struct Bar {};""")
        self.WriteFile(
            b, """\
        module b;
        import "a.mojom";
        struct Foo { a.Bar bar; };""")

        self.ParseMojoms([a], metadata=a_metadata)
        with self.assertRaisesRegexp(ValueError, "not allowed by build"):
            self.ParseMojoms([b], metadata=b_metadata)
127
utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
Normal file
127
utils/ipc/mojo/public/tools/mojom/stable_attribute_unittest.py
Normal file
|
@ -0,0 +1,127 @@
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
from mojom_parser_test_case import MojomParserTestCase
|
||||||
|
|
||||||
|
from mojom.generate import module
|
||||||
|
|
||||||
|
|
||||||
|
class StableAttributeTest(MojomParserTestCase):
|
||||||
|
"""Tests covering usage of the [Stable] attribute."""
|
||||||
|
|
||||||
|
def testStableAttributeTagging(self):
|
||||||
|
"""Verify that we recognize the [Stable] attribute on relevant definitions
|
||||||
|
and the resulting parser outputs are tagged accordingly."""
|
||||||
|
mojom = 'test.mojom'
|
||||||
|
self.WriteFile(
|
||||||
|
mojom, """\
|
||||||
|
[Stable] enum TestEnum { kFoo };
|
||||||
|
enum UnstableEnum { kBar };
|
||||||
|
[Stable] struct TestStruct { TestEnum a; };
|
||||||
|
struct UnstableStruct { UnstableEnum a; };
|
||||||
|
[Stable] union TestUnion { TestEnum a; TestStruct b; };
|
||||||
|
union UnstableUnion { UnstableEnum a; UnstableStruct b; };
|
||||||
|
[Stable] interface TestInterface { Foo@0(TestUnion x) => (); };
|
||||||
|
interface UnstableInterface { Foo(UnstableUnion x) => (); };
|
||||||
|
""")
|
||||||
|
self.ParseMojoms([mojom])
|
||||||
|
|
||||||
|
m = self.LoadModule(mojom)
|
||||||
|
self.assertEqual(2, len(m.enums))
|
||||||
|
self.assertTrue(m.enums[0].stable)
|
||||||
|
self.assertFalse(m.enums[1].stable)
|
||||||
|
self.assertEqual(2, len(m.structs))
|
||||||
|
self.assertTrue(m.structs[0].stable)
|
||||||
|
self.assertFalse(m.structs[1].stable)
|
||||||
|
self.assertEqual(2, len(m.unions))
|
||||||
|
self.assertTrue(m.unions[0].stable)
|
||||||
|
self.assertFalse(m.unions[1].stable)
|
||||||
|
self.assertEqual(2, len(m.interfaces))
|
||||||
|
self.assertTrue(m.interfaces[0].stable)
|
||||||
|
self.assertFalse(m.interfaces[1].stable)
|
||||||
|
|
||||||
|
def testStableStruct(self):
|
||||||
|
"""A [Stable] struct is valid if all its fields are also stable."""
|
||||||
|
self.ExtractTypes('[Stable] struct S {};')
|
||||||
|
self.ExtractTypes('[Stable] struct S { int32 x; bool b; };')
|
||||||
|
self.ExtractTypes('[Stable] enum E { A }; [Stable] struct S { E e; };')
|
||||||
|
self.ExtractTypes('[Stable] struct S {}; [Stable] struct T { S s; };')
|
||||||
|
self.ExtractTypes(
|
||||||
|
'[Stable] struct S {}; [Stable] struct T { array<S> ss; };')
|
||||||
|
self.ExtractTypes(
|
||||||
|
'[Stable] interface F {}; [Stable] struct T { pending_remote<F> f; };')
|
||||||
|
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on E'):
|
||||||
|
self.ExtractTypes('enum E { A }; [Stable] struct S { E e; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on X'):
|
||||||
|
self.ExtractTypes('struct X {}; [Stable] struct S { X x; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on T'):
|
||||||
|
self.ExtractTypes('struct T {}; [Stable] struct S { array<T> xs; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on T'):
|
||||||
|
self.ExtractTypes('struct T {}; [Stable] struct S { map<int32, T> xs; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on T'):
|
||||||
|
self.ExtractTypes('struct T {}; [Stable] struct S { map<T, int32> xs; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on F'):
|
||||||
|
self.ExtractTypes(
|
||||||
|
'interface F {}; [Stable] struct S { pending_remote<F> f; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on F'):
|
||||||
|
self.ExtractTypes(
|
||||||
|
'interface F {}; [Stable] struct S { pending_receiver<F> f; };')
|
||||||
|
|
||||||
|
def testStableUnion(self):
|
||||||
|
"""A [Stable] union is valid if all its fields' types are also stable."""
|
||||||
|
self.ExtractTypes('[Stable] union U {};')
|
||||||
|
self.ExtractTypes('[Stable] union U { int32 x; bool b; };')
|
||||||
|
self.ExtractTypes('[Stable] enum E { A }; [Stable] union U { E e; };')
|
||||||
|
self.ExtractTypes('[Stable] struct S {}; [Stable] union U { S s; };')
|
||||||
|
self.ExtractTypes(
|
||||||
|
'[Stable] struct S {}; [Stable] union U { array<S> ss; };')
|
||||||
|
self.ExtractTypes(
|
||||||
|
'[Stable] interface F {}; [Stable] union U { pending_remote<F> f; };')
|
||||||
|
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on E'):
|
||||||
|
self.ExtractTypes('enum E { A }; [Stable] union U { E e; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on X'):
|
||||||
|
self.ExtractTypes('struct X {}; [Stable] union U { X x; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on T'):
|
||||||
|
self.ExtractTypes('struct T {}; [Stable] union U { array<T> xs; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on T'):
|
||||||
|
self.ExtractTypes('struct T {}; [Stable] union U { map<int32, T> xs; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on T'):
|
||||||
|
self.ExtractTypes('struct T {}; [Stable] union U { map<T, int32> xs; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on F'):
|
||||||
|
self.ExtractTypes(
|
||||||
|
'interface F {}; [Stable] union U { pending_remote<F> f; };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on F'):
|
||||||
|
self.ExtractTypes(
|
||||||
|
'interface F {}; [Stable] union U { pending_receiver<F> f; };')
|
||||||
|
|
||||||
|
def testStableInterface(self):
|
||||||
|
"""A [Stable] interface is valid if all its methods' parameter types are
|
||||||
|
stable, including response parameters where applicable."""
|
||||||
|
self.ExtractTypes('[Stable] interface F {};')
|
||||||
|
self.ExtractTypes('[Stable] interface F { A@0(int32 x); };')
|
||||||
|
self.ExtractTypes('[Stable] interface F { A@0(int32 x) => (bool b); };')
|
||||||
|
self.ExtractTypes("""\
|
||||||
|
[Stable] enum E { A, B, C };
|
||||||
|
[Stable] struct S {};
|
||||||
|
[Stable] interface F { A@0(E e, S s) => (bool b, array<S> s); };
|
||||||
|
""")
|
||||||
|
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on E'):
|
||||||
|
self.ExtractTypes(
|
||||||
|
'enum E { A, B, C }; [Stable] interface F { A@0(E e); };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on E'):
|
||||||
|
self.ExtractTypes(
|
||||||
|
'enum E { A, B, C }; [Stable] interface F { A@0(int32 x) => (E e); };'
|
||||||
|
)
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on S'):
|
||||||
|
self.ExtractTypes(
|
||||||
|
'struct S {}; [Stable] interface F { A@0(int32 x) => (S s); };')
|
||||||
|
with self.assertRaisesRegexp(Exception, 'because it depends on S'):
|
||||||
|
self.ExtractTypes(
|
||||||
|
'struct S {}; [Stable] interface F { A@0(S s) => (bool b); };')
|
||||||
|
|
||||||
|
with self.assertRaisesRegexp(Exception, 'explicit method ordinals'):
|
||||||
|
self.ExtractTypes('[Stable] interface F { A() => (); };')
|
|
@ -0,0 +1,397 @@
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
from mojom_parser_test_case import MojomParserTestCase
|
||||||
|
|
||||||
|
|
||||||
|
class VersionCompatibilityTest(MojomParserTestCase):
|
||||||
|
"""Tests covering compatibility between two versions of the same mojom type
|
||||||
|
definition. This coverage ensures that we can reliably detect unsafe changes
|
||||||
|
to definitions that are expected to tolerate version skew in production
|
||||||
|
environments."""
|
||||||
|
|
||||||
|
def _GetTypeCompatibilityMap(self, old_mojom, new_mojom):
|
||||||
|
"""Helper to support the implementation of assertBackwardCompatible and
|
||||||
|
assertNotBackwardCompatible."""
|
||||||
|
|
||||||
|
old = self.ExtractTypes(old_mojom)
|
||||||
|
new = self.ExtractTypes(new_mojom)
|
||||||
|
self.assertEqual(set(old.keys()), set(new.keys()),
|
||||||
|
'Old and new test mojoms should use the same type names.')
|
||||||
|
|
||||||
|
compatibility_map = {}
|
||||||
|
for name in old.keys():
|
||||||
|
compatibility_map[name] = new[name].IsBackwardCompatible(old[name])
|
||||||
|
return compatibility_map
|
||||||
|
|
||||||
|
def assertBackwardCompatible(self, old_mojom, new_mojom):
|
||||||
|
compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
|
||||||
|
for name, compatible in compatibility_map.items():
|
||||||
|
if not compatible:
|
||||||
|
raise AssertionError(
|
||||||
|
'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n'
|
||||||
|
'The new definition of %s should pass a backward-compatibiity '
|
||||||
|
'check, but it does not.' % (old_mojom, new_mojom, name))
|
||||||
|
|
||||||
|
def assertNotBackwardCompatible(self, old_mojom, new_mojom):
|
||||||
|
compatibility_map = self._GetTypeCompatibilityMap(old_mojom, new_mojom)
|
||||||
|
if all(compatibility_map.values()):
|
||||||
|
raise AssertionError(
|
||||||
|
'Given the old mojom:\n\n %s\n\nand the new mojom:\n\n %s\n\n'
|
||||||
|
'The new mojom should fail a backward-compatibility check, but it '
|
||||||
|
'does not.' % (old_mojom, new_mojom))
|
||||||
|
|
||||||
|
def testNewNonExtensibleEnumValue(self):
|
||||||
|
"""Adding a value to a non-extensible enum breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
|
||||||
|
'enum E { kFoo, kBar, kBaz };')
|
||||||
|
|
||||||
|
def testNewNonExtensibleEnumValueWithMinVersion(self):
|
||||||
|
"""Adding a value to a non-extensible enum breaks backward-compatibility,
|
||||||
|
even with a new [MinVersion] specified for the value."""
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'enum E { kFoo, kBar };', 'enum E { kFoo, kBar, [MinVersion=1] kBaz };')
|
||||||
|
|
||||||
|
def testNewValueInExistingVersion(self):
|
||||||
|
"""Adding a value to an existing version is not allowed, even if the old
|
||||||
|
enum was marked [Extensible]. Note that it is irrelevant whether or not the
|
||||||
|
new enum is marked [Extensible]."""
|
||||||
|
self.assertNotBackwardCompatible('[Extensible] enum E { kFoo, kBar };',
|
||||||
|
'enum E { kFoo, kBar, kBaz };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kFoo, kBar };',
|
||||||
|
'[Extensible] enum E { kFoo, kBar, kBaz };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kFoo, [MinVersion=1] kBar };',
|
||||||
|
'enum E { kFoo, [MinVersion=1] kBar, [MinVersion=1] kBaz };')
|
||||||
|
|
||||||
|
def testEnumValueRemoval(self):
|
||||||
|
"""Removal of an enum value is never valid even for [Extensible] enums."""
|
||||||
|
self.assertNotBackwardCompatible('enum E { kFoo, kBar };',
|
||||||
|
'enum E { kFoo };')
|
||||||
|
self.assertNotBackwardCompatible('[Extensible] enum E { kFoo, kBar };',
|
||||||
|
'[Extensible] enum E { kFoo };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kA, [MinVersion=1] kB };',
|
||||||
|
'[Extensible] enum E { kA, };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kA, [MinVersion=1] kB, [MinVersion=1] kZ };',
|
||||||
|
'[Extensible] enum E { kA, [MinVersion=1] kB };')
|
||||||
|
|
||||||
|
def testNewExtensibleEnumValueWithMinVersion(self):
|
||||||
|
"""Adding a new and properly [MinVersion]'d value to an [Extensible] enum
|
||||||
|
is a backward-compatible change. Note that it is irrelevant whether or not
|
||||||
|
the new enum is marked [Extensible]."""
|
||||||
|
self.assertBackwardCompatible('[Extensible] enum E { kA, kB };',
|
||||||
|
'enum E { kA, kB, [MinVersion=1] kC };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kA, kB };',
|
||||||
|
'[Extensible] enum E { kA, kB, [MinVersion=1] kC };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kA, [MinVersion=1] kB };',
|
||||||
|
'[Extensible] enum E { kA, [MinVersion=1] kB, [MinVersion=2] kC };')
|
||||||
|
|
||||||
|
def testRenameEnumValue(self):
|
||||||
|
"""Renaming an enum value does not affect backward-compatibility. Only
|
||||||
|
numeric value is relevant."""
|
||||||
|
self.assertBackwardCompatible('enum E { kA, kB };', 'enum E { kX, kY };')
|
||||||
|
|
||||||
|
def testAddEnumValueAlias(self):
|
||||||
|
"""Adding new enum fields does not affect backward-compatibility if it does
|
||||||
|
not introduce any new numeric values."""
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'enum E { kA, kB };', 'enum E { kA, kB, kC = kA, kD = 1, kE = kD };')
|
||||||
|
|
||||||
|
def testEnumIdentity(self):
|
||||||
|
"""An unchanged enum is obviously backward-compatible."""
|
||||||
|
self.assertBackwardCompatible('enum E { kA, kB, kC };',
|
||||||
|
'enum E { kA, kB, kC };')
|
||||||
|
|
||||||
|
def testNewStructFieldUnversioned(self):
|
||||||
|
"""Adding a new field to a struct without a new (i.e. higher than any
|
||||||
|
existing version) [MinVersion] tag breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||||
|
'struct S { string a; string b; };')
|
||||||
|
|
||||||
|
def testStructFieldRemoval(self):
|
||||||
|
"""Removing a field from a struct breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('struct S { string a; string b; };',
|
||||||
|
'struct S { string a; };')
|
||||||
|
|
||||||
|
def testStructFieldTypeChange(self):
|
||||||
|
"""Changing the type of an existing field always breaks
|
||||||
|
backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||||
|
'struct S { array<int32> a; };')
|
||||||
|
|
||||||
|
def testStructFieldBecomingOptional(self):
|
||||||
|
"""Changing a field from non-optional to optional breaks
|
||||||
|
backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('struct S { string a; };',
|
||||||
|
'struct S { string? a; };')
|
||||||
|
|
||||||
|
def testStructFieldBecomingNonOptional(self):
|
||||||
|
"""Changing a field from optional to non-optional breaks
|
||||||
|
backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('struct S { string? a; };',
|
||||||
|
'struct S { string a; };')
|
||||||
|
|
||||||
|
def testStructFieldOrderChange(self):
|
||||||
|
"""Changing the order of fields breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('struct S { string a; bool b; };',
|
||||||
|
'struct S { bool b; string a; };')
|
||||||
|
self.assertNotBackwardCompatible('struct S { string a@0; bool b@1; };',
|
||||||
|
'struct S { string a@1; bool b@0; };')
|
||||||
|
|
||||||
|
def testStructFieldMinVersionChange(self):
|
||||||
|
"""Changing the MinVersion of a field breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'struct S { string a; [MinVersion=1] string? b; };',
|
||||||
|
'struct S { string a; [MinVersion=2] string? b; };')
|
||||||
|
|
||||||
|
def testStructFieldTypeChange(self):
|
||||||
|
"""If a struct field's own type definition changes, the containing struct
|
||||||
|
is backward-compatible if and only if the field type's change is
|
||||||
|
backward-compatible."""
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'struct S {}; struct T { S s; };',
|
||||||
|
'struct S { [MinVersion=1] int32 x; }; struct T { S s; };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kA }; struct S { E e; };',
|
||||||
|
'[Extensible] enum E { kA, [MinVersion=1] kB }; struct S { E e; };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'struct S {}; struct T { S s; };',
|
||||||
|
'struct S { int32 x; }; struct T { S s; };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kA }; struct S { E e; };',
|
||||||
|
'[Extensible] enum E { kA, kB }; struct S { E e; };')
|
||||||
|
|
||||||
|
def testNewStructFieldWithInvalidMinVersion(self):
|
||||||
|
"""Adding a new field using an existing MinVersion breaks backward-
|
||||||
|
compatibility."""
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
"""\
|
||||||
|
struct S {
|
||||||
|
string a;
|
||||||
|
[MinVersion=1] string? b;
|
||||||
|
};
|
||||||
|
""", """\
|
||||||
|
struct S {
|
||||||
|
string a;
|
||||||
|
[MinVersion=1] string? b;
|
||||||
|
[MinVersion=1] string? c;
|
||||||
|
};""")
|
||||||
|
|
||||||
|
def testNewStructFieldWithValidMinVersion(self):
|
||||||
|
"""Adding a new field is safe if tagged with a MinVersion greater than any
|
||||||
|
previously used MinVersion in the struct."""
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'struct S { int32 a; };',
|
||||||
|
'struct S { int32 a; [MinVersion=1] int32 b; };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'struct S { int32 a; [MinVersion=1] int32 b; };',
|
||||||
|
'struct S { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };')
|
||||||
|
|
||||||
|
def testNewStructFieldNullableReference(self):
|
||||||
|
"""Adding a new nullable reference-typed field is fine if versioned
|
||||||
|
properly."""
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'struct S { int32 a; };',
|
||||||
|
'struct S { int32 a; [MinVersion=1] string? b; };')
|
||||||
|
|
||||||
|
def testStructFieldRename(self):
|
||||||
|
"""Renaming a field has no effect on backward-compatibility."""
|
||||||
|
self.assertBackwardCompatible('struct S { int32 x; bool b; };',
|
||||||
|
'struct S { int32 a; bool b; };')
|
||||||
|
|
||||||
|
def testStructFieldReorderWithExplicitOrdinals(self):
|
||||||
|
"""Reordering fields has no effect on backward-compatibility when field
|
||||||
|
ordinals are explicitly labeled and remain unchanged."""
|
||||||
|
self.assertBackwardCompatible('struct S { bool b@1; int32 a@0; };',
|
||||||
|
'struct S { int32 a@0; bool b@1; };')
|
||||||
|
|
||||||
|
def testNewUnionFieldUnversioned(self):
|
||||||
|
"""Adding a new field to a union without a new (i.e. higher than any
|
||||||
|
existing version) [MinVersion] tag breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('union U { string a; };',
|
||||||
|
'union U { string a; string b; };')
|
||||||
|
|
||||||
|
def testUnionFieldRemoval(self):
|
||||||
|
"""Removing a field from a union breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('union U { string a; string b; };',
|
||||||
|
'union U { string a; };')
|
||||||
|
|
||||||
|
def testUnionFieldTypeChange(self):
|
||||||
|
"""Changing the type of an existing field always breaks
|
||||||
|
backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('union U { string a; };',
|
||||||
|
'union U { array<int32> a; };')
|
||||||
|
|
||||||
|
def testUnionFieldBecomingOptional(self):
|
||||||
|
"""Changing a field from non-optional to optional breaks
|
||||||
|
backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('union U { string a; };',
|
||||||
|
'union U { string? a; };')
|
||||||
|
|
||||||
|
def testUnionFieldBecomingNonOptional(self):
|
||||||
|
"""Changing a field from optional to non-optional breaks
|
||||||
|
backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('union U { string? a; };',
|
||||||
|
'union U { string a; };')
|
||||||
|
|
||||||
|
def testUnionFieldOrderChange(self):
|
||||||
|
"""Changing the order of fields breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('union U { string a; bool b; };',
|
||||||
|
'union U { bool b; string a; };')
|
||||||
|
self.assertNotBackwardCompatible('union U { string a@0; bool b@1; };',
|
||||||
|
'union U { string a@1; bool b@0; };')
|
||||||
|
|
||||||
|
def testUnionFieldMinVersionChange(self):
|
||||||
|
"""Changing the MinVersion of a field breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'union U { string a; [MinVersion=1] string b; };',
|
||||||
|
'union U { string a; [MinVersion=2] string b; };')
|
||||||
|
|
||||||
|
def testUnionFieldTypeChange(self):
|
||||||
|
"""If a union field's own type definition changes, the containing union
|
||||||
|
is backward-compatible if and only if the field type's change is
|
||||||
|
backward-compatible."""
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'struct S {}; union U { S s; };',
|
||||||
|
'struct S { [MinVersion=1] int32 x; }; union U { S s; };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kA }; union U { E e; };',
|
||||||
|
'[Extensible] enum E { kA, [MinVersion=1] kB }; union U { E e; };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'struct S {}; union U { S s; };',
|
||||||
|
'struct S { int32 x; }; union U { S s; };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'[Extensible] enum E { kA }; union U { E e; };',
|
||||||
|
'[Extensible] enum E { kA, kB }; union U { E e; };')
|
||||||
|
|
||||||
|
def testNewUnionFieldWithInvalidMinVersion(self):
|
||||||
|
"""Adding a new field using an existing MinVersion breaks backward-
|
||||||
|
compatibility."""
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
"""\
|
||||||
|
union U {
|
||||||
|
string a;
|
||||||
|
[MinVersion=1] string b;
|
||||||
|
};
|
||||||
|
""", """\
|
||||||
|
union U {
|
||||||
|
string a;
|
||||||
|
[MinVersion=1] string b;
|
||||||
|
[MinVersion=1] string c;
|
||||||
|
};""")
|
||||||
|
|
||||||
|
def testNewUnionFieldWithValidMinVersion(self):
|
||||||
|
"""Adding a new field is safe if tagged with a MinVersion greater than any
|
||||||
|
previously used MinVersion in the union."""
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'union U { int32 a; };',
|
||||||
|
'union U { int32 a; [MinVersion=1] int32 b; };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'union U { int32 a; [MinVersion=1] int32 b; };',
|
||||||
|
'union U { int32 a; [MinVersion=1] int32 b; [MinVersion=2] bool c; };')
|
||||||
|
|
||||||
|
def testUnionFieldRename(self):
|
||||||
|
"""Renaming a field has no effect on backward-compatibility."""
|
||||||
|
self.assertBackwardCompatible('union U { int32 x; bool b; };',
|
||||||
|
'union U { int32 a; bool b; };')
|
||||||
|
|
||||||
|
def testUnionFieldReorderWithExplicitOrdinals(self):
|
||||||
|
"""Reordering fields has no effect on backward-compatibility when field
|
||||||
|
ordinals are explicitly labeled and remain unchanged."""
|
||||||
|
self.assertBackwardCompatible('union U { bool b@1; int32 a@0; };',
|
||||||
|
'union U { int32 a@0; bool b@1; };')
|
||||||
|
|
||||||
|
def testNewInterfaceMethodUnversioned(self):
|
||||||
|
"""Adding a new method to an interface without a new (i.e. higher than any
|
||||||
|
existing version) [MinVersion] tag breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('interface F { A(); };',
|
||||||
|
'interface F { A(); B(); };')
|
||||||
|
|
||||||
|
def testInterfaceMethodRemoval(self):
|
||||||
|
"""Removing a method from an interface breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('interface F { A(); B(); };',
|
||||||
|
'interface F { A(); };')
|
||||||
|
|
||||||
|
def testInterfaceMethodParamsChanged(self):
|
||||||
|
"""Changes to the parameter list are only backward-compatible if they meet
|
||||||
|
backward-compatibility requirements of an equivalent struct definition."""
|
||||||
|
self.assertNotBackwardCompatible('interface F { A(); };',
|
||||||
|
'interface F { A(int32 x); };')
|
||||||
|
self.assertNotBackwardCompatible('interface F { A(int32 x); };',
|
||||||
|
'interface F { A(bool x); };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'interface F { A(int32 x, [MinVersion=1] string? s); };', """\
|
||||||
|
interface F {
|
||||||
|
A(int32 x, [MinVersion=1] string? s, [MinVersion=1] int32 y);
|
||||||
|
};""")
|
||||||
|
|
||||||
|
self.assertBackwardCompatible('interface F { A(int32 x); };',
|
||||||
|
'interface F { A(int32 a); };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'interface F { A(int32 x); };',
|
||||||
|
'interface F { A(int32 x, [MinVersion=1] string? s); };')
|
||||||
|
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'struct S {}; interface F { A(S s); };',
|
||||||
|
'struct S { [MinVersion=1] int32 x; }; interface F { A(S s); };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'struct S {}; struct T {}; interface F { A(S s); };',
|
||||||
|
'struct S {}; struct T {}; interface F { A(T s); };')
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
'struct S {}; struct T { int32 x; }; interface F { A(S s); };',
|
||||||
|
'struct S {}; struct T { int32 x; }; interface F { A(T t); };')
|
||||||
|
|
||||||
|
def testInterfaceMethodReplyAdded(self):
|
||||||
|
"""Adding a reply to a message breaks backward-compatibilty."""
|
||||||
|
self.assertNotBackwardCompatible('interface F { A(); };',
|
||||||
|
'interface F { A() => (); };')
|
||||||
|
|
||||||
|
def testInterfaceMethodReplyRemoved(self):
|
||||||
|
"""Removing a reply from a message breaks backward-compatibility."""
|
||||||
|
self.assertNotBackwardCompatible('interface F { A() => (); };',
|
||||||
|
'interface F { A(); };')
|
||||||
|
|
||||||
|
def testInterfaceMethodReplyParamsChanged(self):
|
||||||
|
"""Similar to request parameters, a change to reply parameters is considered
|
||||||
|
backward-compatible if it meets the same backward-compatibility
|
||||||
|
requirements imposed on equivalent struct changes."""
|
||||||
|
self.assertNotBackwardCompatible('interface F { A() => (); };',
|
||||||
|
'interface F { A() => (int32 x); };')
|
||||||
|
self.assertNotBackwardCompatible('interface F { A() => (int32 x); };',
|
||||||
|
'interface F { A() => (); };')
|
||||||
|
self.assertNotBackwardCompatible('interface F { A() => (bool x); };',
|
||||||
|
'interface F { A() => (int32 x); };')
|
||||||
|
|
||||||
|
self.assertBackwardCompatible('interface F { A() => (int32 a); };',
|
||||||
|
'interface F { A() => (int32 x); };')
|
||||||
|
self.assertBackwardCompatible(
|
||||||
|
'interface F { A() => (int32 x); };',
|
||||||
|
'interface F { A() => (int32 x, [MinVersion] string? s); };')
|
||||||
|
|
||||||
|
def testNewInterfaceMethodWithInvalidMinVersion(self):
|
||||||
|
"""Adding a new method to an existing version is not backward-compatible."""
|
||||||
|
self.assertNotBackwardCompatible(
|
||||||
|
"""\
|
||||||
|
interface F {
|
||||||
|
A();
|
||||||
|
[MinVersion=1] B();
|
||||||
|
};
|
||||||
|
""", """\
|
||||||
|
interface F {
|
||||||
|
A();
|
||||||
|
[MinVersion=1] B();
|
||||||
|
[MinVersion=1] C();
|
||||||
|
};
|
||||||
|
""")
|
||||||
|
|
||||||
|
def testNewInterfaceMethodWithValidMinVersion(self):
|
||||||
|
"""Adding a new method is fine as long as its MinVersion exceeds that of any
|
||||||
|
method on the old interface definition."""
|
||||||
|
self.assertBackwardCompatible('interface F { A(); };',
|
||||||
|
'interface F { A(); [MinVersion=1] B(); };')
|
28
utils/ipc/mojo/public/tools/run_all_python_unittests.py
Executable file
28
utils/ipc/mojo/public/tools/run_all_python_unittests.py
Executable file
|
@ -0,0 +1,28 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
# Copyright 2020 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
|
||||||
|
_TOOLS_DIR = os.path.dirname(__file__)
|
||||||
|
_MOJOM_DIR = os.path.join(_TOOLS_DIR, 'mojom')
|
||||||
|
_SRC_DIR = os.path.join(_TOOLS_DIR, os.path.pardir, os.path.pardir,
|
||||||
|
os.path.pardir)
|
||||||
|
|
||||||
|
# Ensure that the mojom library is discoverable.
|
||||||
|
sys.path.append(_MOJOM_DIR)
|
||||||
|
|
||||||
|
# Help Python find typ in //third_party/catapult/third_party/typ/
|
||||||
|
sys.path.append(
|
||||||
|
os.path.join(_SRC_DIR, 'third_party', 'catapult', 'third_party', 'typ'))
|
||||||
|
import typ
|
||||||
|
|
||||||
|
|
||||||
|
def Main():
|
||||||
|
return typ.main(top_level_dir=_MOJOM_DIR)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.exit(Main())
|
51
utils/ipc/tools/diagnosis/crbug_1001171.py
Normal file
51
utils/ipc/tools/diagnosis/crbug_1001171.py
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
# Copyright 2019 The Chromium Authors. All rights reserved.
|
||||||
|
# Use of this source code is governed by a BSD-style license that can be
|
||||||
|
# found in the LICENSE file.
|
||||||
|
|
||||||
|
"""Helper context wrapper for diagnosing crbug.com/1001171.
|
||||||
|
|
||||||
|
This module and all uses thereof can and should be removed once
|
||||||
|
crbug.com/1001171 has been resolved.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def DumpStateOnLookupError():
|
||||||
|
"""Prints potentially useful state info in the event of a LookupError."""
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
except LookupError:
|
||||||
|
print('LookupError diagnosis for crbug.com/1001171:')
|
||||||
|
for path_index, path_entry in enumerate(sys.path):
|
||||||
|
desc = 'unknown'
|
||||||
|
if not os.path.exists(path_entry):
|
||||||
|
desc = 'missing'
|
||||||
|
elif os.path.islink(path_entry):
|
||||||
|
desc = 'link -> %s' % os.path.realpath(path_entry)
|
||||||
|
elif os.path.isfile(path_entry):
|
||||||
|
desc = 'file'
|
||||||
|
elif os.path.isdir(path_entry):
|
||||||
|
desc = 'dir'
|
||||||
|
print(' sys.path[%d]: %s (%s)' % (path_index, path_entry, desc))
|
||||||
|
|
||||||
|
real_path_entry = os.path.realpath(path_entry)
|
||||||
|
if (path_entry.endswith(os.path.join('lib', 'python2.7'))
|
||||||
|
and os.path.isdir(real_path_entry)):
|
||||||
|
encodings_dir = os.path.realpath(
|
||||||
|
os.path.join(real_path_entry, 'encodings'))
|
||||||
|
if os.path.exists(encodings_dir):
|
||||||
|
if os.path.isdir(encodings_dir):
|
||||||
|
print(' %s contents: %s' % (encodings_dir,
|
||||||
|
str(os.listdir(encodings_dir))))
|
||||||
|
else:
|
||||||
|
print(' %s exists but is not a directory' % encodings_dir)
|
||||||
|
else:
|
||||||
|
print(' %s missing' % encodings_dir)
|
||||||
|
|
||||||
|
raise
|
Loading…
Add table
Add a link
Reference in a new issue