Commit e350e84a authored by Johannes Henkel, committed by Commit Bot

[DevTools] Roll inspector_protocol (V8).

New Revision: d48ba2079ffcdaf2d99f4153127aab6dbe32a954

Change-Id: Idde7388b4f92492609c1714fc003ec3234c8bf82
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1686451
Auto-Submit: Johannes Henkel <johannes@chromium.org>
Reviewed-by: Alexei Filippov <alph@chromium.org>
Commit-Queue: Alexei Filippov <alph@chromium.org>
Cr-Commit-Position: refs/heads/master@{#62503}
parent 39eab44d
......@@ -15,8 +15,8 @@ https://cs.chromium.org/chromium/src/v8/third_party/inspector_protocol/
See also [Contributing to Chrome Devtools Protocol](https://docs.google.com/document/d/1c-COD2kaK__5iMM5SEx-PzNA7HFmgttcYfOHHX0HaOM/edit).
We're working on enabling standalone builds for parts of this package for
testing and development, please feel free to ignore this for now.
But, if you're familiar with
testing and development.
If you're familiar with
[Chromium's development process](https://www.chromium.org/developers/contributing-code)
and have the depot_tools installed, you may use these commands
to fetch the package (and dependencies) and build and run the tests:
......@@ -24,8 +24,9 @@ to fetch the package (and dependencies) and build and run the tests:
fetch inspector_protocol
cd src
gn gen out/Release
ninja -C out/Release json_parser_test
out/Release/json_parser_test
ninja -C out/Release encoding_test bindings_test
out/Release/encoding_test
out/Release/bindings_test
You'll probably also need to install g++, since Clang uses it to find the
standard C++ headers. E.g.,
......
......@@ -2,7 +2,7 @@ Name: inspector protocol
Short Name: inspector_protocol
URL: https://chromium.googlesource.com/deps/inspector_protocol/
Version: 0
Revision: aec57d43b6a2c41c37fb0a2507108e89a9342177
Revision: 373efb7fe33a7ae84038868ed08b9f1bd328b55d
License: BSD
License File: LICENSE
Security Critical: no
......
......@@ -38,8 +38,10 @@ def read_config():
# pylint: disable=W0703
def json_to_object(data, output_base, config_base):
def json_object_hook(object_dict):
items = [(k, os.path.join(config_base, v) if k == "path" else v) for (k, v) in object_dict.items()]
items = [(k, os.path.join(output_base, v) if k == "output" else v) for (k, v) in items]
items = [(k, os.path.join(config_base, v) if k == "path" else v)
for (k, v) in object_dict.items()]
items = [(k, os.path.join(output_base, v) if k == "output" else v)
for (k, v) in items]
keys, values = list(zip(*items))
return collections.namedtuple('X', keys)(*values)
return json.loads(data, object_hook=json_object_hook)
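
For reference, the object_hook/namedtuple pattern that json_to_object builds on can be sketched in isolation (a minimal sketch with hypothetical config keys; the path/output rewriting above is omitted):

import collections
import json

def to_object(data):
    # json.loads calls the hook for every JSON object, innermost first, so
    # nested dicts become nested namedtuples and the generator can use
    # attribute access such as config.protocol.output.
    def hook(object_dict):
        keys, values = list(zip(*object_dict.items()))
        return collections.namedtuple('X', keys)(*values)
    return json.loads(data, object_hook=hook)

config = to_object('{"protocol": {"path": "js_protocol.pdl", "output": "gen"}}')
print(config.protocol.path)    # js_protocol.pdl
print(config.protocol.output)  # gen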
......@@ -85,7 +87,8 @@ def read_config():
try:
config_json_file = open(config_file, "r")
config_json_string = config_json_file.read()
config_partial = json_to_object(config_json_string, output_base, config_base)
config_partial = json_to_object(config_json_string, output_base,
config_base)
config_json_file.close()
defaults = {
".use_snake_file_names": False,
......@@ -109,10 +112,12 @@ def read_config():
# The encoding lib consists of encoding/encoding.h and
# encoding/encoding.cc in its subdirectory, which binaries
# must link / depend on.
".encoding_lib.header": os.path.join(inspector_protocol_dir, "encoding/encoding.h"),
".encoding_lib.header": os.path.join(inspector_protocol_dir,
"encoding/encoding.h"),
".encoding_lib.namespace": "",
# Ditto for bindings, see bindings/bindings.h.
".bindings_lib.header": os.path.join(inspector_protocol_dir, "bindings/bindings.h"),
".bindings_lib.header": os.path.join(inspector_protocol_dir,
"bindings/bindings.h"),
".bindings_lib.namespace": ""
}
for key_value in config_values:
......@@ -170,7 +175,8 @@ def format_include(config, header, file_name=None):
def format_domain_include(config, header, file_name):
return format_include(config, header, config.protocol.file_name_prefix + file_name)
return format_include(config, header,
config.protocol.file_name_prefix + file_name)
def to_file_name(config, file_name):
......@@ -195,7 +201,10 @@ def initialize_jinja_env(jinja_dir, cache_dir, config):
keep_trailing_newline=True, # newline-terminate generated files
lstrip_blocks=True, # so can indent control flow tags
trim_blocks=True)
jinja_env.filters.update({"to_title_case": to_title_case, "dash_to_camelcase": dash_to_camelcase, "to_method_case": functools.partial(to_method_case, config)})
jinja_env.filters.update({
"to_title_case": to_title_case,
"dash_to_camelcase": dash_to_camelcase,
"to_method_case": functools.partial(to_method_case, config)})
jinja_env.add_extension("jinja2.ext.loopcontrols")
return jinja_env
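
The custom filters registered above are what the templates use to derive C++ identifiers from protocol names. A minimal sketch of the mechanism, using simplified stand-ins for the real to_title_case and dash_to_camelcase helpers defined elsewhere in code_generator.py:

import jinja2

def to_title_case(name):
    # Simplified stand-in: capitalize the first character only.
    return name[:1].upper() + name[1:]

def dash_to_camelcase(word):
    # Simplified stand-in: "request-will-be-sent" -> "RequestWillBeSent".
    return ''.join(to_title_case(part) for part in word.split('-'))

env = jinja2.Environment()
env.filters.update({"to_title_case": to_title_case,
                    "dash_to_camelcase": dash_to_camelcase})
template = env.from_string("{{ name | dash_to_camelcase }}Notification")
print(template.render(name="request-will-be-sent"))
# RequestWillBeSentNotification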
......@@ -203,23 +212,31 @@ def initialize_jinja_env(jinja_dir, cache_dir, config):
def create_imported_type_definition(domain_name, type, imported_namespace):
# pylint: disable=W0622
return {
"return_type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
"pass_type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
"return_type": "std::unique_ptr<%s::%s::API::%s>" % (
imported_namespace, domain_name, type["id"]),
"pass_type": "std::unique_ptr<%s::%s::API::%s>" % (
imported_namespace, domain_name, type["id"]),
"to_raw_type": "%s.get()",
"to_pass_type": "std::move(%s)",
"to_rvalue": "std::move(%s)",
"type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
"raw_type": "%s::%s::API::%s" % (imported_namespace, domain_name, type["id"]),
"raw_pass_type": "%s::%s::API::%s*" % (imported_namespace, domain_name, type["id"]),
"raw_return_type": "%s::%s::API::%s*" % (imported_namespace, domain_name, type["id"]),
"type": "std::unique_ptr<%s::%s::API::%s>" % (
imported_namespace, domain_name, type["id"]),
"raw_type": "%s::%s::API::%s" % (
imported_namespace, domain_name, type["id"]),
"raw_pass_type": "%s::%s::API::%s*" % (
imported_namespace, domain_name, type["id"]),
"raw_return_type": "%s::%s::API::%s*" % (
imported_namespace, domain_name, type["id"]),
}
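
To make the format strings above concrete: with a hypothetical imported namespace of v8_inspector::protocol and a type Runtime.RemoteObject (names chosen purely for illustration), the entries expand as follows:

# Illustration only; the real namespace and type names come from the config
# and the parsed protocol definition.
imported_namespace = "v8_inspector::protocol"
domain_name, type_id = "Runtime", "RemoteObject"

print("std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type_id))
# std::unique_ptr<v8_inspector::protocol::Runtime::API::RemoteObject>
print("%s::%s::API::%s*" % (imported_namespace, domain_name, type_id))
# v8_inspector::protocol::Runtime::API::RemoteObject*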
def create_user_type_definition(domain_name, type):
# pylint: disable=W0622
return {
"return_type": "std::unique_ptr<protocol::%s::%s>" % (domain_name, type["id"]),
"pass_type": "std::unique_ptr<protocol::%s::%s>" % (domain_name, type["id"]),
"return_type": "std::unique_ptr<protocol::%s::%s>" % (
domain_name, type["id"]),
"pass_type": "std::unique_ptr<protocol::%s::%s>" % (
domain_name, type["id"]),
"to_raw_type": "%s.get()",
"to_pass_type": "std::move(%s)",
"to_rvalue": "std::move(%s)",
......@@ -338,6 +355,7 @@ def wrap_array_definition(type):
class Protocol(object):
def __init__(self, config):
self.config = config
self.json_api = {"domains": []}
......@@ -347,24 +365,26 @@ class Protocol(object):
if config.protocol.options:
self.generate_domains = [rule.domain for rule in config.protocol.options]
self.exported_domains = [rule.domain for rule in config.protocol.options if hasattr(rule, "exported")]
self.exported_domains = [rule.domain for rule in config.protocol.options
if hasattr(rule, "exported")]
if config.imported:
self.imported_domains = self.read_protocol_file(config.imported.path)
if config.imported.options:
self.imported_domains = [rule.domain for rule in config.imported.options]
self.imported_domains = [rule.domain
for rule in config.imported.options]
self.patch_full_qualified_refs()
self.create_notification_types()
self.create_type_definitions()
self.generate_used_types()
def read_protocol_file(self, file_name):
input_file = open(file_name, "r")
parsed_json = pdl.loads(input_file.read(), file_name)
input_file.close()
version = parsed_json["version"]["major"] + "." + parsed_json["version"]["minor"]
version = '%s.%s' % (parsed_json["version"]["major"],
parsed_json["version"]["minor"])
domains = []
for domain in parsed_json["domains"]:
domains.append(domain["domain"])
......@@ -372,7 +392,6 @@ class Protocol(object):
self.json_api["domains"] += parsed_json["domains"]
return domains
def patch_full_qualified_refs(self):
def patch_full_qualified_refs_in_domain(json, domain_name):
if isinstance(json, list):
......@@ -393,7 +412,6 @@ class Protocol(object):
for domain in self.json_api["domains"]:
patch_full_qualified_refs_in_domain(domain, domain["domain"])
def all_references(self, json):
refs = set()
if isinstance(json, list):
......@@ -420,7 +438,8 @@ class Protocol(object):
for event in domain["events"]:
if self.generate_event(domain_name, event["name"]):
all_refs |= self.all_references(event)
all_refs.add(domain_name + "." + to_title_case(event["name"]) + "Notification")
all_refs.add('%s.%sNotification' % (domain_name,
to_title_case(event["name"])))
dependencies = self.generate_type_dependencies()
queue = set(all_refs)
......@@ -431,7 +450,6 @@ class Protocol(object):
all_refs |= dependencies[ref]
self.used_types = all_refs
def generate_type_dependencies(self):
dependencies = dict()
domains_with_types = (x for x in self.json_api["domains"] if "types" in x)
......@@ -443,7 +461,6 @@ class Protocol(object):
dependencies[domain_name + "." + type["id"]] = related_types
return dependencies
def create_notification_types(self):
for domain in self.json_api["domains"]:
if "events" in domain:
......@@ -458,9 +475,10 @@ class Protocol(object):
domain["types"] = list()
domain["types"].append(event_type)
def create_type_definitions(self):
imported_namespace = "::".join(self.config.imported.namespace) if self.config.imported else ""
imported_namespace = ""
if self.config.imported:
imported_namespace = "::".join(self.config.imported.namespace)
self.type_definitions = {}
self.type_definitions["number"] = create_primitive_type_definition("number")
self.type_definitions["integer"] = create_primitive_type_definition("integer")
......@@ -469,16 +487,20 @@ class Protocol(object):
self.type_definitions["any"] = create_any_type_definition()
self.type_definitions["binary"] = create_binary_type_definition()
for domain in self.json_api["domains"]:
self.type_definitions[domain["domain"] + ".string"] = create_string_type_definition()
self.type_definitions[domain["domain"] + ".binary"] = create_binary_type_definition()
self.type_definitions[domain["domain"] + ".string"] = (
create_string_type_definition())
self.type_definitions[domain["domain"] + ".binary"] = (
create_binary_type_definition())
if not ("types" in domain):
continue
for type in domain["types"]:
type_name = domain["domain"] + "." + type["id"]
if type["type"] == "object" and domain["domain"] in self.imported_domains:
self.type_definitions[type_name] = create_imported_type_definition(domain["domain"], type, imported_namespace)
self.type_definitions[type_name] = create_imported_type_definition(
domain["domain"], type, imported_namespace)
elif type["type"] == "object":
self.type_definitions[type_name] = create_user_type_definition(domain["domain"], type)
self.type_definitions[type_name] = create_user_type_definition(
domain["domain"], type)
elif type["type"] == "array":
self.type_definitions[type_name] = self.resolve_type(type)
elif type["type"] == domain["domain"] + ".string":
......@@ -486,10 +508,11 @@ class Protocol(object):
elif type["type"] == domain["domain"] + ".binary":
self.type_definitions[type_name] = create_binary_type_definition()
else:
self.type_definitions[type_name] = create_primitive_type_definition(type["type"])
self.type_definitions[type_name] = create_primitive_type_definition(
type["type"])
def check_options(self, options, domain, name, include_attr, exclude_attr, default):
def check_options(self, options, domain, name, include_attr, exclude_attr,
default):
for rule in options:
if rule.domain != domain:
continue
......@@ -503,11 +526,9 @@ class Protocol(object):
# ---- Beginning of methods exposed to the generator
def type_definition(self, name):
return self.type_definitions[name]
def resolve_type(self, prop):
if "$ref" in prop:
return self.type_definitions[prop["$ref"]]
......@@ -515,56 +536,53 @@ class Protocol(object):
return wrap_array_definition(self.resolve_type(prop["items"]))
return self.type_definitions[prop["type"]]
def generate_command(self, domain, command):
if not self.config.protocol.options:
return domain in self.generate_domains
return self.check_options(self.config.protocol.options, domain, command, "include", "exclude", True)
return self.check_options(self.config.protocol.options, domain, command,
"include", "exclude", True)
def generate_event(self, domain, event):
if not self.config.protocol.options:
return domain in self.generate_domains
return self.check_options(self.config.protocol.options, domain, event, "include_events", "exclude_events", True)
return self.check_options(self.config.protocol.options, domain, event,
"include_events", "exclude_events", True)
def generate_type(self, domain, typename):
return domain + "." + typename in self.used_types
def is_async_command(self, domain, command):
if not self.config.protocol.options:
return False
return self.check_options(self.config.protocol.options, domain, command, "async", None, False)
return self.check_options(self.config.protocol.options, domain, command,
"async", None, False)
def is_exported(self, domain, name):
if not self.config.protocol.options:
return False
return self.check_options(self.config.protocol.options, domain, name, "exported", None, False)
return self.check_options(self.config.protocol.options, domain, name,
"exported", None, False)
def is_imported(self, domain, name):
if not self.config.imported:
return False
if not self.config.imported.options:
return domain in self.imported_domains
return self.check_options(self.config.imported.options, domain, name, "imported", None, False)
return self.check_options(self.config.imported.options, domain, name,
"imported", None, False)
def is_exported_domain(self, domain):
return domain in self.exported_domains
def generate_disable(self, domain):
if "commands" not in domain:
return True
for command in domain["commands"]:
if command["name"] == "disable" and self.generate_command(domain["domain"], "disable"):
if command["name"] == "disable" and self.generate_command(
domain["domain"], "disable"):
return False
return True
def is_imported_dependency(self, domain):
return domain in self.generate_domains or domain in self.imported_domains
......@@ -575,12 +593,14 @@ def main():
protocol = Protocol(config)
if not config.exported and len(protocol.exported_domains):
sys.stderr.write("Domains [%s] are exported, but config is missing export entry\n\n" % ", ".join(protocol.exported_domains))
sys.stderr.write(("Domains [%s] are exported, but config is missing export "
"entry\n\n") % ", ".join(protocol.exported_domains))
exit(1)
if not os.path.exists(config.protocol.output):
os.mkdir(config.protocol.output)
if len(protocol.exported_domains) and not os.path.exists(config.exported.output):
if len(protocol.exported_domains) and not os.path.exists(
config.exported.output):
os.mkdir(config.exported.output)
jinja_env = initialize_jinja_env(jinja_dir, config.protocol.output, config)
......@@ -616,12 +636,18 @@ def main():
}
if domain["domain"] in protocol.generate_domains:
outputs[os.path.join(config.protocol.output, to_file_name(config, file_name + ".h"))] = h_template.render(template_context)
outputs[os.path.join(config.protocol.output, to_file_name(config, file_name + ".cpp"))] = cpp_template.render(template_context)
outputs[os.path.join(config.protocol.output, to_file_name(
config, file_name + ".h"))] = h_template.render(template_context)
outputs[os.path.join(config.protocol.output, to_file_name(
config, file_name + ".cpp"))] = cpp_template.render(template_context)
if domain["domain"] in protocol.exported_domains:
outputs[os.path.join(config.exported.output, to_file_name(config, file_name + ".h"))] = exported_template.render(template_context)
outputs[os.path.join(config.exported.output, to_file_name(
config, file_name + ".h"))] = exported_template.render(
template_context)
if domain["domain"] in protocol.imported_domains:
outputs[os.path.join(config.protocol.output, to_file_name(config, file_name + ".h"))] = imported_template.render(template_context)
outputs[os.path.join(config.protocol.output, to_file_name(
config, file_name + ".h"))] = imported_template.render(
template_context)
if config.lib:
template_context = {
......@@ -671,17 +697,23 @@ def main():
parts.append(template.render(template_context))
outputs[file_name] = "\n\n".join(parts)
generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Forward.h")), forward_h_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Protocol.h")), protocol_h_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Protocol.cpp")), protocol_cpp_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "base_string_adapter.h")), base_string_adapter_h_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "base_string_adapter.cc")), base_string_adapter_cc_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(
config, "Forward.h")), forward_h_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(
config, "Protocol.h")), protocol_h_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(
config, "Protocol.cpp")), protocol_cpp_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(
config, "base_string_adapter.h")), base_string_adapter_h_templates)
generate_lib_file(os.path.join(config.lib.output, to_file_name(
config, "base_string_adapter.cc")), base_string_adapter_cc_templates)
# Make gyp / make generators happy, otherwise make rebuilds the world.
inputs_ts = max(map(os.path.getmtime, inputs))
up_to_date = True
for output_file in outputs.keys():
if not os.path.exists(output_file) or os.path.getmtime(output_file) < inputs_ts:
if (not os.path.exists(output_file)
or os.path.getmtime(output_file) < inputs_ts):
up_to_date = False
break
if up_to_date:
......@@ -693,4 +725,5 @@ def main():
out_file.close()
main()
if __name__ == "__main__":
main()
......@@ -15,7 +15,9 @@ import pdl
def main(argv):
if len(argv) < 1:
sys.stderr.write("Usage: %s <protocol-1> [<protocol-2> [, <protocol-3>...]] <output-file>\n" % sys.argv[0])
sys.stderr.write(
"Usage: %s <protocol-1> [<protocol-2> [, <protocol-3>...]] "
"<output-file>\n" % sys.argv[0])
return 1
domains = []
......@@ -31,7 +33,8 @@ def main(argv):
version = parsed_json["version"]
output_file = open(argv[-1], "w")
json.dump({"version": version, "domains": domains}, output_file, indent=4, sort_keys=False, separators=(',', ': '))
json.dump({"version": version, "domains": domains}, output_file,
indent=4, sort_keys=False, separators=(',', ': '))
output_file.close()
......
......@@ -1370,7 +1370,7 @@ class JSONEncoder : public StreamingParserHandler {
// If we have enough bytes in our input, decode the remaining ones
// belonging to this Unicode character into |codepoint|.
if (ii + num_bytes_left > chars.size())
if (ii + num_bytes_left >= chars.size())
continue;
while (num_bytes_left > 0) {
c = chars[++ii];
......
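
The substantive change in this file is the boundary check: when the encoder sees a UTF-8 lead byte at index ii announcing num_bytes_left continuation bytes, those bytes occupy indices ii + 1 through ii + num_bytes_left, so the full sequence is present only if ii + num_bytes_left < chars.size(); the old > comparison accepted sequences that were one byte short. A minimal sketch of the corrected arithmetic (in Python, with a hypothetical helper name):

def has_complete_sequence(chars, ii, num_bytes_left):
    # chars[ii] is the lead byte; the continuation bytes would sit at indices
    # ii + 1 .. ii + num_bytes_left, so the highest index we need must be
    # strictly less than len(chars).
    return ii + num_bytes_left < len(chars)

globe = "🌎".encode("utf-8")                       # 4 bytes: lead + 3 continuation
assert has_complete_sequence(globe, 0, 3)          # complete sequence
assert not has_complete_sequence(globe[:3], 0, 3)  # truncated, as in the new test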
......@@ -1366,6 +1366,32 @@ TEST(JsonEncoder, OverlongEncodings) {
EXPECT_EQ("\"\"", out); // Empty string means that 0x7f was rejected (good).
}
TEST(JsonEncoder, IncompleteUtf8Sequence) {
std::string out;
Status status;
std::unique_ptr<StreamingParserHandler> writer =
NewJSONEncoder(&GetTestPlatform(), &out, &status);
writer->HandleArrayBegin(); // This emits [, which starts an array.
{ // 🌎 takes four bytes to encode in UTF-8. We test with only the first three;
// this means we're trying to emit a string that consists solely of an
// incomplete UTF-8 sequence, so the string in the JSON output is empty.
std::string world_utf8 = "🌎";
ASSERT_EQ(4u, world_utf8.size());
std::vector<uint8_t> chars(world_utf8.begin(), world_utf8.begin() + 3);
writer->HandleString8(SpanFrom(chars));
EXPECT_EQ("[\"\"", out); // Incomplete sequence rejected: empty string.
}
{ // This time, the incomplete sequence is at the end of the string.
std::string msg = "Hello, \xF0\x9F\x8C";
std::vector<uint8_t> chars(msg.begin(), msg.end());
writer->HandleString8(SpanFrom(chars));
EXPECT_EQ("[\"\",\"Hello, \"", out); // Incomplete sequence dropped at end.
}
}
TEST(JsonStdStringWriterTest, HelloWorld) {
std::string out;
Status status;
......
......@@ -12,7 +12,8 @@ import sys
description = ''
primitiveTypes = ['integer', 'number', 'boolean', 'string', 'object', 'any', 'array', 'binary']
primitiveTypes = ['integer', 'number', 'boolean', 'string', 'object',
'any', 'array', 'binary']
def assignType(item, type, is_array=False, map_binary_to_string=False):
......@@ -74,9 +75,11 @@ def parse(data, file_name, map_binary_to_string=False):
if len(trimLine) == 0:
continue
match = re.compile(r'^(experimental )?(deprecated )?domain (.*)').match(line)
match = re.compile(
r'^(experimental )?(deprecated )?domain (.*)').match(line)
if match:
domain = createItem({'domain' : match.group(3)}, match.group(1), match.group(2))
domain = createItem({'domain' : match.group(3)}, match.group(1),
match.group(2))
protocol['domains'].append(domain)
continue
......@@ -87,7 +90,8 @@ def parse(data, file_name, map_binary_to_string=False):
domain['dependencies'].append(match.group(1))
continue
match = re.compile(r'^ (experimental )?(deprecated )?type (.*) extends (array of )?([^\s]+)').match(line)
match = re.compile(r'^ (experimental )?(deprecated )?type (.*) '
r'extends (array of )?([^\s]+)').match(line)
if match:
if 'types' not in domain:
domain['types'] = []
......@@ -96,7 +100,8 @@ def parse(data, file_name, map_binary_to_string=False):
domain['types'].append(item)
continue
match = re.compile(r'^ (experimental )?(deprecated )?(command|event) (.*)').match(line)
match = re.compile(
r'^ (experimental )?(deprecated )?(command|event) (.*)').match(line)
if match:
list = []
if match.group(3) == 'command':
......@@ -114,7 +119,9 @@ def parse(data, file_name, map_binary_to_string=False):
list.append(item)
continue
match = re.compile(r'^ (experimental )?(deprecated )?(optional )?(array of )?([^\s]+) ([^\s]+)').match(line)
match = re.compile(
r'^ (experimental )?(deprecated )?(optional )?'
r'(array of )?([^\s]+) ([^\s]+)').match(line)
if match:
param = createItem({}, match.group(1), match.group(2), match.group(6))
if match.group(3):
......
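
For context, these regular expressions consume the line-oriented .pdl format. A minimal sketch of the domain pattern against a made-up line (the domain name is hypothetical; real protocol definitions ship with V8 and Chromium):

import re

line = "experimental domain HypotheticalDomain"
domain_re = re.compile(r'^(experimental )?(deprecated )?domain (.*)')
match = domain_re.match(line)
print(bool(match.group(1)))  # True  -> flagged experimental
print(match.group(3))        # HypotheticalDomain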