Roll inspector_protocol to 0d4255502019144a5dec5669d7992165ae8924e7.

Change-Id: I3711883a4cff11f71cca10054e4aac11293f5293
Reviewed-on: https://chromium-review.googlesource.com/1139095
Reviewed-by: Dmitry Gozman <dgozman@chromium.org>
Commit-Queue: Johannes Henkel <johannes@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54503}

commit b102970c46 (parent a8cb6a7218)
@@ -1,479 +1,11 @@
#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Inspector protocol validator.
#
# Tests that subsequent protocol changes are not breaking backwards compatibility.
# Following violations are reported:
#
# - Domain has been removed
# - Command has been removed
# - Required command parameter was added or changed from optional
# - Required response parameter was removed or changed to optional
# - Event has been removed
# - Required event parameter was removed or changed to optional
# - Parameter type has changed.
#
# For the parameters with composite types the above checks are also applied
# recursively to every property of the type.
#
# Adding --show_changes to the command line prints out a list of valid public API changes.
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import copy
import os.path
import optparse
import sys

try:
    import json
except ImportError:
    import simplejson as json
import check_protocol_compatibility


def list_to_map(items, key):
    result = {}
    for item in items:
        if "experimental" not in item and "hidden" not in item:
            result[item[key]] = item
    return result


def named_list_to_map(container, name, key):
    if name in container:
        return list_to_map(container[name], key)
    return {}


def removed(reverse):
    if reverse:
        return "added"
    return "removed"


def required(reverse):
    if reverse:
        return "optional"
    return "required"


def compare_schemas(d_1, d_2, reverse):
    errors = []
    domains_1 = copy.deepcopy(d_1)
    domains_2 = copy.deepcopy(d_2)
    types_1 = normalize_types_in_schema(domains_1)
    types_2 = normalize_types_in_schema(domains_2)

    domains_by_name_1 = list_to_map(domains_1, "domain")
    domains_by_name_2 = list_to_map(domains_2, "domain")

    for name in domains_by_name_1:
        domain_1 = domains_by_name_1[name]
        if name not in domains_by_name_2:
            errors.append("%s: domain has been %s" % (name, removed(reverse)))
            continue
        compare_domains(domain_1, domains_by_name_2[name], types_1, types_2, errors, reverse)
    return errors
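# Illustration (a sketch, not part of the vendored file): comparing a schema
# that dropped a domain against its baseline yields a readable error string.
#   old = [{"domain": "Page"}, {"domain": "Gone"}]
#   new = [{"domain": "Page"}]
#   compare_schemas(old, new, False)  # -> ["Gone: domain has been removed"]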
def compare_domains(domain_1, domain_2, types_map_1, types_map_2, errors, reverse):
    domain_name = domain_1["domain"]
    commands_1 = named_list_to_map(domain_1, "commands", "name")
    commands_2 = named_list_to_map(domain_2, "commands", "name")
    for name in commands_1:
        command_1 = commands_1[name]
        if name not in commands_2:
            errors.append("%s.%s: command has been %s" % (domain_1["domain"], name, removed(reverse)))
            continue
        compare_commands(domain_name, command_1, commands_2[name], types_map_1, types_map_2, errors, reverse)

    events_1 = named_list_to_map(domain_1, "events", "name")
    events_2 = named_list_to_map(domain_2, "events", "name")
    for name in events_1:
        event_1 = events_1[name]
        if name not in events_2:
            errors.append("%s.%s: event has been %s" % (domain_1["domain"], name, removed(reverse)))
            continue
        compare_events(domain_name, event_1, events_2[name], types_map_1, types_map_2, errors, reverse)


def compare_commands(domain_name, command_1, command_2, types_map_1, types_map_2, errors, reverse):
    context = domain_name + "." + command_1["name"]

    params_1 = named_list_to_map(command_1, "parameters", "name")
    params_2 = named_list_to_map(command_2, "parameters", "name")
    # Note the reversed order: we allow removing but forbid adding parameters.
    compare_params_list(context, "parameter", params_2, params_1, types_map_2, types_map_1, 0, errors, not reverse)

    returns_1 = named_list_to_map(command_1, "returns", "name")
    returns_2 = named_list_to_map(command_2, "returns", "name")
    compare_params_list(context, "response parameter", returns_1, returns_2, types_map_1, types_map_2, 0, errors, reverse)


def compare_events(domain_name, event_1, event_2, types_map_1, types_map_2, errors, reverse):
    context = domain_name + "." + event_1["name"]
    params_1 = named_list_to_map(event_1, "parameters", "name")
    params_2 = named_list_to_map(event_2, "parameters", "name")
    compare_params_list(context, "parameter", params_1, params_2, types_map_1, types_map_2, 0, errors, reverse)


def compare_params_list(context, kind, params_1, params_2, types_map_1, types_map_2, depth, errors, reverse):
    for name in params_1:
        param_1 = params_1[name]
        if name not in params_2:
            if "optional" not in param_1:
                errors.append("%s.%s: required %s has been %s" % (context, name, kind, removed(reverse)))
            continue

        param_2 = params_2[name]
        if param_2 and "optional" in param_2 and "optional" not in param_1:
            errors.append("%s.%s: %s %s is now %s" % (context, name, required(reverse), kind, required(not reverse)))
            continue
        type_1 = extract_type(param_1, types_map_1, errors)
        type_2 = extract_type(param_2, types_map_2, errors)
        compare_types(context + "." + name, kind, type_1, type_2, types_map_1, types_map_2, depth, errors, reverse)


def compare_types(context, kind, type_1, type_2, types_map_1, types_map_2, depth, errors, reverse):
    if depth > 5:
        return

    base_type_1 = type_1["type"]
    base_type_2 = type_2["type"]

    if base_type_1 != base_type_2:
        errors.append("%s: %s base type mismatch, '%s' vs '%s'" % (context, kind, base_type_1, base_type_2))
    elif base_type_1 == "object":
        params_1 = named_list_to_map(type_1, "properties", "name")
        params_2 = named_list_to_map(type_2, "properties", "name")
        # If both parameters have the same named type use it in the context.
        if "id" in type_1 and "id" in type_2 and type_1["id"] == type_2["id"]:
            type_name = type_1["id"]
        else:
            type_name = "<object>"
        context += " %s->%s" % (kind, type_name)
        compare_params_list(context, "property", params_1, params_2, types_map_1, types_map_2, depth + 1, errors, reverse)
    elif base_type_1 == "array":
        item_type_1 = extract_type(type_1["items"], types_map_1, errors)
        item_type_2 = extract_type(type_2["items"], types_map_2, errors)
        compare_types(context, kind, item_type_1, item_type_2, types_map_1, types_map_2, depth + 1, errors, reverse)


def extract_type(typed_object, types_map, errors):
    if "type" in typed_object:
        result = {"id": "<transient>", "type": typed_object["type"]}
        if typed_object["type"] == "object":
            result["properties"] = []
        elif typed_object["type"] == "array":
            result["items"] = typed_object["items"]
        return result
    elif "$ref" in typed_object:
        ref = typed_object["$ref"]
        if ref not in types_map:
            errors.append("Can not resolve type: %s" % ref)
            types_map[ref] = {"id": "<transient>", "type": "object"}
        return types_map[ref]


def normalize_types_in_schema(domains):
    types = {}
    for domain in domains:
        domain_name = domain["domain"]
        normalize_types(domain, domain_name, types)
    return types


def normalize_types(obj, domain_name, types):
    if isinstance(obj, list):
        for item in obj:
            normalize_types(item, domain_name, types)
    elif isinstance(obj, dict):
        for key, value in obj.items():
            if key == "$ref" and value.find(".") == -1:
                obj[key] = "%s.%s" % (domain_name, value)
            elif key == "id":
                obj[key] = "%s.%s" % (domain_name, value)
                types[obj[key]] = obj
            else:
                normalize_types(value, domain_name, types)
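# Illustration (a sketch, not part of the vendored file): with domain_name
# "Network", normalize_types rewrites a local {"$ref": "Headers"} in place to
# {"$ref": "Network.Headers"}, leaves already-qualified refs such as
# "IO.StreamHandle" alone, and registers each {"id": ...} type under its
# qualified name, e.g. types["Network.Request"].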
def load_schema(file_name, domains):
    # pylint: disable=W0613
    if not os.path.isfile(file_name):
        return
    input_file = open(file_name, "r")
    json_string = input_file.read()
    parsed_json = json.loads(json_string)
    domains += parsed_json["domains"]
    return parsed_json["version"]


def self_test():
    def create_test_schema_1():
        return [
            {
                "domain": "Network",
                "types": [
                    {
                        "id": "LoaderId",
                        "type": "string"
                    },
                    {
                        "id": "Headers",
                        "type": "object"
                    },
                    {
                        "id": "Request",
                        "type": "object",
                        "properties": [
                            {"name": "url", "type": "string"},
                            {"name": "method", "type": "string"},
                            {"name": "headers", "$ref": "Headers"},
                            {"name": "becameOptionalField", "type": "string"},
                            {"name": "removedField", "type": "string"},
                        ]
                    }
                ],
                "commands": [
                    {
                        "name": "removedCommand",
                    },
                    {
                        "name": "setExtraHTTPHeaders",
                        "parameters": [
                            {"name": "headers", "$ref": "Headers"},
                            {"name": "mismatched", "type": "string"},
                            {"name": "becameOptional", "$ref": "Headers"},
                            {"name": "removedRequired", "$ref": "Headers"},
                            {"name": "becameRequired", "$ref": "Headers", "optional": True},
                            {"name": "removedOptional", "$ref": "Headers", "optional": True},
                        ],
                        "returns": [
                            {"name": "mimeType", "type": "string"},
                            {"name": "becameOptional", "type": "string"},
                            {"name": "removedRequired", "type": "string"},
                            {"name": "becameRequired", "type": "string", "optional": True},
                            {"name": "removedOptional", "type": "string", "optional": True},
                        ]
                    }
                ],
                "events": [
                    {
                        "name": "requestWillBeSent",
                        "parameters": [
                            {"name": "frameId", "type": "string", "experimental": True},
                            {"name": "request", "$ref": "Request"},
                            {"name": "becameOptional", "type": "string"},
                            {"name": "removedRequired", "type": "string"},
                            {"name": "becameRequired", "type": "string", "optional": True},
                            {"name": "removedOptional", "type": "string", "optional": True},
                        ]
                    },
                    {
                        "name": "removedEvent",
                        "parameters": [
                            {"name": "errorText", "type": "string"},
                            {"name": "canceled", "type": "boolean", "optional": True}
                        ]
                    }
                ]
            },
            {
                "domain": "removedDomain"
            }
        ]

    def create_test_schema_2():
        return [
            {
                "domain": "Network",
                "types": [
                    {
                        "id": "LoaderId",
                        "type": "string"
                    },
                    {
                        "id": "Request",
                        "type": "object",
                        "properties": [
                            {"name": "url", "type": "string"},
                            {"name": "method", "type": "string"},
                            {"name": "headers", "type": "object"},
                            {"name": "becameOptionalField", "type": "string", "optional": True},
                        ]
                    }
                ],
                "commands": [
                    {
                        "name": "addedCommand",
                    },
                    {
                        "name": "setExtraHTTPHeaders",
                        "parameters": [
                            {"name": "headers", "type": "object"},
                            {"name": "mismatched", "type": "object"},
                            {"name": "becameOptional", "type": "object", "optional": True},
                            {"name": "addedRequired", "type": "object"},
                            {"name": "becameRequired", "type": "object"},
                            {"name": "addedOptional", "type": "object", "optional": True},
                        ],
                        "returns": [
                            {"name": "mimeType", "type": "string"},
                            {"name": "becameOptional", "type": "string", "optional": True},
                            {"name": "addedRequired", "type": "string"},
                            {"name": "becameRequired", "type": "string"},
                            {"name": "addedOptional", "type": "string", "optional": True},
                        ]
                    }
                ],
                "events": [
                    {
                        "name": "requestWillBeSent",
                        "parameters": [
                            {"name": "request", "$ref": "Request"},
                            {"name": "becameOptional", "type": "string", "optional": True},
                            {"name": "addedRequired", "type": "string"},
                            {"name": "becameRequired", "type": "string"},
                            {"name": "addedOptional", "type": "string", "optional": True},
                        ]
                    },
                    {
                        "name": "addedEvent"
                    }
                ]
            },
            {
                "domain": "addedDomain"
            }
        ]

    expected_errors = [
        "removedDomain: domain has been removed",
        "Network.removedCommand: command has been removed",
        "Network.removedEvent: event has been removed",
        "Network.setExtraHTTPHeaders.mismatched: parameter base type mismatch, 'object' vs 'string'",
        "Network.setExtraHTTPHeaders.addedRequired: required parameter has been added",
        "Network.setExtraHTTPHeaders.becameRequired: optional parameter is now required",
        "Network.setExtraHTTPHeaders.removedRequired: required response parameter has been removed",
        "Network.setExtraHTTPHeaders.becameOptional: required response parameter is now optional",
        "Network.requestWillBeSent.removedRequired: required parameter has been removed",
        "Network.requestWillBeSent.becameOptional: required parameter is now optional",
        "Network.requestWillBeSent.request parameter->Network.Request.removedField: required property has been removed",
        "Network.requestWillBeSent.request parameter->Network.Request.becameOptionalField: required property is now optional",
    ]

    expected_errors_reverse = [
        "addedDomain: domain has been added",
        "Network.addedEvent: event has been added",
        "Network.addedCommand: command has been added",
        "Network.setExtraHTTPHeaders.mismatched: parameter base type mismatch, 'string' vs 'object'",
        "Network.setExtraHTTPHeaders.removedRequired: required parameter has been removed",
        "Network.setExtraHTTPHeaders.becameOptional: required parameter is now optional",
        "Network.setExtraHTTPHeaders.addedRequired: required response parameter has been added",
        "Network.setExtraHTTPHeaders.becameRequired: optional response parameter is now required",
        "Network.requestWillBeSent.becameRequired: optional parameter is now required",
        "Network.requestWillBeSent.addedRequired: required parameter has been added",
    ]

    def is_subset(subset, superset, message):
        for i in range(len(subset)):
            if subset[i] not in superset:
                sys.stderr.write("%s error: %s\n" % (message, subset[i]))
                return False
        return True

    def errors_match(expected, actual):
        return (is_subset(actual, expected, "Unexpected") and
                is_subset(expected, actual, "Missing"))

    return (errors_match(expected_errors,
                         compare_schemas(create_test_schema_1(), create_test_schema_2(), False)) and
            errors_match(expected_errors_reverse,
                         compare_schemas(create_test_schema_2(), create_test_schema_1(), True)))


def load_domains_and_baselines(file_name, domains, baseline_domains):
    version = load_schema(os.path.normpath(file_name), domains)
    suffix = "-%s.%s.json" % (version["major"], version["minor"])
    baseline_file = file_name.replace(".json", suffix)
    load_schema(os.path.normpath(baseline_file), baseline_domains)
    return version


def main():
    if not self_test():
        sys.stderr.write("Self-test failed")
        return 1

    cmdline_parser = optparse.OptionParser()
    cmdline_parser.add_option("--show_changes")
    cmdline_parser.add_option("--expected_errors")
    cmdline_parser.add_option("--stamp")
    arg_options, arg_values = cmdline_parser.parse_args()

    if len(arg_values) < 1:
        sys.stderr.write("Usage: %s [--show_changes] <protocol-1> [, <protocol-2>...]\n" % sys.argv[0])
        return 1

    domains = []
    baseline_domains = []
    version = load_domains_and_baselines(arg_values[0], domains, baseline_domains)
    for dependency in arg_values[1:]:
        load_domains_and_baselines(dependency, domains, baseline_domains)

    expected_errors = []
    if arg_options.expected_errors:
        expected_errors_file = open(arg_options.expected_errors, "r")
        expected_errors = json.loads(expected_errors_file.read())["errors"]
        expected_errors_file.close()

    errors = compare_schemas(baseline_domains, domains, False)
    unexpected_errors = []
    for i in range(len(errors)):
        if errors[i] not in expected_errors:
            unexpected_errors.append(errors[i])
    if len(unexpected_errors) > 0:
        sys.stderr.write("  Compatibility checks FAILED\n")
        for error in unexpected_errors:
            sys.stderr.write("    %s\n" % error)
        return 1

    if arg_options.show_changes:
        changes = compare_schemas(domains, baseline_domains, True)
        if len(changes) > 0:
            print "  Public changes since %s:" % version
            for change in changes:
                print "    %s" % change

    if arg_options.stamp:
        with open(arg_options.stamp, 'a') as _:
            pass

if __name__ == '__main__':
    sys.exit(main())
sys.exit(check_protocol_compatibility.main())
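For illustration, a minimal invocation of the checker (file names are assumed
for the sketch, not taken from this commit):

    python check_protocol_compatibility.py --show_changes \
        --expected_errors expected_errors.json js_protocol.json

Here expected_errors.json carries {"errors": [...]}, the shape read via
json.loads(...)["errors"] in main() above.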
652  third_party/inspector_protocol/CodeGenerator.py  (vendored; Normal file → Executable file)
@@ -1,654 +1,6 @@
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os.path
import sys
import optparse
import collections
import functools
import re
import copy
try:
    import json
except ImportError:
    import simplejson as json

# Path handling for libraries and templates
# Paths have to be normalized because Jinja uses the exact template path to
# determine the hash used in the cache filename, and we need a pre-caching step
# to be concurrency-safe. Use absolute path because __file__ is absolute if
# module is imported, and relative if executed directly.
# If paths differ between pre-caching and individual file compilation, the cache
# is regenerated, which causes a race condition and breaks concurrent build,
# since some compile processes will try to read the partially written cache.
module_path, module_filename = os.path.split(os.path.realpath(__file__))


def read_config():
    # pylint: disable=W0703
    def json_to_object(data, output_base, config_base):
        def json_object_hook(object_dict):
            items = [(k, os.path.join(config_base, v) if k == "path" else v) for (k, v) in object_dict.items()]
            items = [(k, os.path.join(output_base, v) if k == "output" else v) for (k, v) in items]
            keys, values = zip(*items)
            return collections.namedtuple('X', keys)(*values)
        return json.loads(data, object_hook=json_object_hook)

    def init_defaults(config_tuple, path, defaults):
        keys = list(config_tuple._fields)  # pylint: disable=E1101
        values = [getattr(config_tuple, k) for k in keys]
        for i in xrange(len(keys)):
            if hasattr(values[i], "_fields"):
                values[i] = init_defaults(values[i], path + "." + keys[i], defaults)
        for optional in defaults:
            if optional.find(path + ".") != 0:
                continue
            optional_key = optional[len(path) + 1:]
            if optional_key.find(".") == -1 and optional_key not in keys:
                keys.append(optional_key)
                values.append(defaults[optional])
        return collections.namedtuple('X', keys)(*values)

    try:
        cmdline_parser = optparse.OptionParser()
        cmdline_parser.add_option("--output_base")
        cmdline_parser.add_option("--jinja_dir")
        cmdline_parser.add_option("--config")
        cmdline_parser.add_option("--config_value", action="append", type="string")
        arg_options, _ = cmdline_parser.parse_args()
        jinja_dir = arg_options.jinja_dir
        if not jinja_dir:
            raise Exception("jinja directory must be specified")
        jinja_dir = jinja_dir.decode('utf8')
        output_base = arg_options.output_base
        if not output_base:
            raise Exception("Base output directory must be specified")
        output_base = output_base.decode('utf8')
        config_file = arg_options.config
        if not config_file:
            raise Exception("Config file name must be specified")
        config_file = config_file.decode('utf8')
        config_base = os.path.dirname(config_file)
        config_values = arg_options.config_value
        if not config_values:
            config_values = []
    except Exception:
        # Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
        exc = sys.exc_info()[1]
        sys.stderr.write("Failed to parse command-line arguments: %s\n\n" % exc)
        exit(1)

    try:
        config_json_file = open(config_file, "r")
        config_json_string = config_json_file.read()
        config_partial = json_to_object(config_json_string, output_base, config_base)
        config_json_file.close()
        defaults = {
            ".use_snake_file_names": False,
            ".use_title_case_methods": False,
            ".imported": False,
            ".imported.export_macro": "",
            ".imported.export_header": False,
            ".imported.header": False,
            ".imported.package": False,
            ".imported.options": False,
            ".protocol.export_macro": "",
            ".protocol.export_header": False,
            ".protocol.options": False,
            ".exported": False,
            ".exported.export_macro": "",
            ".exported.export_header": False,
            ".lib": False,
            ".lib.export_macro": "",
            ".lib.export_header": False,
        }
        for key_value in config_values:
            parts = key_value.split("=")
            if len(parts) == 2:
                defaults["." + parts[0]] = parts[1]
        return (jinja_dir, config_file, init_defaults(config_partial, "", defaults))
    except Exception:
        # Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
        exc = sys.exc_info()[1]
        sys.stderr.write("Failed to parse config file: %s\n\n" % exc)
        exit(1)
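# Illustration (a sketch; keys limited to ones this script actually reads): a
# minimal --config JSON could look like
#   {
#     "protocol": {"path": "js_protocol.json", "output": "protocol"},
#     "lib": {"output": "protocol"},
#     "use_snake_file_names": false
#   }
# json_object_hook above resolves "path" entries against the config file's
# directory and "output" entries against --output_base.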
# ---- Begin of utilities exposed to generator ----


def to_title_case(name):
    return name[:1].upper() + name[1:]


def dash_to_camelcase(word):
    prefix = ""
    if word[0] == "-":
        prefix = "Negative"
        word = word[1:]
    return prefix + "".join(to_title_case(x) or "-" for x in word.split("-"))


def to_snake_case(name):
    return re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", name, sys.maxint).lower()
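# Illustration (sketch, not in the vendored file):
#   to_snake_case("TypeBuilder") == "type_builder"
#   dash_to_camelcase("-webkit-foo") == "NegativeWebkitFoo"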
def to_method_case(config, name):
    if config.use_title_case_methods:
        return to_title_case(name)
    return name


def join_arrays(dict, keys):
    result = []
    for key in keys:
        if key in dict:
            result += dict[key]
    return result


def format_include(config, header, file_name=None):
    if file_name is not None:
        header = header + "/" + file_name + ".h"
    header = "\"" + header + "\"" if header[0] not in "<\"" else header
    if config.use_snake_file_names:
        header = to_snake_case(header)
    return header


def to_file_name(config, file_name):
    if config.use_snake_file_names:
        return to_snake_case(file_name).replace(".cpp", ".cc")
    return file_name


# ---- End of utilities exposed to generator ----


def initialize_jinja_env(jinja_dir, cache_dir, config):
    # pylint: disable=F0401
    sys.path.insert(1, os.path.abspath(jinja_dir))
    import jinja2

    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(module_path),
        # Bytecode cache is not concurrency-safe unless pre-cached:
        # if pre-cached this is read-only, but writing creates a race condition.
        bytecode_cache=jinja2.FileSystemBytecodeCache(cache_dir),
        keep_trailing_newline=True,  # newline-terminate generated files
        lstrip_blocks=True,  # so can indent control flow tags
        trim_blocks=True)
    jinja_env.filters.update({"to_title_case": to_title_case, "dash_to_camelcase": dash_to_camelcase, "to_method_case": functools.partial(to_method_case, config)})
    jinja_env.add_extension("jinja2.ext.loopcontrols")
    return jinja_env


def create_imported_type_definition(domain_name, type, imported_namespace):
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
        "pass_type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
        "raw_type": "%s::%s::API::%s" % (imported_namespace, domain_name, type["id"]),
        "raw_pass_type": "%s::%s::API::%s*" % (imported_namespace, domain_name, type["id"]),
        "raw_return_type": "%s::%s::API::%s*" % (imported_namespace, domain_name, type["id"]),
    }


def create_user_type_definition(domain_name, type):
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<protocol::%s::%s>" % (domain_name, type["id"]),
        "pass_type": "std::unique_ptr<protocol::%s::%s>" % (domain_name, type["id"]),
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<protocol::%s::%s>" % (domain_name, type["id"]),
        "raw_type": "protocol::%s::%s" % (domain_name, type["id"]),
        "raw_pass_type": "protocol::%s::%s*" % (domain_name, type["id"]),
        "raw_return_type": "protocol::%s::%s*" % (domain_name, type["id"]),
    }


def create_object_type_definition():
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<protocol::DictionaryValue>",
        "pass_type": "std::unique_ptr<protocol::DictionaryValue>",
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<protocol::DictionaryValue>",
        "raw_type": "protocol::DictionaryValue",
        "raw_pass_type": "protocol::DictionaryValue*",
        "raw_return_type": "protocol::DictionaryValue*",
    }


def create_any_type_definition():
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<protocol::Value>",
        "pass_type": "std::unique_ptr<protocol::Value>",
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<protocol::Value>",
        "raw_type": "protocol::Value",
        "raw_pass_type": "protocol::Value*",
        "raw_return_type": "protocol::Value*",
    }


def create_string_type_definition():
    # pylint: disable=W0622
    return {
        "return_type": "String",
        "pass_type": "const String&",
        "to_pass_type": "%s",
        "to_raw_type": "%s",
        "to_rvalue": "%s",
        "type": "String",
        "raw_type": "String",
        "raw_pass_type": "const String&",
        "raw_return_type": "String",
    }


def create_primitive_type_definition(type):
    # pylint: disable=W0622
    typedefs = {
        "number": "double",
        "integer": "int",
        "boolean": "bool"
    }
    defaults = {
        "number": "0",
        "integer": "0",
        "boolean": "false"
    }
    jsontypes = {
        "number": "TypeDouble",
        "integer": "TypeInteger",
        "boolean": "TypeBoolean",
    }
    return {
        "return_type": typedefs[type],
        "pass_type": typedefs[type],
        "to_pass_type": "%s",
        "to_raw_type": "%s",
        "to_rvalue": "%s",
        "type": typedefs[type],
        "raw_type": typedefs[type],
        "raw_pass_type": typedefs[type],
        "raw_return_type": typedefs[type],
        "default_value": defaults[type]
    }


def wrap_array_definition(type):
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<protocol::Array<%s>>" % type["raw_type"],
        "pass_type": "std::unique_ptr<protocol::Array<%s>>" % type["raw_type"],
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<protocol::Array<%s>>" % type["raw_type"],
        "raw_type": "protocol::Array<%s>" % type["raw_type"],
        "raw_pass_type": "protocol::Array<%s>*" % type["raw_type"],
        "raw_return_type": "protocol::Array<%s>*" % type["raw_type"],
        "out_type": "protocol::Array<%s>&" % type["raw_type"],
    }


class Protocol(object):
    def __init__(self, config):
        self.config = config
        self.json_api = {"domains": []}
        self.imported_domains = []
        self.exported_domains = []
        self.generate_domains = self.read_protocol_file(config.protocol.path)

        if config.protocol.options:
            self.generate_domains = [rule.domain for rule in config.protocol.options]
            self.exported_domains = [rule.domain for rule in config.protocol.options if hasattr(rule, "exported")]

        if config.imported:
            self.imported_domains = self.read_protocol_file(config.imported.path)
            if config.imported.options:
                self.imported_domains = [rule.domain for rule in config.imported.options]

        self.patch_full_qualified_refs()
        self.create_notification_types()
        self.create_type_definitions()
        self.generate_used_types()

    def read_protocol_file(self, file_name):
        input_file = open(file_name, "r")
        json_string = input_file.read()
        input_file.close()
        parsed_json = json.loads(json_string)
        version = parsed_json["version"]["major"] + "." + parsed_json["version"]["minor"]
        domains = []
        for domain in parsed_json["domains"]:
            domains.append(domain["domain"])
            domain["version"] = version
        self.json_api["domains"] += parsed_json["domains"]
        return domains

    def patch_full_qualified_refs(self):
        def patch_full_qualified_refs_in_domain(json, domain_name):
            if isinstance(json, list):
                for item in json:
                    patch_full_qualified_refs_in_domain(item, domain_name)
            if not isinstance(json, dict):
                return
            for key in json:
                if key == "type" and json[key] == "string":
                    json[key] = domain_name + ".string"
                if key != "$ref":
                    patch_full_qualified_refs_in_domain(json[key], domain_name)
                    continue
                if json["$ref"].find(".") == -1:
                    json["$ref"] = domain_name + "." + json["$ref"]
            return

        for domain in self.json_api["domains"]:
            patch_full_qualified_refs_in_domain(domain, domain["domain"])

    def all_references(self, json):
        refs = set()
        if isinstance(json, list):
            for item in json:
                refs |= self.all_references(item)
        if not isinstance(json, dict):
            return refs
        for key in json:
            if key != "$ref":
                refs |= self.all_references(json[key])
            else:
                refs.add(json["$ref"])
        return refs

    def generate_used_types(self):
        all_refs = set()
        for domain in self.json_api["domains"]:
            domain_name = domain["domain"]
            if "commands" in domain:
                for command in domain["commands"]:
                    if self.generate_command(domain_name, command["name"]):
                        all_refs |= self.all_references(command)
            if "events" in domain:
                for event in domain["events"]:
                    if self.generate_event(domain_name, event["name"]):
                        all_refs |= self.all_references(event)
                        all_refs.add(domain_name + "." + to_title_case(event["name"]) + "Notification")

        dependencies = self.generate_type_dependencies()
        queue = set(all_refs)
        while len(queue):
            ref = queue.pop()
            if ref in dependencies:
                queue |= dependencies[ref] - all_refs
                all_refs |= dependencies[ref]
        self.used_types = all_refs

    def generate_type_dependencies(self):
        dependencies = dict()
        domains_with_types = (x for x in self.json_api["domains"] if "types" in x)
        for domain in domains_with_types:
            domain_name = domain["domain"]
            for type in domain["types"]:
                related_types = self.all_references(type)
                if len(related_types):
                    dependencies[domain_name + "." + type["id"]] = related_types
        return dependencies

    def create_notification_types(self):
        for domain in self.json_api["domains"]:
            if "events" in domain:
                for event in domain["events"]:
                    event_type = dict()
                    event_type["description"] = "Wrapper for notification params"
                    event_type["type"] = "object"
                    event_type["id"] = to_title_case(event["name"]) + "Notification"
                    if "parameters" in event:
                        event_type["properties"] = copy.deepcopy(event["parameters"])
                    if "types" not in domain:
                        domain["types"] = list()
                    domain["types"].append(event_type)

    def create_type_definitions(self):
        imported_namespace = "::".join(self.config.imported.namespace) if self.config.imported else ""
        self.type_definitions = {}
        self.type_definitions["number"] = create_primitive_type_definition("number")
        self.type_definitions["integer"] = create_primitive_type_definition("integer")
        self.type_definitions["boolean"] = create_primitive_type_definition("boolean")
        self.type_definitions["object"] = create_object_type_definition()
        self.type_definitions["any"] = create_any_type_definition()
        for domain in self.json_api["domains"]:
            self.type_definitions[domain["domain"] + ".string"] = create_string_type_definition()
            if not ("types" in domain):
                continue
            for type in domain["types"]:
                type_name = domain["domain"] + "." + type["id"]
                if type["type"] == "object" and domain["domain"] in self.imported_domains:
                    self.type_definitions[type_name] = create_imported_type_definition(domain["domain"], type, imported_namespace)
                elif type["type"] == "object":
                    self.type_definitions[type_name] = create_user_type_definition(domain["domain"], type)
                elif type["type"] == "array":
                    items_type = type["items"]["type"]
                    self.type_definitions[type_name] = wrap_array_definition(self.type_definitions[items_type])
                elif type["type"] == domain["domain"] + ".string":
                    self.type_definitions[type_name] = create_string_type_definition()
                else:
                    self.type_definitions[type_name] = create_primitive_type_definition(type["type"])

    def check_options(self, options, domain, name, include_attr, exclude_attr, default):
        for rule in options:
            if rule.domain != domain:
                continue
            if include_attr and hasattr(rule, include_attr):
                return name in getattr(rule, include_attr)
            if exclude_attr and hasattr(rule, exclude_attr):
                return name not in getattr(rule, exclude_attr)
            return default
        return False

    # ---- Begin of methods exposed to generator

    def type_definition(self, name):
        return self.type_definitions[name]

    def resolve_type(self, prop):
        if "$ref" in prop:
            return self.type_definitions[prop["$ref"]]
        if prop["type"] == "array":
            return wrap_array_definition(self.resolve_type(prop["items"]))
        return self.type_definitions[prop["type"]]
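    # Illustration (sketch, not in the vendored file): for a property
    #   {"type": "array", "items": {"$ref": "Network.Cookie"}}
    # resolve_type wraps the referenced user type, so its "type" becomes
    # "std::unique_ptr<protocol::Array<protocol::Network::Cookie>>".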
    def generate_command(self, domain, command):
        if not self.config.protocol.options:
            return domain in self.generate_domains
        return self.check_options(self.config.protocol.options, domain, command, "include", "exclude", True)

    def generate_event(self, domain, event):
        if not self.config.protocol.options:
            return domain in self.generate_domains
        return self.check_options(self.config.protocol.options, domain, event, "include_events", "exclude_events", True)

    def generate_type(self, domain, typename):
        return domain + "." + typename in self.used_types

    def is_async_command(self, domain, command):
        if not self.config.protocol.options:
            return False
        return self.check_options(self.config.protocol.options, domain, command, "async", None, False)

    def is_exported(self, domain, name):
        if not self.config.protocol.options:
            return False
        return self.check_options(self.config.protocol.options, domain, name, "exported", None, False)

    def is_imported(self, domain, name):
        if not self.config.imported:
            return False
        if not self.config.imported.options:
            return domain in self.imported_domains
        return self.check_options(self.config.imported.options, domain, name, "imported", None, False)

    def is_exported_domain(self, domain):
        return domain in self.exported_domains

    def generate_disable(self, domain):
        if "commands" not in domain:
            return True
        for command in domain["commands"]:
            if command["name"] == "disable" and self.generate_command(domain["domain"], "disable"):
                return False
        return True

    def is_imported_dependency(self, domain):
        return domain in self.generate_domains or domain in self.imported_domains


def main():
    jinja_dir, config_file, config = read_config()

    protocol = Protocol(config)

    if not config.exported and len(protocol.exported_domains):
        sys.stderr.write("Domains [%s] are exported, but config is missing export entry\n\n" % ", ".join(protocol.exported_domains))
        exit(1)

    if not os.path.exists(config.protocol.output):
        os.mkdir(config.protocol.output)
    if len(protocol.exported_domains) and not os.path.exists(config.exported.output):
        os.mkdir(config.exported.output)
    jinja_env = initialize_jinja_env(jinja_dir, config.protocol.output, config)

    inputs = []
    inputs.append(__file__)
    inputs.append(config_file)
    inputs.append(config.protocol.path)
    if config.imported:
        inputs.append(config.imported.path)
    templates_dir = os.path.join(module_path, "templates")
    inputs.append(os.path.join(templates_dir, "TypeBuilder_h.template"))
    inputs.append(os.path.join(templates_dir, "TypeBuilder_cpp.template"))
    inputs.append(os.path.join(templates_dir, "Exported_h.template"))
    inputs.append(os.path.join(templates_dir, "Imported_h.template"))

    h_template = jinja_env.get_template("templates/TypeBuilder_h.template")
    cpp_template = jinja_env.get_template("templates/TypeBuilder_cpp.template")
    exported_template = jinja_env.get_template("templates/Exported_h.template")
    imported_template = jinja_env.get_template("templates/Imported_h.template")

    outputs = dict()

    for domain in protocol.json_api["domains"]:
        class_name = domain["domain"]
        template_context = {
            "protocol": protocol,
            "config": config,
            "domain": domain,
            "join_arrays": join_arrays,
            "format_include": functools.partial(format_include, config),
        }

        if domain["domain"] in protocol.generate_domains:
            outputs[os.path.join(config.protocol.output, to_file_name(config, class_name + ".h"))] = h_template.render(template_context)
            outputs[os.path.join(config.protocol.output, to_file_name(config, class_name + ".cpp"))] = cpp_template.render(template_context)
            if domain["domain"] in protocol.exported_domains:
                outputs[os.path.join(config.exported.output, to_file_name(config, class_name + ".h"))] = exported_template.render(template_context)
        if domain["domain"] in protocol.imported_domains:
            outputs[os.path.join(config.protocol.output, to_file_name(config, class_name + ".h"))] = imported_template.render(template_context)

    if config.lib:
        template_context = {
            "config": config,
            "format_include": functools.partial(format_include, config),
        }

        lib_templates_dir = os.path.join(module_path, "lib")
        # Note these should be sorted in the right order.
        # TODO(dgozman): sort them programmatically based on commented includes.
        lib_h_templates = [
            "Collections_h.template",
            "ErrorSupport_h.template",
            "Values_h.template",
            "Object_h.template",
            "ValueConversions_h.template",
            "Maybe_h.template",
            "Array_h.template",
            "DispatcherBase_h.template",
            "Parser_h.template",
        ]

        lib_cpp_templates = [
            "Protocol_cpp.template",
            "ErrorSupport_cpp.template",
            "Values_cpp.template",
            "Object_cpp.template",
            "DispatcherBase_cpp.template",
            "Parser_cpp.template",
        ]

        forward_h_templates = [
            "Forward_h.template",
            "Allocator_h.template",
            "FrontendChannel_h.template",
        ]

        def generate_lib_file(file_name, template_files):
            parts = []
            for template_file in template_files:
                inputs.append(os.path.join(lib_templates_dir, template_file))
                template = jinja_env.get_template("lib/" + template_file)
                parts.append(template.render(template_context))
            outputs[file_name] = "\n\n".join(parts)

        generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Forward.h")), forward_h_templates)
        generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Protocol.h")), lib_h_templates)
        generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Protocol.cpp")), lib_cpp_templates)

    # Make gyp / make generators happy, otherwise make rebuilds world.
    inputs_ts = max(map(os.path.getmtime, inputs))
    up_to_date = True
    for output_file in outputs.iterkeys():
        if not os.path.exists(output_file) or os.path.getmtime(output_file) < inputs_ts:
            up_to_date = False
            break
    if up_to_date:
        sys.exit()

    for file_name, content in outputs.iteritems():
        out_file = open(file_name, "w")
        out_file.write(content)
        out_file.close()


main()
import code_generator
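For illustration, a typical invocation of the generator (directory and file
names are assumed for the sketch, not taken from this commit):

    python CodeGenerator.py --jinja_dir third_party/jinja2 \
        --output_base gen --config inspector_protocol_config.json

--config_value key=value entries feed the defaults table consulted by
init_defaults() for keys the config JSON leaves unset.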
183  third_party/inspector_protocol/ConvertProtocolToJSON.py  (vendored; Normal file → Executable file)
@@ -1,183 +1,10 @@
# Copyright 2017 The Chromium Authors. All rights reserved.
#!/usr/bin/env python
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections
import json
import os.path
import re
import sys

file_name = None
description = ''

primitiveTypes = ['integer', 'number', 'boolean', 'string', 'object', 'any', 'array']
import convert_protocol_to_json


def assignType(item, type, isArray=False):
    if isArray:
        item['type'] = 'array'
        item['items'] = collections.OrderedDict()
        assignType(item['items'], type)
        return

    if type == 'enum':
        type = 'string'
    if type in primitiveTypes:
        item['type'] = type
    else:
        item['$ref'] = type


def createItem(d, experimental, deprecated, name=None):
    result = collections.OrderedDict(d)
    if name:
        result['name'] = name
    global description
    if description:
        result['description'] = description.strip()
    if experimental:
        result['experimental'] = True
    if deprecated:
        result['deprecated'] = True
    return result


def parse(data):
    protocol = collections.OrderedDict()
    protocol['version'] = collections.OrderedDict()
    protocol['domains'] = []
    domain = None
    item = None
    subitems = None
    nukeDescription = False
    global description
    lines = data.split('\n')
    for i in range(0, len(lines)):
        if nukeDescription:
            description = ''
            nukeDescription = False
        line = lines[i]
        trimLine = line.strip()

        if trimLine.startswith('#'):
            if len(description):
                description += '\n'
            description += trimLine[2:]
            continue
        else:
            nukeDescription = True

        if len(trimLine) == 0:
            continue

        match = re.compile('^(experimental )?(deprecated )?domain (.*)').match(line)
        if match:
            domain = createItem({'domain' : match.group(3)}, match.group(1), match.group(2))
            protocol['domains'].append(domain)
            continue

        match = re.compile('^  depends on ([^\s]+)').match(line)
        if match:
            if 'dependencies' not in domain:
                domain['dependencies'] = []
            domain['dependencies'].append(match.group(1))
            continue

        match = re.compile('^  (experimental )?(deprecated )?type (.*) extends (array of )?([^\s]+)').match(line)
        if match:
            if 'types' not in domain:
                domain['types'] = []
            item = createItem({'id': match.group(3)}, match.group(1), match.group(2))
            assignType(item, match.group(5), match.group(4))
            domain['types'].append(item)
            continue

        match = re.compile('^  (experimental )?(deprecated )?(command|event) (.*)').match(line)
        if match:
            list = []
            if match.group(3) == 'command':
                if 'commands' in domain:
                    list = domain['commands']
                else:
                    list = domain['commands'] = []
            else:
                if 'events' in domain:
                    list = domain['events']
                else:
                    list = domain['events'] = []

            item = createItem({}, match.group(1), match.group(2), match.group(4))
            list.append(item)
            continue

        match = re.compile('^      (experimental )?(deprecated )?(optional )?(array of )?([^\s]+) ([^\s]+)').match(line)
        if match:
            param = createItem({}, match.group(1), match.group(2), match.group(6))
            if match.group(3):
                param['optional'] = True
            assignType(param, match.group(5), match.group(4))
            if match.group(5) == 'enum':
                enumliterals = param['enum'] = []
            subitems.append(param)
            continue

        match = re.compile('^    (parameters|returns|properties)').match(line)
        if match:
            subitems = item[match.group(1)] = []
            continue

        match = re.compile('^    enum').match(line)
        if match:
            enumliterals = item['enum'] = []
            continue

        match = re.compile('^version').match(line)
        if match:
            continue

        match = re.compile('^  major (\d+)').match(line)
        if match:
            protocol['version']['major'] = match.group(1)
            continue

        match = re.compile('^  minor (\d+)').match(line)
        if match:
            protocol['version']['minor'] = match.group(1)
            continue

        match = re.compile('^    redirect ([^\s]+)').match(line)
        if match:
            item['redirect'] = match.group(1)
            continue

        match = re.compile('^      (  )?[^\s]+$').match(line)
        if match:
            # enum literal
            enumliterals.append(trimLine)
            continue

        print 'Error in %s:%s, illegal token: \t%s' % (file_name, i, line)
        sys.exit(1)
    return protocol
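# Illustration (a sketch of the .pdl grammar this parser accepts; the snippet
# is invented, not from this commit):
#
#   domain Network
#     type LoaderId extends string
#     command setExtraHTTPHeaders
#       parameters
#         Headers headers
#
# parse() turns this into {"domains": [{"domain": "Network", "types": [...],
# "commands": [...]}]}, with the two-space indentation levels selecting the
# regexes above.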
def main(argv):
    if len(argv) < 2:
        sys.stderr.write("Usage: %s <protocol.pdl> <protocol.json>\n" % sys.argv[0])
        return 1
    global file_name
    file_name = os.path.normpath(argv[0])
    input_file = open(file_name, "r")
    pdl_string = input_file.read()
    protocol = parse(pdl_string)
    output_file = open(argv[0].replace('.pdl', '.json'), 'wb')
    json.dump(protocol, output_file, indent=4, separators=(',', ': '))
    output_file.close()

    output_file = open(os.path.normpath(argv[1]), 'wb')
    json.dump(protocol, output_file, indent=4, separators=(',', ': '))
    output_file.close()


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
def main():
    convert_protocol_to_json.main()
10  third_party/inspector_protocol/README.md  (vendored; Normal file)
@@ -0,0 +1,10 @@
# Chromium inspector (devtools) protocol

This package contains code generators and templates for the Chromium
inspector protocol.

In the Chromium tree, it's rolled into
https://cs.chromium.org/chromium/src/third_party/inspector_protocol/

In the V8 tree, it's rolled into
https://cs.chromium.org/chromium/src/v8/third_party/inspector_protocol/
2  third_party/inspector_protocol/README.v8  (vendored)
@@ -2,7 +2,7 @@ Name: inspector protocol
Short Name: inspector_protocol
URL: https://chromium.googlesource.com/deps/inspector_protocol/
Version: 0
Revision: 752d4abd13119010cf30e454e8ef9b5fb7ef43a3
Revision: 0d4255502019144a5dec5669d7992165ae8924e7
License: BSD
License File: LICENSE
Security Critical: no
482  third_party/inspector_protocol/check_protocol_compatibility.py  (vendored; Executable file)
@ -0,0 +1,482 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2011 Google Inc. All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
#
|
||||
# Inspector protocol validator.
|
||||
#
|
||||
# Tests that subsequent protocol changes are not breaking backwards compatibility.
|
||||
# Following violations are reported:
|
||||
#
|
||||
# - Domain has been removed
|
||||
# - Command has been removed
|
||||
# - Required command parameter was added or changed from optional
|
||||
# - Required response parameter was removed or changed to optional
|
||||
# - Event has been removed
|
||||
# - Required event parameter was removed or changed to optional
|
||||
# - Parameter type has changed.
|
||||
#
|
||||
# For the parameters with composite types the above checks are also applied
|
||||
# recursively to every property of the type.
|
||||
#
|
||||
# Adding --show_changes to the command line prints out a list of valid public API changes.
|
||||
|
||||
import copy
|
||||
import os.path
|
||||
import optparse
|
||||
import sys
|
||||
|
||||
import pdl
|
||||
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
import simplejson as json
|
||||
|
||||
|
||||
def list_to_map(items, key):
|
||||
result = {}
|
||||
for item in items:
|
||||
if "experimental" not in item and "hidden" not in item:
|
||||
result[item[key]] = item
|
||||
return result
|
||||
|
||||
|
||||
def named_list_to_map(container, name, key):
|
||||
if name in container:
|
||||
return list_to_map(container[name], key)
|
||||
return {}
|
||||
|
||||
|
||||
def removed(reverse):
|
||||
if reverse:
|
||||
return "added"
|
||||
return "removed"
|
||||
|
||||
|
||||
def required(reverse):
|
||||
if reverse:
|
||||
return "optional"
|
||||
return "required"
|
||||
|
||||
|
||||
def compare_schemas(d_1, d_2, reverse):
    errors = []
    domains_1 = copy.deepcopy(d_1)
    domains_2 = copy.deepcopy(d_2)
    types_1 = normalize_types_in_schema(domains_1)
    types_2 = normalize_types_in_schema(domains_2)

    domains_by_name_1 = list_to_map(domains_1, "domain")
    domains_by_name_2 = list_to_map(domains_2, "domain")

    for name in domains_by_name_1:
        domain_1 = domains_by_name_1[name]
        if name not in domains_by_name_2:
            errors.append("%s: domain has been %s" % (name, removed(reverse)))
            continue
        compare_domains(domain_1, domains_by_name_2[name], types_1, types_2, errors, reverse)
    return errors


def compare_domains(domain_1, domain_2, types_map_1, types_map_2, errors, reverse):
    domain_name = domain_1["domain"]
    commands_1 = named_list_to_map(domain_1, "commands", "name")
    commands_2 = named_list_to_map(domain_2, "commands", "name")
    for name in commands_1:
        command_1 = commands_1[name]
        if name not in commands_2:
            errors.append("%s.%s: command has been %s" % (domain_1["domain"], name, removed(reverse)))
            continue
        compare_commands(domain_name, command_1, commands_2[name], types_map_1, types_map_2, errors, reverse)

    events_1 = named_list_to_map(domain_1, "events", "name")
    events_2 = named_list_to_map(domain_2, "events", "name")
    for name in events_1:
        event_1 = events_1[name]
        if name not in events_2:
            errors.append("%s.%s: event has been %s" % (domain_1["domain"], name, removed(reverse)))
            continue
        compare_events(domain_name, event_1, events_2[name], types_map_1, types_map_2, errors, reverse)


def compare_commands(domain_name, command_1, command_2, types_map_1, types_map_2, errors, reverse):
    context = domain_name + "." + command_1["name"]

    params_1 = named_list_to_map(command_1, "parameters", "name")
    params_2 = named_list_to_map(command_2, "parameters", "name")
    # Note the reversed order: we allow removing but forbid adding parameters.
    compare_params_list(context, "parameter", params_2, params_1, types_map_2, types_map_1, 0, errors, not reverse)

    returns_1 = named_list_to_map(command_1, "returns", "name")
    returns_2 = named_list_to_map(command_2, "returns", "name")
    compare_params_list(context, "response parameter", returns_1, returns_2, types_map_1, types_map_2, 0, errors, reverse)


def compare_events(domain_name, event_1, event_2, types_map_1, types_map_2, errors, reverse):
    context = domain_name + "." + event_1["name"]
    params_1 = named_list_to_map(event_1, "parameters", "name")
    params_2 = named_list_to_map(event_2, "parameters", "name")
    compare_params_list(context, "parameter", params_1, params_2, types_map_1, types_map_2, 0, errors, reverse)


def compare_params_list(context, kind, params_1, params_2, types_map_1, types_map_2, depth, errors, reverse):
    for name in params_1:
        param_1 = params_1[name]
        if name not in params_2:
            if "optional" not in param_1:
                errors.append("%s.%s: required %s has been %s" % (context, name, kind, removed(reverse)))
            continue

        param_2 = params_2[name]
        if param_2 and "optional" in param_2 and "optional" not in param_1:
            errors.append("%s.%s: %s %s is now %s" % (context, name, required(reverse), kind, required(not reverse)))
            continue
        type_1 = extract_type(param_1, types_map_1, errors)
        type_2 = extract_type(param_2, types_map_2, errors)
        compare_types(context + "." + name, kind, type_1, type_2, types_map_1, types_map_2, depth, errors, reverse)


def compare_types(context, kind, type_1, type_2, types_map_1, types_map_2, depth, errors, reverse):
    # Arbitrary depth limit keeps the recursion over composite types bounded.
    if depth > 5:
        return

    base_type_1 = type_1["type"]
    base_type_2 = type_2["type"]

    if base_type_1 != base_type_2:
        errors.append("%s: %s base type mismatch, '%s' vs '%s'" % (context, kind, base_type_1, base_type_2))
    elif base_type_1 == "object":
        params_1 = named_list_to_map(type_1, "properties", "name")
        params_2 = named_list_to_map(type_2, "properties", "name")
        # If both parameters have the same named type use it in the context.
        if "id" in type_1 and "id" in type_2 and type_1["id"] == type_2["id"]:
            type_name = type_1["id"]
        else:
            type_name = "<object>"
        context += " %s->%s" % (kind, type_name)
        compare_params_list(context, "property", params_1, params_2, types_map_1, types_map_2, depth + 1, errors, reverse)
    elif base_type_1 == "array":
        item_type_1 = extract_type(type_1["items"], types_map_1, errors)
        item_type_2 = extract_type(type_2["items"], types_map_2, errors)
        compare_types(context, kind, item_type_1, item_type_2, types_map_1, types_map_2, depth + 1, errors, reverse)


def extract_type(typed_object, types_map, errors):
    if "type" in typed_object:
        result = {"id": "<transient>", "type": typed_object["type"]}
        if typed_object["type"] == "object":
            result["properties"] = []
        elif typed_object["type"] == "array":
            result["items"] = typed_object["items"]
        return result
    elif "$ref" in typed_object:
        ref = typed_object["$ref"]
        if ref not in types_map:
            errors.append("Can not resolve type: %s" % ref)
            types_map[ref] = {"id": "<transient>", "type": "object"}
        return types_map[ref]


def normalize_types_in_schema(domains):
    types = {}
    for domain in domains:
        domain_name = domain["domain"]
        normalize_types(domain, domain_name, types)
    return types


def normalize_types(obj, domain_name, types):
    if isinstance(obj, list):
        for item in obj:
            normalize_types(item, domain_name, types)
    elif isinstance(obj, dict):
        for key, value in obj.items():
            if key == "$ref" and value.find(".") == -1:
                obj[key] = "%s.%s" % (domain_name, value)
            elif key == "id":
                obj[key] = "%s.%s" % (domain_name, value)
                types[obj[key]] = obj
            else:
                normalize_types(value, domain_name, types)

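# For illustration: while normalizing domain "Network", a local reference
# {"$ref": "Headers"} becomes {"$ref": "Network.Headers"}, and a type
# {"id": "Request", ...} is registered in the resulting map under the fully
# qualified key "Network.Request".
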
def load_schema(file_name, domains):
    # pylint: disable=W0613
    if not os.path.isfile(file_name):
        return
    input_file = open(file_name, "r")
    parsed_json = pdl.loads(input_file.read(), file_name)
    input_file.close()
    domains += parsed_json["domains"]
    return parsed_json["version"]


def self_test():
    def create_test_schema_1():
        return [
            {
                "domain": "Network",
                "types": [
                    {
                        "id": "LoaderId",
                        "type": "string"
                    },
                    {
                        "id": "Headers",
                        "type": "object"
                    },
                    {
                        "id": "Request",
                        "type": "object",
                        "properties": [
                            {"name": "url", "type": "string"},
                            {"name": "method", "type": "string"},
                            {"name": "headers", "$ref": "Headers"},
                            {"name": "becameOptionalField", "type": "string"},
                            {"name": "removedField", "type": "string"},
                        ]
                    }
                ],
                "commands": [
                    {
                        "name": "removedCommand",
                    },
                    {
                        "name": "setExtraHTTPHeaders",
                        "parameters": [
                            {"name": "headers", "$ref": "Headers"},
                            {"name": "mismatched", "type": "string"},
                            {"name": "becameOptional", "$ref": "Headers"},
                            {"name": "removedRequired", "$ref": "Headers"},
                            {"name": "becameRequired", "$ref": "Headers", "optional": True},
                            {"name": "removedOptional", "$ref": "Headers", "optional": True},
                        ],
                        "returns": [
                            {"name": "mimeType", "type": "string"},
                            {"name": "becameOptional", "type": "string"},
                            {"name": "removedRequired", "type": "string"},
                            {"name": "becameRequired", "type": "string", "optional": True},
                            {"name": "removedOptional", "type": "string", "optional": True},
                        ]
                    }
                ],
                "events": [
                    {
                        "name": "requestWillBeSent",
                        "parameters": [
                            {"name": "frameId", "type": "string", "experimental": True},
                            {"name": "request", "$ref": "Request"},
                            {"name": "becameOptional", "type": "string"},
                            {"name": "removedRequired", "type": "string"},
                            {"name": "becameRequired", "type": "string", "optional": True},
                            {"name": "removedOptional", "type": "string", "optional": True},
                        ]
                    },
                    {
                        "name": "removedEvent",
                        "parameters": [
                            {"name": "errorText", "type": "string"},
                            {"name": "canceled", "type": "boolean", "optional": True}
                        ]
                    }
                ]
            },
            {
                "domain": "removedDomain"
            }
        ]

    def create_test_schema_2():
        return [
            {
                "domain": "Network",
                "types": [
                    {
                        "id": "LoaderId",
                        "type": "string"
                    },
                    {
                        "id": "Request",
                        "type": "object",
                        "properties": [
                            {"name": "url", "type": "string"},
                            {"name": "method", "type": "string"},
                            {"name": "headers", "type": "object"},
                            {"name": "becameOptionalField", "type": "string", "optional": True},
                        ]
                    }
                ],
                "commands": [
                    {
                        "name": "addedCommand",
                    },
                    {
                        "name": "setExtraHTTPHeaders",
                        "parameters": [
                            {"name": "headers", "type": "object"},
                            {"name": "mismatched", "type": "object"},
                            {"name": "becameOptional", "type": "object", "optional": True},
                            {"name": "addedRequired", "type": "object"},
                            {"name": "becameRequired", "type": "object"},
                            {"name": "addedOptional", "type": "object", "optional": True},
                        ],
                        "returns": [
                            {"name": "mimeType", "type": "string"},
                            {"name": "becameOptional", "type": "string", "optional": True},
                            {"name": "addedRequired", "type": "string"},
                            {"name": "becameRequired", "type": "string"},
                            {"name": "addedOptional", "type": "string", "optional": True},
                        ]
                    }
                ],
                "events": [
                    {
                        "name": "requestWillBeSent",
                        "parameters": [
                            {"name": "request", "$ref": "Request"},
                            {"name": "becameOptional", "type": "string", "optional": True},
                            {"name": "addedRequired", "type": "string"},
                            {"name": "becameRequired", "type": "string"},
                            {"name": "addedOptional", "type": "string", "optional": True},
                        ]
                    },
                    {
                        "name": "addedEvent"
                    }
                ]
            },
            {
                "domain": "addedDomain"
            }
        ]

    expected_errors = [
        "removedDomain: domain has been removed",
        "Network.removedCommand: command has been removed",
        "Network.removedEvent: event has been removed",
        "Network.setExtraHTTPHeaders.mismatched: parameter base type mismatch, 'object' vs 'string'",
        "Network.setExtraHTTPHeaders.addedRequired: required parameter has been added",
        "Network.setExtraHTTPHeaders.becameRequired: optional parameter is now required",
        "Network.setExtraHTTPHeaders.removedRequired: required response parameter has been removed",
        "Network.setExtraHTTPHeaders.becameOptional: required response parameter is now optional",
        "Network.requestWillBeSent.removedRequired: required parameter has been removed",
        "Network.requestWillBeSent.becameOptional: required parameter is now optional",
        "Network.requestWillBeSent.request parameter->Network.Request.removedField: required property has been removed",
        "Network.requestWillBeSent.request parameter->Network.Request.becameOptionalField: required property is now optional",
    ]

    expected_errors_reverse = [
        "addedDomain: domain has been added",
        "Network.addedEvent: event has been added",
        "Network.addedCommand: command has been added",
        "Network.setExtraHTTPHeaders.mismatched: parameter base type mismatch, 'string' vs 'object'",
        "Network.setExtraHTTPHeaders.removedRequired: required parameter has been removed",
        "Network.setExtraHTTPHeaders.becameOptional: required parameter is now optional",
        "Network.setExtraHTTPHeaders.addedRequired: required response parameter has been added",
        "Network.setExtraHTTPHeaders.becameRequired: optional response parameter is now required",
        "Network.requestWillBeSent.becameRequired: optional parameter is now required",
        "Network.requestWillBeSent.addedRequired: required parameter has been added",
    ]

    def is_subset(subset, superset, message):
        for i in range(len(subset)):
            if subset[i] not in superset:
                sys.stderr.write("%s error: %s\n" % (message, subset[i]))
                return False
        return True

    def errors_match(expected, actual):
        return (is_subset(actual, expected, "Unexpected") and
                is_subset(expected, actual, "Missing"))

    return (errors_match(expected_errors,
                         compare_schemas(create_test_schema_1(), create_test_schema_2(), False)) and
            errors_match(expected_errors_reverse,
                         compare_schemas(create_test_schema_2(), create_test_schema_1(), True)))


def load_domains_and_baselines(file_name, domains, baseline_domains):
    version = load_schema(os.path.normpath(file_name), domains)
    suffix = "-%s.%s.json" % (version["major"], version["minor"])
    # The baseline is a versioned JSON snapshot stored next to the protocol
    # file, e.g. js_protocol.pdl -> js_protocol-1.3.json.
    if file_name.endswith(".json"):
        baseline_file = file_name.replace(".json", suffix)
    else:
        baseline_file = file_name.replace(".pdl", suffix)
    load_schema(os.path.normpath(baseline_file), baseline_domains)
    return version


def main():
    if not self_test():
        sys.stderr.write("Self-test failed\n")
        return 1

    cmdline_parser = optparse.OptionParser()
    cmdline_parser.add_option("--show_changes")
    cmdline_parser.add_option("--expected_errors")
    cmdline_parser.add_option("--stamp")
    arg_options, arg_values = cmdline_parser.parse_args()

    if len(arg_values) < 1:
        sys.stderr.write("Usage: %s [--show_changes] <protocol-1> [, <protocol-2>...]\n" % sys.argv[0])
        return 1

    domains = []
    baseline_domains = []
    version = load_domains_and_baselines(arg_values[0], domains, baseline_domains)
    for dependency in arg_values[1:]:
        load_domains_and_baselines(dependency, domains, baseline_domains)

    expected_errors = []
    if arg_options.expected_errors:
        expected_errors_file = open(arg_options.expected_errors, "r")
        expected_errors = json.loads(expected_errors_file.read())["errors"]
        expected_errors_file.close()

    errors = compare_schemas(baseline_domains, domains, False)
    unexpected_errors = []
    for i in range(len(errors)):
        if errors[i] not in expected_errors:
            unexpected_errors.append(errors[i])
    if len(unexpected_errors) > 0:
        sys.stderr.write("  Compatibility checks FAILED\n")
        for error in unexpected_errors:
            sys.stderr.write("    %s\n" % error)
        return 1

    if arg_options.show_changes:
        changes = compare_schemas(domains, baseline_domains, True)
        if len(changes) > 0:
            print "  Public changes since %s:" % version
            for change in changes:
                print "    %s" % change

    if arg_options.stamp:
        with open(arg_options.stamp, 'a') as _:
            pass

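
# For illustration, the file passed via --expected_errors is JSON of the form
# (the error string below is one produced by the checks above):
#   {"errors": ["Network.removedCommand: command has been removed"]}
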
if __name__ == '__main__':
    sys.exit(main())

661 third_party/inspector_protocol/code_generator.py vendored Executable file
@@ -0,0 +1,661 @@
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os.path
import sys
import optparse
import collections
import functools
import re
import copy
try:
    import json
except ImportError:
    import simplejson as json

import pdl

# Path handling for libraries and templates
# Paths have to be normalized because Jinja uses the exact template path to
# determine the hash used in the cache filename, and we need a pre-caching step
# to be concurrency-safe. Use absolute path because __file__ is absolute if
# module is imported, and relative if executed directly.
# If paths differ between pre-caching and individual file compilation, the cache
# is regenerated, which causes a race condition and breaks concurrent build,
# since some compile processes will try to read the partially written cache.
module_path, module_filename = os.path.split(os.path.realpath(__file__))


def read_config():
    # pylint: disable=W0703
    def json_to_object(data, output_base, config_base):
        def json_object_hook(object_dict):
            items = [(k, os.path.join(config_base, v) if k == "path" else v) for (k, v) in object_dict.items()]
            items = [(k, os.path.join(output_base, v) if k == "output" else v) for (k, v) in items]
            keys, values = zip(*items)
            return collections.namedtuple('X', keys)(*values)
        return json.loads(data, object_hook=json_object_hook)

    def init_defaults(config_tuple, path, defaults):
        keys = list(config_tuple._fields)  # pylint: disable=E1101
        values = [getattr(config_tuple, k) for k in keys]
        for i in xrange(len(keys)):
            if hasattr(values[i], "_fields"):
                values[i] = init_defaults(values[i], path + "." + keys[i], defaults)
        for optional in defaults:
            if optional.find(path + ".") != 0:
                continue
            optional_key = optional[len(path) + 1:]
            if optional_key.find(".") == -1 and optional_key not in keys:
                keys.append(optional_key)
                values.append(defaults[optional])
        return collections.namedtuple('X', keys)(*values)

    try:
        cmdline_parser = optparse.OptionParser()
        cmdline_parser.add_option("--output_base")
        cmdline_parser.add_option("--jinja_dir")
        cmdline_parser.add_option("--config")
        cmdline_parser.add_option("--config_value", action="append", type="string")
        arg_options, _ = cmdline_parser.parse_args()
        jinja_dir = arg_options.jinja_dir
        if not jinja_dir:
            raise Exception("jinja directory must be specified")
        jinja_dir = jinja_dir.decode('utf8')
        output_base = arg_options.output_base
        if not output_base:
            raise Exception("Base output directory must be specified")
        output_base = output_base.decode('utf8')
        config_file = arg_options.config
        if not config_file:
            raise Exception("Config file name must be specified")
        config_file = config_file.decode('utf8')
        config_base = os.path.dirname(config_file)
        config_values = arg_options.config_value
        if not config_values:
            config_values = []
    except Exception:
        # Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
        exc = sys.exc_info()[1]
        sys.stderr.write("Failed to parse command-line arguments: %s\n\n" % exc)
        exit(1)

    try:
        config_json_file = open(config_file, "r")
        config_json_string = config_json_file.read()
        config_partial = json_to_object(config_json_string, output_base, config_base)
        config_json_file.close()
        defaults = {
            ".use_snake_file_names": False,
            ".use_title_case_methods": False,
            ".imported": False,
            ".imported.export_macro": "",
            ".imported.export_header": False,
            ".imported.header": False,
            ".imported.package": False,
            ".imported.options": False,
            ".protocol.export_macro": "",
            ".protocol.export_header": False,
            ".protocol.options": False,
            ".protocol.file_name_prefix": "",
            ".exported": False,
            ".exported.export_macro": "",
            ".exported.export_header": False,
            ".lib": False,
            ".lib.export_macro": "",
            ".lib.export_header": False,
        }
        for key_value in config_values:
            parts = key_value.split("=")
            if len(parts) == 2:
                defaults["." + parts[0]] = parts[1]
        return (jinja_dir, config_file, init_defaults(config_partial, "", defaults))
    except Exception:
        # Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
        exc = sys.exc_info()[1]
        sys.stderr.write("Failed to parse config file: %s\n\n" % exc)
        exit(1)

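
# For illustration (a hypothetical config; actual keys come from the embedder's
# config JSON): json_to_object turns
#   {"protocol": {"path": "js_protocol.pdl", "output": "protocol"}}
# into nested namedtuples, so the code below can write config.protocol.path;
# "path" values are resolved relative to the config file and "output" values
# relative to --output_base.
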
# ---- Begin of utilities exposed to generator ----


def to_title_case(name):
    return name[:1].upper() + name[1:]


def dash_to_camelcase(word):
    prefix = ""
    if word[0] == "-":
        prefix = "Negative"
        word = word[1:]
    return prefix + "".join(to_title_case(x) or "-" for x in word.split("-"))


def to_snake_case(name):
    return re.sub(r"([a-z0-9])([A-Z])", r"\1_\2", name, sys.maxint).lower()


def to_method_case(config, name):
    if config.use_title_case_methods:
        return to_title_case(name)
    return name


def join_arrays(dict, keys):
    result = []
    for key in keys:
        if key in dict:
            result += dict[key]
    return result


def format_include(config, header, file_name=None):
    if file_name is not None:
        header = header + "/" + file_name + ".h"
    header = "\"" + header + "\"" if header[0] not in "<\"" else header
    if config.use_snake_file_names:
        header = to_snake_case(header)
    return header


def format_domain_include(config, header, file_name):
    return format_include(config, header, config.protocol.file_name_prefix + file_name)


def to_file_name(config, file_name):
    if config.use_snake_file_names:
        return to_snake_case(file_name).replace(".cpp", ".cc")
    return file_name


# ---- End of utilities exposed to generator ----

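# For illustration: dash_to_camelcase("-webkit-foo") returns "NegativeWebkitFoo",
# to_snake_case("TypeBuilder") returns "type_builder", and with
# use_snake_file_names set, to_file_name(config, "Protocol.cpp") returns "protocol.cc".
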
def initialize_jinja_env(jinja_dir, cache_dir, config):
    # pylint: disable=F0401
    sys.path.insert(1, os.path.abspath(jinja_dir))
    import jinja2

    jinja_env = jinja2.Environment(
        loader=jinja2.FileSystemLoader(module_path),
        # Bytecode cache is not concurrency-safe unless pre-cached:
        # if pre-cached this is read-only, but writing creates a race condition.
        bytecode_cache=jinja2.FileSystemBytecodeCache(cache_dir),
        keep_trailing_newline=True,  # newline-terminate generated files
        lstrip_blocks=True,  # so can indent control flow tags
        trim_blocks=True)
    jinja_env.filters.update({
        "to_title_case": to_title_case,
        "dash_to_camelcase": dash_to_camelcase,
        "to_method_case": functools.partial(to_method_case, config)})
    jinja_env.add_extension("jinja2.ext.loopcontrols")
    return jinja_env


def create_imported_type_definition(domain_name, type, imported_namespace):
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
        "pass_type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<%s::%s::API::%s>" % (imported_namespace, domain_name, type["id"]),
        "raw_type": "%s::%s::API::%s" % (imported_namespace, domain_name, type["id"]),
        "raw_pass_type": "%s::%s::API::%s*" % (imported_namespace, domain_name, type["id"]),
        "raw_return_type": "%s::%s::API::%s*" % (imported_namespace, domain_name, type["id"]),
    }


def create_user_type_definition(domain_name, type):
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<protocol::%s::%s>" % (domain_name, type["id"]),
        "pass_type": "std::unique_ptr<protocol::%s::%s>" % (domain_name, type["id"]),
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<protocol::%s::%s>" % (domain_name, type["id"]),
        "raw_type": "protocol::%s::%s" % (domain_name, type["id"]),
        "raw_pass_type": "protocol::%s::%s*" % (domain_name, type["id"]),
        "raw_return_type": "protocol::%s::%s*" % (domain_name, type["id"]),
    }


def create_object_type_definition():
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<protocol::DictionaryValue>",
        "pass_type": "std::unique_ptr<protocol::DictionaryValue>",
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<protocol::DictionaryValue>",
        "raw_type": "protocol::DictionaryValue",
        "raw_pass_type": "protocol::DictionaryValue*",
        "raw_return_type": "protocol::DictionaryValue*",
    }


def create_any_type_definition():
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<protocol::Value>",
        "pass_type": "std::unique_ptr<protocol::Value>",
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<protocol::Value>",
        "raw_type": "protocol::Value",
        "raw_pass_type": "protocol::Value*",
        "raw_return_type": "protocol::Value*",
    }


def create_string_type_definition():
    # pylint: disable=W0622
    return {
        "return_type": "String",
        "pass_type": "const String&",
        "to_pass_type": "%s",
        "to_raw_type": "%s",
        "to_rvalue": "%s",
        "type": "String",
        "raw_type": "String",
        "raw_pass_type": "const String&",
        "raw_return_type": "String",
    }


def create_primitive_type_definition(type):
    # pylint: disable=W0622
    typedefs = {
        "number": "double",
        "integer": "int",
        "boolean": "bool"
    }
    defaults = {
        "number": "0",
        "integer": "0",
        "boolean": "false"
    }
    jsontypes = {
        "number": "TypeDouble",
        "integer": "TypeInteger",
        "boolean": "TypeBoolean",
    }
    return {
        "return_type": typedefs[type],
        "pass_type": typedefs[type],
        "to_pass_type": "%s",
        "to_raw_type": "%s",
        "to_rvalue": "%s",
        "type": typedefs[type],
        "raw_type": typedefs[type],
        "raw_pass_type": typedefs[type],
        "raw_return_type": typedefs[type],
        "default_value": defaults[type]
    }


def wrap_array_definition(type):
    # pylint: disable=W0622
    return {
        "return_type": "std::unique_ptr<protocol::Array<%s>>" % type["raw_type"],
        "pass_type": "std::unique_ptr<protocol::Array<%s>>" % type["raw_type"],
        "to_raw_type": "%s.get()",
        "to_pass_type": "std::move(%s)",
        "to_rvalue": "std::move(%s)",
        "type": "std::unique_ptr<protocol::Array<%s>>" % type["raw_type"],
        "raw_type": "protocol::Array<%s>" % type["raw_type"],
        "raw_pass_type": "protocol::Array<%s>*" % type["raw_type"],
        "raw_return_type": "protocol::Array<%s>*" % type["raw_type"],
        "out_type": "protocol::Array<%s>&" % type["raw_type"],
    }

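# For illustration: for a type {"id": "Request"} in domain "Network",
# create_user_type_definition yields
#   "type":     "std::unique_ptr<protocol::Network::Request>"
#   "raw_type": "protocol::Network::Request"
# and wrap_array_definition over that yields
#   "type":     "std::unique_ptr<protocol::Array<protocol::Network::Request>>".
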
class Protocol(object):
    def __init__(self, config):
        self.config = config
        self.json_api = {"domains": []}
        self.imported_domains = []
        self.exported_domains = []
        self.generate_domains = self.read_protocol_file(config.protocol.path)

        if config.protocol.options:
            self.generate_domains = [rule.domain for rule in config.protocol.options]
            self.exported_domains = [rule.domain for rule in config.protocol.options if hasattr(rule, "exported")]

        if config.imported:
            self.imported_domains = self.read_protocol_file(config.imported.path)
            if config.imported.options:
                self.imported_domains = [rule.domain for rule in config.imported.options]

        self.patch_full_qualified_refs()
        self.create_notification_types()
        self.create_type_definitions()
        self.generate_used_types()

    def read_protocol_file(self, file_name):
        input_file = open(file_name, "r")
        parsed_json = pdl.loads(input_file.read(), file_name)
        input_file.close()
        version = parsed_json["version"]["major"] + "." + parsed_json["version"]["minor"]
        domains = []
        for domain in parsed_json["domains"]:
            domains.append(domain["domain"])
            domain["version"] = version
        self.json_api["domains"] += parsed_json["domains"]
        return domains

    def patch_full_qualified_refs(self):
        def patch_full_qualified_refs_in_domain(json, domain_name):
            if isinstance(json, list):
                for item in json:
                    patch_full_qualified_refs_in_domain(item, domain_name)
            if not isinstance(json, dict):
                return
            for key in json:
                if key == "type" and json[key] == "string":
                    json[key] = domain_name + ".string"
                if key != "$ref":
                    patch_full_qualified_refs_in_domain(json[key], domain_name)
                    continue
                if json["$ref"].find(".") == -1:
                    json["$ref"] = domain_name + "." + json["$ref"]
            return

        for domain in self.json_api["domains"]:
            patch_full_qualified_refs_in_domain(domain, domain["domain"])

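    # For illustration: inside domain "Runtime", {"$ref": "RemoteObject"} becomes
    # {"$ref": "Runtime.RemoteObject"}, and a bare {"type": "string"} becomes
    # {"type": "Runtime.string"}, so string handling can be resolved per domain.
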
    def all_references(self, json):
        refs = set()
        if isinstance(json, list):
            for item in json:
                refs |= self.all_references(item)
        if not isinstance(json, dict):
            return refs
        for key in json:
            if key != "$ref":
                refs |= self.all_references(json[key])
            else:
                refs.add(json["$ref"])
        return refs

    def generate_used_types(self):
        all_refs = set()
        for domain in self.json_api["domains"]:
            domain_name = domain["domain"]
            if "commands" in domain:
                for command in domain["commands"]:
                    if self.generate_command(domain_name, command["name"]):
                        all_refs |= self.all_references(command)
            if "events" in domain:
                for event in domain["events"]:
                    if self.generate_event(domain_name, event["name"]):
                        all_refs |= self.all_references(event)
                        all_refs.add(domain_name + "." + to_title_case(event["name"]) + "Notification")

        # Transitively pull in types referenced by already-used types.
        dependencies = self.generate_type_dependencies()
        queue = set(all_refs)
        while len(queue):
            ref = queue.pop()
            if ref in dependencies:
                queue |= dependencies[ref] - all_refs
                all_refs |= dependencies[ref]
        self.used_types = all_refs

    def generate_type_dependencies(self):
        dependencies = dict()
        domains_with_types = (x for x in self.json_api["domains"] if "types" in x)
        for domain in domains_with_types:
            domain_name = domain["domain"]
            for type in domain["types"]:
                related_types = self.all_references(type)
                if len(related_types):
                    dependencies[domain_name + "." + type["id"]] = related_types
        return dependencies

    def create_notification_types(self):
        for domain in self.json_api["domains"]:
            if "events" in domain:
                for event in domain["events"]:
                    event_type = dict()
                    event_type["description"] = "Wrapper for notification params"
                    event_type["type"] = "object"
                    event_type["id"] = to_title_case(event["name"]) + "Notification"
                    if "parameters" in event:
                        event_type["properties"] = copy.deepcopy(event["parameters"])
                    if "types" not in domain:
                        domain["types"] = list()
                    domain["types"].append(event_type)

    def create_type_definitions(self):
        imported_namespace = "::".join(self.config.imported.namespace) if self.config.imported else ""
        self.type_definitions = {}
        self.type_definitions["number"] = create_primitive_type_definition("number")
        self.type_definitions["integer"] = create_primitive_type_definition("integer")
        self.type_definitions["boolean"] = create_primitive_type_definition("boolean")
        self.type_definitions["object"] = create_object_type_definition()
        self.type_definitions["any"] = create_any_type_definition()
        for domain in self.json_api["domains"]:
            self.type_definitions[domain["domain"] + ".string"] = create_string_type_definition()
            if "types" not in domain:
                continue
            for type in domain["types"]:
                type_name = domain["domain"] + "." + type["id"]
                if type["type"] == "object" and domain["domain"] in self.imported_domains:
                    self.type_definitions[type_name] = create_imported_type_definition(domain["domain"], type, imported_namespace)
                elif type["type"] == "object":
                    self.type_definitions[type_name] = create_user_type_definition(domain["domain"], type)
                elif type["type"] == "array":
                    self.type_definitions[type_name] = self.resolve_type(type)
                elif type["type"] == domain["domain"] + ".string":
                    self.type_definitions[type_name] = create_string_type_definition()
                else:
                    self.type_definitions[type_name] = create_primitive_type_definition(type["type"])

    def check_options(self, options, domain, name, include_attr, exclude_attr, default):
        for rule in options:
            if rule.domain != domain:
                continue
            if include_attr and hasattr(rule, include_attr):
                return name in getattr(rule, include_attr)
            if exclude_attr and hasattr(rule, exclude_attr):
                return name not in getattr(rule, exclude_attr)
            return default
        return False

    # ---- Begin of methods exposed to generator

    def type_definition(self, name):
        return self.type_definitions[name]

    def resolve_type(self, prop):
        if "$ref" in prop:
            return self.type_definitions[prop["$ref"]]
        if prop["type"] == "array":
            return wrap_array_definition(self.resolve_type(prop["items"]))
        return self.type_definitions[prop["type"]]

    def generate_command(self, domain, command):
        if not self.config.protocol.options:
            return domain in self.generate_domains
        return self.check_options(self.config.protocol.options, domain, command, "include", "exclude", True)

    def generate_event(self, domain, event):
        if not self.config.protocol.options:
            return domain in self.generate_domains
        return self.check_options(self.config.protocol.options, domain, event, "include_events", "exclude_events", True)

    def generate_type(self, domain, typename):
        return domain + "." + typename in self.used_types

    def is_async_command(self, domain, command):
        if not self.config.protocol.options:
            return False
        return self.check_options(self.config.protocol.options, domain, command, "async", None, False)

    def is_exported(self, domain, name):
        if not self.config.protocol.options:
            return False
        return self.check_options(self.config.protocol.options, domain, name, "exported", None, False)

    def is_imported(self, domain, name):
        if not self.config.imported:
            return False
        if not self.config.imported.options:
            return domain in self.imported_domains
        return self.check_options(self.config.imported.options, domain, name, "imported", None, False)

    def is_exported_domain(self, domain):
        return domain in self.exported_domains

    def generate_disable(self, domain):
        if "commands" not in domain:
            return True
        for command in domain["commands"]:
            if command["name"] == "disable" and self.generate_command(domain["domain"], "disable"):
                return False
        return True

    def is_imported_dependency(self, domain):
        return domain in self.generate_domains or domain in self.imported_domains

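# For illustration (rule contents are hypothetical): given a config rule with
# rule.domain == "Network" and rule.async == ["enable"] in
# config.protocol.options, is_async_command("Network", "enable") returns True,
# while generate_command consults the rule's "include"/"exclude" lists and
# defaults to True for a domain that has a matching rule.
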
def main():
    jinja_dir, config_file, config = read_config()

    protocol = Protocol(config)

    if not config.exported and len(protocol.exported_domains):
        sys.stderr.write("Domains [%s] are exported, but config is missing export entry\n\n" % ", ".join(protocol.exported_domains))
        exit(1)

    if not os.path.exists(config.protocol.output):
        os.mkdir(config.protocol.output)
    if len(protocol.exported_domains) and not os.path.exists(config.exported.output):
        os.mkdir(config.exported.output)
    jinja_env = initialize_jinja_env(jinja_dir, config.protocol.output, config)

    inputs = []
    inputs.append(__file__)
    inputs.append(config_file)
    inputs.append(config.protocol.path)
    if config.imported:
        inputs.append(config.imported.path)
    templates_dir = os.path.join(module_path, "templates")
    inputs.append(os.path.join(templates_dir, "TypeBuilder_h.template"))
    inputs.append(os.path.join(templates_dir, "TypeBuilder_cpp.template"))
    inputs.append(os.path.join(templates_dir, "Exported_h.template"))
    inputs.append(os.path.join(templates_dir, "Imported_h.template"))

    h_template = jinja_env.get_template("templates/TypeBuilder_h.template")
    cpp_template = jinja_env.get_template("templates/TypeBuilder_cpp.template")
    exported_template = jinja_env.get_template("templates/Exported_h.template")
    imported_template = jinja_env.get_template("templates/Imported_h.template")

    outputs = dict()

    for domain in protocol.json_api["domains"]:
        class_name = domain["domain"]
        file_name = config.protocol.file_name_prefix + class_name
        template_context = {
            "protocol": protocol,
            "config": config,
            "domain": domain,
            "join_arrays": join_arrays,
            "format_include": functools.partial(format_include, config),
            "format_domain_include": functools.partial(format_domain_include, config),
        }

        if domain["domain"] in protocol.generate_domains:
            outputs[os.path.join(config.protocol.output, to_file_name(config, file_name + ".h"))] = h_template.render(template_context)
            outputs[os.path.join(config.protocol.output, to_file_name(config, file_name + ".cpp"))] = cpp_template.render(template_context)
            if domain["domain"] in protocol.exported_domains:
                outputs[os.path.join(config.exported.output, to_file_name(config, file_name + ".h"))] = exported_template.render(template_context)
        if domain["domain"] in protocol.imported_domains:
            outputs[os.path.join(config.protocol.output, to_file_name(config, file_name + ".h"))] = imported_template.render(template_context)

    if config.lib:
        template_context = {
            "config": config,
            "format_include": functools.partial(format_include, config),
        }

        lib_templates_dir = os.path.join(module_path, "lib")
        # Note these should be sorted in the right order.
        # TODO(dgozman): sort them programmatically based on commented includes.
        lib_h_templates = [
            "ErrorSupport_h.template",
            "Values_h.template",
            "Object_h.template",
            "ValueConversions_h.template",
            "Maybe_h.template",
            "Array_h.template",
            "DispatcherBase_h.template",
            "Parser_h.template",
        ]

        lib_cpp_templates = [
            "Protocol_cpp.template",
            "ErrorSupport_cpp.template",
            "Values_cpp.template",
            "Object_cpp.template",
            "DispatcherBase_cpp.template",
            "Parser_cpp.template",
        ]

        forward_h_templates = [
            "Forward_h.template",
            "Allocator_h.template",
            "FrontendChannel_h.template",
        ]

        def generate_lib_file(file_name, template_files):
            parts = []
            for template_file in template_files:
                inputs.append(os.path.join(lib_templates_dir, template_file))
                template = jinja_env.get_template("lib/" + template_file)
                parts.append(template.render(template_context))
            outputs[file_name] = "\n\n".join(parts)

        generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Forward.h")), forward_h_templates)
        generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Protocol.h")), lib_h_templates)
        generate_lib_file(os.path.join(config.lib.output, to_file_name(config, "Protocol.cpp")), lib_cpp_templates)

    # Make gyp / make generators happy, otherwise make rebuilds world.
    inputs_ts = max(map(os.path.getmtime, inputs))
    up_to_date = True
    for output_file in outputs.iterkeys():
        if not os.path.exists(output_file) or os.path.getmtime(output_file) < inputs_ts:
            up_to_date = False
            break
    if up_to_date:
        sys.exit()

    for file_name, content in outputs.iteritems():
        out_file = open(file_name, "w")
        out_file.write(content)
        out_file.close()


main()

6 third_party/inspector_protocol/codereview.settings vendored Normal file
@@ -0,0 +1,6 @@
# This file is used by git-cl to get repository specific information.
CC_LIST: chromium-reviews@chromium.org
CODE_REVIEW_SERVER: codereview.chromium.org
GERRIT_HOST: True
PROJECT: inspector_protocol
VIEW_VC: https://chromium.googlesource.com/deps/inspector_protocol/+/

@@ -11,6 +11,7 @@ try:
 except ImportError:
     import simplejson as json
 
+import pdl
 
 def main(argv):
     if len(argv) < 1:
@@ -25,8 +26,7 @@ def main(argv):
             sys.stderr.write("Cannot find %s\n" % file_name)
             return 1
         input_file = open(file_name, "r")
-        json_string = input_file.read()
-        parsed_json = json.loads(json_string)
+        parsed_json = pdl.loads(input_file.read(), file_name)
         domains += parsed_json["domains"]
         version = parsed_json["version"]
 
33 third_party/inspector_protocol/convert_protocol_to_json.py vendored Executable file
@@ -0,0 +1,33 @@
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections
import json
import os.path
import re
import sys

import pdl


def main(argv):
    if len(argv) < 2:
        sys.stderr.write("Usage: %s <protocol.pdl> <protocol.json>\n" % sys.argv[0])
        return 1
    file_name = os.path.normpath(argv[0])
    input_file = open(file_name, "r")
    pdl_string = input_file.read()
    protocol = pdl.loads(pdl_string, file_name)
    input_file.close()
    output_file = open(argv[0].replace('.pdl', '.json'), 'wb')
    json.dump(protocol, output_file, indent=4, separators=(',', ': '))
    output_file.close()

    output_file = open(os.path.normpath(argv[1]), 'wb')
    json.dump(protocol, output_file, indent=4, separators=(',', ': '))
    output_file.close()


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))

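# Example invocation (file names are illustrative):
#   convert_protocol_to_json.py browser_protocol.pdl browser_protocol.json
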
@@ -27,13 +27,12 @@ template("inspector_protocol_generate") {
   inspector_protocol_dir = invoker.inspector_protocol_dir
 
   action(target_name) {
-    script = "$inspector_protocol_dir/CodeGenerator.py"
+    script = "$inspector_protocol_dir/code_generator.py"
 
     inputs = [
       invoker.config_file,
       "$inspector_protocol_dir/lib/Allocator_h.template",
       "$inspector_protocol_dir/lib/Array_h.template",
-      "$inspector_protocol_dir/lib/Collections_h.template",
       "$inspector_protocol_dir/lib/DispatcherBase_cpp.template",
       "$inspector_protocol_dir/lib/DispatcherBase_h.template",
       "$inspector_protocol_dir/lib/ErrorSupport_cpp.template",
@@ -7,7 +7,6 @@
     'inspector_protocol_files': [
       'lib/Allocator_h.template',
       'lib/Array_h.template',
-      'lib/Collections_h.template',
       'lib/DispatcherBase_cpp.template',
       'lib/DispatcherBase_h.template',
       'lib/ErrorSupport_cpp.template',
@@ -27,7 +26,7 @@
       'templates/Imported_h.template',
       'templates/TypeBuilder_cpp.template',
       'templates/TypeBuilder_h.template',
-      'CodeGenerator.py',
+      'code_generator.py',
     ]
   }
 }
@@ -1,43 +0,0 @@
-// Copyright 2016 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#ifndef {{"_".join(config.protocol.namespace)}}_Collections_h
-#define {{"_".join(config.protocol.namespace)}}_Collections_h
-
-#include {{format_include(config.protocol.package, "Forward")}}
-#include <cstddef>
-
-#if defined(__APPLE__) && !defined(_LIBCPP_VERSION)
-#include <map>
-#include <set>
-
-{% for namespace in config.protocol.namespace %}
-namespace {{namespace}} {
-{% endfor %}
-
-template <class Key, class T> using HashMap = std::map<Key, T>;
-template <class Key> using HashSet = std::set<Key>;
-
-{% for namespace in config.protocol.namespace %}
-} // namespace {{namespace}}
-{% endfor %}
-
-#else
-#include <unordered_map>
-#include <unordered_set>
-
-{% for namespace in config.protocol.namespace %}
-namespace {{namespace}} {
-{% endfor %}
-
-template <class Key, class T> using HashMap = std::unordered_map<Key, T>;
-template <class Key> using HashSet = std::unordered_set<Key>;
-
-{% for namespace in config.protocol.namespace %}
-} // namespace {{namespace}}
-{% endfor %}
-
-#endif // defined(__APPLE__) && !defined(_LIBCPP_VERSION)
-
-#endif // !defined({{"_".join(config.protocol.namespace)}}_Collections_h)
@@ -231,7 +231,7 @@ void UberDispatcher::registerBackend(const String& name, std::unique_ptr<protoco
     m_dispatchers[name] = std::move(dispatcher);
 }
 
-void UberDispatcher::setupRedirects(const HashMap<String, String>& redirects)
+void UberDispatcher::setupRedirects(const std::unordered_map<String, String>& redirects)
 {
     for (const auto& pair : redirects)
         m_redirects[pair.first] = pair.second;
@@ -269,7 +269,7 @@ DispatchResponse::Status UberDispatcher::dispatch(std::unique_ptr<Value> parsedM
         return DispatchResponse::kError;
     }
 
-    HashMap<String, String>::iterator redirectIt = m_redirects.find(method);
+    std::unordered_map<String, String>::iterator redirectIt = m_redirects.find(method);
     if (redirectIt != m_redirects.end())
         method = redirectIt->second;
 
@@ -5,9 +5,8 @@
 #ifndef {{"_".join(config.protocol.namespace)}}_DispatcherBase_h
 #define {{"_".join(config.protocol.namespace)}}_DispatcherBase_h
 
-//#include "Collections.h"
-//#include "ErrorSupport.h"
 //#include "Forward.h"
+//#include "ErrorSupport.h"
 //#include "Values.h"
 
 {% for namespace in config.protocol.namespace %}
@@ -101,7 +100,7 @@ public:
 
 private:
     FrontendChannel* m_frontendChannel;
-    protocol::HashSet<WeakPtr*> m_weakPtrs;
+    std::unordered_set<WeakPtr*> m_weakPtrs;
     int m_lastCallbackId;
     bool m_lastCallbackFallThrough;
 };
@@ -111,7 +110,7 @@ class {{config.lib.export_macro}} UberDispatcher {
 public:
     explicit UberDispatcher(FrontendChannel*);
     void registerBackend(const String& name, std::unique_ptr<protocol::DispatcherBase>);
-    void setupRedirects(const HashMap<String, String>&);
+    void setupRedirects(const std::unordered_map<String, String>&);
     DispatchResponse::Status dispatch(std::unique_ptr<Value> message, int* callId = nullptr, String* method = nullptr);
     FrontendChannel* channel() { return m_frontendChannel; }
     bool fallThroughForNotFound() { return m_fallThroughForNotFound; }
@@ -122,8 +121,8 @@ public:
 private:
     FrontendChannel* m_frontendChannel;
     bool m_fallThroughForNotFound;
-    HashMap<String, String> m_redirects;
-    protocol::HashMap<String, std::unique_ptr<protocol::DispatcherBase>> m_dispatchers;
+    std::unordered_map<String, String> m_redirects;
+    std::unordered_map<String, std::unique_ptr<protocol::DispatcherBase>> m_dispatchers;
 };
 
 class InternalResponse : public Serializable {
@@ -5,7 +5,7 @@
 #ifndef {{"_".join(config.protocol.namespace)}}_ErrorSupport_h
 #define {{"_".join(config.protocol.namespace)}}_ErrorSupport_h
 
-//#include "Forward.h"
+#include {{format_include(config.protocol.package, "Forward")}}
 
 {% for namespace in config.protocol.namespace %}
 namespace {{namespace}} {
@@ -10,7 +10,10 @@
 {% endif %}
 #include {{format_include(config.lib.string_header)}}
 
+#include <cstddef>
 #include <vector>
+#include <unordered_map>
+#include <unordered_set>
 
 {% for namespace in config.protocol.namespace %}
 namespace {{namespace}} {
@@ -425,9 +425,8 @@ std::unique_ptr<Value> buildValue(const Char* start, const Char* end, const Char
         double value = charactersToDouble(tokenStart, tokenEnd - tokenStart, &ok);
         if (!ok)
             return nullptr;
-        int number = static_cast<int>(value);
-        if (number == value)
-            result = FundamentalValue::create(number);
+        if (value >= INT_MIN && value <= INT_MAX && static_cast<int>(value) == value)
+            result = FundamentalValue::create(static_cast<int>(value));
         else
             result = FundamentalValue::create(value);
         break;
@@ -7,6 +7,6 @@
 #include {{format_include(config.protocol.package, "Protocol")}}
 
 #include <algorithm>
+#include <climits>
 #include <cmath>
-
 #include <cstring>
@@ -6,7 +6,6 @@
 #define {{"_".join(config.protocol.namespace)}}_Values_h
 
 //#include "Allocator.h"
-//#include "Collections.h"
 //#include "Forward.h"
 
 {% for namespace in config.protocol.namespace %}
@@ -200,7 +199,7 @@ private:
         m_order.push_back(key);
     }
 
-    using Dictionary = protocol::HashMap<String, std::unique_ptr<Value>>;
+    using Dictionary = std::unordered_map<String, std::unique_ptr<Value>>;
     Dictionary m_data;
     std::vector<String> m_order;
 };
167 third_party/inspector_protocol/pdl.py vendored Normal file
@@ -0,0 +1,167 @@
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import collections
import json
import os.path
import re
import sys

description = ''

primitiveTypes = ['integer', 'number', 'boolean', 'string', 'object', 'any', 'array']


def assignType(item, type, isArray=False):
    if isArray:
        item['type'] = 'array'
        item['items'] = collections.OrderedDict()
        assignType(item['items'], type)
        return

    if type == 'enum':
        type = 'string'
    if type in primitiveTypes:
        item['type'] = type
    else:
        item['$ref'] = type


def createItem(d, experimental, deprecated, name=None):
    result = collections.OrderedDict(d)
    if name:
        result['name'] = name
    global description
    if description:
        result['description'] = description.strip()
    if experimental:
        result['experimental'] = True
    if deprecated:
        result['deprecated'] = True
    return result


def parse(data, file_name):
    protocol = collections.OrderedDict()
    protocol['version'] = collections.OrderedDict()
    protocol['domains'] = []
    domain = None
    item = None
    subitems = None
    nukeDescription = False
    global description
    lines = data.split('\n')
    for i in range(0, len(lines)):
        if nukeDescription:
            description = ''
            nukeDescription = False
        line = lines[i]
        trimLine = line.strip()

        if trimLine.startswith('#'):
            if len(description):
                description += '\n'
            description += trimLine[2:]
            continue
        else:
            nukeDescription = True

        if len(trimLine) == 0:
            continue

        match = re.compile('^(experimental )?(deprecated )?domain (.*)').match(line)
        if match:
            domain = createItem({'domain': match.group(3)}, match.group(1), match.group(2))
            protocol['domains'].append(domain)
            continue

        match = re.compile('^  depends on ([^\s]+)').match(line)
        if match:
            if 'dependencies' not in domain:
                domain['dependencies'] = []
            domain['dependencies'].append(match.group(1))
            continue

        match = re.compile('^  (experimental )?(deprecated )?type (.*) extends (array of )?([^\s]+)').match(line)
        if match:
            if 'types' not in domain:
                domain['types'] = []
            item = createItem({'id': match.group(3)}, match.group(1), match.group(2))
            assignType(item, match.group(5), match.group(4))
            domain['types'].append(item)
            continue

        match = re.compile('^  (experimental )?(deprecated )?(command|event) (.*)').match(line)
        if match:
            list = []
            if match.group(3) == 'command':
                if 'commands' in domain:
                    list = domain['commands']
                else:
                    list = domain['commands'] = []
            else:
                if 'events' in domain:
                    list = domain['events']
                else:
                    list = domain['events'] = []

            item = createItem({}, match.group(1), match.group(2), match.group(4))
            list.append(item)
            continue

        match = re.compile('^      (experimental )?(deprecated )?(optional )?(array of )?([^\s]+) ([^\s]+)').match(line)
        if match:
            param = createItem({}, match.group(1), match.group(2), match.group(6))
            if match.group(3):
                param['optional'] = True
            assignType(param, match.group(5), match.group(4))
            if match.group(5) == 'enum':
                enumliterals = param['enum'] = []
            subitems.append(param)
            continue

        match = re.compile('^    (parameters|returns|properties)').match(line)
        if match:
            subitems = item[match.group(1)] = []
            continue

        match = re.compile('^    enum').match(line)
        if match:
            enumliterals = item['enum'] = []
            continue

        match = re.compile('^version').match(line)
        if match:
            continue

        match = re.compile('^  major (\d+)').match(line)
        if match:
            protocol['version']['major'] = match.group(1)
            continue

        match = re.compile('^  minor (\d+)').match(line)
        if match:
            protocol['version']['minor'] = match.group(1)
            continue

        match = re.compile('^  redirect ([^\s]+)').match(line)
        if match:
            item['redirect'] = match.group(1)
            continue

        match = re.compile('^      (  )?[^\s]+$').match(line)
        if match:
            # enum literal
            enumliterals.append(trimLine)
            continue

        print 'Error in %s:%s, illegal token: \t%s' % (file_name, i, line)
        sys.exit(1)
    return protocol


def loads(data, file_name):
    if file_name.endswith(".pdl"):
        return parse(data, file_name)
    return json.loads(data)

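# For illustration, a minimal .pdl such as:
#   version
#     major 1
#     minor 3
#   domain Network
#     type LoaderId extends string
# parses to:
#   {"version": {"major": "1", "minor": "3"},
#    "domains": [{"domain": "Network",
#                 "types": [{"id": "LoaderId", "type": "string"}]}]}
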
@@ -4,7 +4,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
-#include {{format_include(config.protocol.package, domain.domain)}}
+#include {{format_domain_include(config.protocol.package, domain.domain)}}
 
 #include {{format_include(config.protocol.package, "Protocol")}}
 
@@ -24,7 +24,7 @@ const char Metainfo::version[] = "{{domain.version}}";
 
 namespace {{type.id}}Enum {
 {% for literal in type.enum %}
-const char* {{ literal | dash_to_camelcase}} = "{{literal}}";
+const char {{ literal | dash_to_camelcase}}[] = "{{literal}}";
 {% endfor %}
 } // namespace {{type.id}}Enum
 {% if protocol.is_exported(domain.domain, type.id) %}
@@ -211,13 +211,13 @@ public:
     }
     ~DispatcherImpl() override { }
     DispatchResponse::Status dispatch(int callId, const String& method, std::unique_ptr<protocol::DictionaryValue> messageObject) override;
-    HashMap<String, String>& redirects() { return m_redirects; }
+    std::unordered_map<String, String>& redirects() { return m_redirects; }
 
 protected:
     using CallHandler = DispatchResponse::Status (DispatcherImpl::*)(int callId, std::unique_ptr<DictionaryValue> messageObject, ErrorSupport* errors);
-    using DispatchMap = protocol::HashMap<String, CallHandler>;
+    using DispatchMap = std::unordered_map<String, CallHandler>;
     DispatchMap m_dispatchMap;
-    HashMap<String, String> m_redirects;
+    std::unordered_map<String, String> m_redirects;
 
 {% for command in domain.commands %}
 {% if "redirect" in command %}{% continue %}{% endif %}
@@ -231,7 +231,7 @@ protected:
 
 DispatchResponse::Status DispatcherImpl::dispatch(int callId, const String& method, std::unique_ptr<protocol::DictionaryValue> messageObject)
 {
-    protocol::HashMap<String, CallHandler>::iterator it = m_dispatchMap.find(method);
+    std::unordered_map<String, CallHandler>::iterator it = m_dispatchMap.find(method);
     if (it == m_dispatchMap.end()) {
         if (m_fallThroughForNotFound)
             return DispatchResponse::kFallThrough;
@@ -15,7 +15,7 @@
 // and include Domain::API version from there.
 {% for name in domain.dependencies %}
 {% if protocol.is_imported_dependency(name) %}
-#include {{format_include(config.protocol.package, name)}}
+#include {{format_domain_include(config.protocol.package, name)}}
 {% endif %}
 {% endfor %}
 {% if protocol.is_exported_domain(domain.domain) %}
@@ -46,7 +46,7 @@ using {{type.id}} = {{protocol.resolve_type(type).type}};
 
 namespace {{type.id}}Enum {
 {% for literal in type.enum %}
-{{config.protocol.export_macro}} extern const char* {{ literal | dash_to_camelcase}};
+{{config.protocol.export_macro}} extern const char {{ literal | dash_to_camelcase}}[];
 {% endfor %}
 } // namespace {{type.id}}Enum
 {% endif %}