# Copyright (c) 2013 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from __future__ import absolute_import
from __future__ import print_function
from file_util import *
import os
import re
import shutil
import string
import sys
import textwrap
import time
import itertools
import hashlib

# Determines string type for python 2 and python 3.
if sys.version_info[0] == 3:
  string_type = str
else:
  string_type = basestring


class cef_api_hash:
  """ CEF API hash calculator """

  def __init__(self, headerdir, debugdir=None, verbose=False):
    if headerdir is None or len(headerdir) == 0:
      raise AssertionError("headerdir is not specified")

    self.__headerdir = headerdir
    self.__debugdir = debugdir
    self.__verbose = verbose
    self.__debug_enabled = (self.__debugdir is not None and
                            len(self.__debugdir) > 0)

    self.platforms = ["windows", "mac", "linux"]

    self.platform_files = {
        # List of includes_win_capi from cef_paths2.gypi.
        "windows": [
            "internal/cef_types_win.h",
        ],
        # List of includes_mac_capi from cef_paths2.gypi.
        "mac": [
            "internal/cef_types_mac.h",
        ],
        # List of includes_linux_capi from cef_paths2.gypi.
        "linux": [
            "internal/cef_types_linux.h",
        ]
    }

    self.included_files = []

    # List of include/ and include/internal/ files from cef_paths2.gypi.
    self.excluded_files = [
        # includes_common
        "cef_api_hash.h",
        "cef_base.h",
        "cef_config.h",
        "cef_version.h",
        "internal/cef_export.h",
        "internal/cef_ptr.h",
        "internal/cef_string_wrappers.h",
        "internal/cef_types_wrappers.h",
        # includes_win
        "cef_sandbox_win.h",
        "internal/cef_win.h",
        # includes_mac
        "cef_application_mac.h",
        "cef_sandbox_mac.h",
        "internal/cef_mac.h",
        # includes_linux
        "internal/cef_linux.h",
    ]

  def calculate(self):
    filenames = [
        filename for filename in self.__get_filenames()
        if filename not in self.excluded_files
    ]

    objects = []
    for filename in filenames:
      if self.__verbose:
        print("Processing " + filename + "...")
      content = read_file(os.path.join(self.__headerdir, filename), True)
      platforms = [
          p for p in self.platforms if self.__is_platform_filename(filename, p)
      ]

      # cef_string.h is parsed as a special case: grab only the defined
      # CEF_STRING_TYPE_xxx declarations.
      if filename == "internal/cef_string.h":
        content_objects = self.__parse_string_type(content)
      else:
        content_objects = self.__parse_objects(content)

      for o in content_objects:
        o["text"] = self.__prepare_text(o["text"])
        o["platforms"] = platforms
        o["filename"] = filename
        objects.append(o)

    # Objects are sorted including the filename to produce stable universal
    # hashes.
    objects = sorted(objects, key=lambda o: o["name"] + "@" + o["filename"])

    if self.__debug_enabled:
      namelen = max([len(o["name"]) for o in objects])
      filenamelen = max([len(o["filename"]) for o in objects])
      dumpsig = []
      for o in objects:
        dumpsig.append(
            format(o["name"], str(namelen) + "s") + "|" + format(
                o["filename"], "" + str(filenamelen) + "s") + "|" + o["text"])
      self.__write_debug_file("objects.txt", dumpsig)

    revisions = {}

    for platform in itertools.chain(["universal"], self.platforms):
      sig = self.__get_final_sig(objects, platform)
      if self.__debug_enabled:
        self.__write_debug_file(platform + ".sig", sig)
      revstr = hashlib.sha1(sig.encode('utf-8')).hexdigest()
      revisions[platform] = revstr

    return revisions

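  # For illustration only (not part of the original script; the digests below
  # are made up): calculate() returns one SHA-1 hex digest per platform plus a
  # "universal" entry, e.g.
  #
  #   {
  #       "universal": "0123456789abcdef0123456789abcdef01234567",
  #       "windows": "...",
  #       "mac": "...",
  #       "linux": "..."
  #   }
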
  def __parse_objects(self, content):
    """ Returns the list of API objects found in the header content. """
    objects = []
    content = re.sub("//.*\n", "", content)

    # Function declarations.
    for m in re.finditer(
        r"\nCEF_EXPORT\s+?.*?\s+?(\w+)\s*?\(.*?\)\s*?;", content,
        flags=re.DOTALL):
      object = {"name": m.group(1), "text": m.group(0).strip()}
      objects.append(object)

    # Structs.
    for m in re.finditer(
        r"\ntypedef\s+?struct\s+?(\w+)\s+?\{.*?\}\s+?(\w+)\s*?;", content,
        flags=re.DOTALL):
      object = {"name": m.group(2), "text": m.group(0).strip()}
      objects.append(object)

    # Enums.
    for m in re.finditer(
        r"\ntypedef\s+?enum\s+?\{.*?\}\s+?(\w+)\s*?;", content,
        flags=re.DOTALL):
      object = {"name": m.group(1), "text": m.group(0).strip()}
      objects.append(object)

    # Typedefs.
    for m in re.finditer(r"\ntypedef\s+?.*?\s+(\w+);", content, flags=0):
      object = {"name": m.group(1), "text": m.group(0).strip()}
      objects.append(object)

    return objects

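  # Illustration (not part of the original script; the declaration is made up):
  # given header text such as
  #
  #   CEF_EXPORT int cef_do_something(cef_string_t* value);
  #
  # the function-declaration pattern in __parse_objects() yields
  #
  #   {"name": "cef_do_something",
  #    "text": "CEF_EXPORT int cef_do_something(cef_string_t* value);"}
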
  def __parse_string_type(self, content):
    """ Grabs the defined CEF_STRING_TYPE_xxx declarations. """
    objects = []
    for m in re.finditer(
        r"\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n", content,
        flags=0):
      object = {
          "name": m.group(1),
          "text": m.group(0),
      }
      objects.append(object)
    return objects

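  # Illustration (not part of the original script; the exact define is an
  # assumption about internal/cef_string.h): a line of the form
  #
  #   #define CEF_STRING_TYPE_UTF16 1
  #
  # is captured with name "CEF_STRING_TYPE_UTF16"; its text is later
  # normalized and folded into the signature.
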
  def __prepare_text(self, text):
    text = text.strip()
    text = re.sub(r"\s+", " ", text)
    text = re.sub(r"\(\s+", "(", text)
    return text

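  # Note (not part of the original script): __prepare_text() collapses runs of
  # whitespace to single spaces and drops whitespace after "(", so a
  # declaration wrapped across several lines contributes the same signature
  # text as a single-line declaration.
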
  def __get_final_sig(self, objects, platform):
    """ Returns the signature text for the given platform. The "universal"
        signature includes every object; platform signatures include only the
        objects available on that platform. """
    sig = []

    for o in objects:
      if platform == "universal" or platform in o["platforms"]:
        sig.append(o["text"])

    return "\n".join(sig)

  def __get_filenames(self):
    """ Returns file names to be processed, relative to headerdir. """
    headers = [
        os.path.join(self.__headerdir, filename)
        for filename in self.included_files
    ]

    capi_dir = os.path.join(self.__headerdir, "capi")
    headers = itertools.chain(headers, get_files(os.path.join(capi_dir, "*.h")))

    # Also include capi sub-directories.
    for root, dirs, files in os.walk(capi_dir):
      for name in dirs:
        headers = itertools.chain(headers,
                                  get_files(os.path.join(root, name, "*.h")))

    headers = itertools.chain(
        headers, get_files(os.path.join(self.__headerdir, "internal", "*.h")))

    for v in self.platform_files.values():
      headers = itertools.chain(headers,
                                [os.path.join(self.__headerdir, f) for f in v])

    normalized = [
        os.path.relpath(filename, self.__headerdir) for filename in headers
    ]
    normalized = [f.replace('\\', '/').lower() for f in normalized]

    return list(set(normalized))

  def __is_platform_filename(self, filename, platform):
    """ Returns True if the file is available on the given platform. A file
        listed only under other platforms is excluded; a file not listed under
        any platform is considered common to all platforms. """
    if platform == "universal":
      return True
    if platform not in self.platform_files:
      return False
    listed = False
    for p in self.platforms:
      if filename in self.platform_files[p]:
        if p == platform:
          return True
        else:
          listed = True
    return not listed

  def __write_debug_file(self, filename, content):
    make_dir(self.__debugdir)
    outfile = os.path.join(self.__debugdir, filename)
    dir = os.path.dirname(outfile)
    make_dir(dir)
    if not isinstance(content, string_type):
      content = "\n".join(content)
    write_file(outfile, content)


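# Programmatic usage sketch (illustrative; not part of the original script, and
# the header directory path is an assumption):
#
#   calc = cef_api_hash("../include", verbose=True)
#   hashes = calc.calculate()
#   print(hashes["universal"])
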
if __name__ == "__main__":
  from optparse import OptionParser
  import time

  disc = """
  This utility calculates the CEF API hash.
  """

  parser = OptionParser(description=disc)
  parser.add_option(
      '--cpp-header-dir',
      dest='cppheaderdir',
      metavar='DIR',
      help='input directory for C++ header files [required]')
  parser.add_option(
      '--debug-dir',
      dest='debugdir',
      metavar='DIR',
      help='intermediate directory for easy debugging')
  parser.add_option(
      '-v',
      '--verbose',
      action='store_true',
      dest='verbose',
      default=False,
      help='output detailed status information')
  (options, args) = parser.parse_args()

  # The --cpp-header-dir option is required.
  if options.cppheaderdir is None:
    parser.print_help(sys.stdout)
    sys.exit()

  # Calculate the hashes.
  c_start_time = time.time()

  calc = cef_api_hash(options.cppheaderdir, options.debugdir, options.verbose)
  revisions = calc.calculate()

  c_completed_in = time.time() - c_start_time

  print("{")
  for k in sorted(revisions.keys()):
    print(format("\"" + k + "\"", ">12s") + ": \"" + revisions[k] + "\"")
  print("}")
  # print
  # print 'Completed in: ' + str(c_completed_in)
  # print

  # print "Press any key to continue...";
  # sys.stdin.readline();
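
# Example invocation (illustrative; assumes this script is saved as
# cef_api_hash.py and that the CEF headers live in ../include):
#
#   python cef_api_hash.py --cpp-header-dir=../include --debug-dir=./hash_debug -v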