mirror of
https://bitbucket.org/chromiumembedded/cef
synced 2025-06-05 21:39:12 +02:00
Add initial support for API versioning (see #3836)
- Generated files are now created when running cef_create_projects or the new version_manager.py tool. These files are still created in the cef/ source tree (same location as before) but Git ignores them due to the generated .gitignore file. - API hashes are committed to Git as a new cef_api_versions.json file. This file is used for both code generation and CEF version calculation (replacing the previous usage of cef_api_hash.h for this purpose). It will be updated by the CEF admin before merging breaking API changes upstream. - As an added benefit to the above, contributor PRs will no longer contain generated code that is susceptible to frequent merge conflicts. - From a code generation perspective, the main difference is that we now use versioned structs (e.g. cef_browser_0_t instead of cef_browser_t) on the libcef (dll/framework) side. Most of the make_*.py tool changes are related to supporting this. - From the client perspective, you can now define CEF_API_VERSION in the project configuration (or get CEF_EXPERIMENTAL by default). This define will change the API exposed in CEF’s include/ and include/capi header files. All client-side targets including libcef_dll_wrapper will need to be recompiled when changing this define. - Examples of the new API-related define usage are provided in cef_api_version_test.h, api_version_test_impl.cc and api_version_unittest.cc. To test: - Run `ceftests --gtest_filter=ApiVersionTest.*` - Add `cef_api_version=13300` to GN_DEFINES. Re-run configure, build and ceftests steps. - Repeat with 13301, 13302, 13303 (all supported test versions).
This commit is contained in:
@ -4,16 +4,16 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import print_function
|
||||
from clang_util import clang_eval
|
||||
from file_util import *
|
||||
import hashlib
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import string
|
||||
import sys
|
||||
import textwrap
|
||||
import time
|
||||
import itertools
|
||||
import hashlib
|
||||
from version_util import EXP_VERSION
|
||||
|
||||
# Determines string type for python 2 and python 3.
|
||||
if sys.version_info[0] == 3:
|
||||
@ -22,93 +22,172 @@ else:
|
||||
string_type = basestring
|
||||
|
||||
|
||||
def _run_clang_eval(filename, content, api_version, added_defines, verbose):
  """ Evaluate header |content| with clang and return the post-processed
      output, or None on failure.

      Arguments:
        filename      -- Header path (used for assertions and the
                         'test/' / platform checks).
        content       -- Raw header file contents to evaluate.
        api_version   -- API version number; EXP_VERSION means experimental
                         (no CEF_API_VERSION define is added).
        added_defines -- Optional list of extra preprocessor defines.
        verbose       -- If True, pass verbose output through clang_eval.
  """
  # Add a tag so we know where the header-specific output begins.
  tag = 'int begin_includes_tag;\n'
  marker = '#ifdef __cplusplus\nextern "C" {'
  pos = content.find(marker)
  assert pos > 0, filename
  content = content[:pos] + tag + content[pos:]

  defines = [
      # Makes sure CEF_EXPORT is defined.
      'USING_CEF_SHARED',

      # Avoid include of generated headers.
      'GENERATING_CEF_API_HASH',
  ]

  # Idiomatic membership test instead of find() >= 0.
  if 'test/' in filename:
    # Avoids errors parsing test includes.
    defines.append('UNIT_TEST')

  # Not the experimental version.
  api_version = int(api_version)
  if api_version != EXP_VERSION:
    # Specify the exact version.
    defines.append('CEF_API_VERSION=%d' % api_version)

  if added_defines is not None:
    defines.extend(added_defines)

  includes = [
      # Includes relative to the 'src/cef' directory.
      '.',
      # Includes relative to the 'src' directory.
      '..',
  ]

  result = clang_eval(
      filename,
      content,
      defines=defines,
      includes=includes,
      as_cpp=False,
      verbose=verbose)
  if result is None:
    return None

  # Keep only the output after the tag (the header-specific portion).
  pos = result.find(tag)
  assert pos > 0, filename
  result = result[pos + len(tag):]

  replacements = [
      # Undo substitutions from cef_export.h
      ['__declspec(dllimport)', 'CEF_EXPORT'],
      ['__attribute__((visibility("default")))', 'CEF_EXPORT'],
      ['__stdcall', ''],
  ]

  # Renamed loop variables so they don't shadow the earlier 'find' string.
  for pattern, replacement in replacements:
    result = result.replace(pattern, replacement)

  return result
|
||||
|
||||
|
||||
class cef_api_hash:
|
||||
""" CEF API hash calculator """
|
||||
|
||||
def __init__(self, headerdir, debugdir=None, verbose=False):
|
||||
def __init__(self, headerdir, verbose=False):
|
||||
if headerdir is None or len(headerdir) == 0:
|
||||
raise AssertionError("headerdir is not specified")
|
||||
|
||||
self.__headerdir = headerdir
|
||||
self.__debugdir = debugdir
|
||||
self.__verbose = verbose
|
||||
self.__debug_enabled = not (self.__debugdir is
|
||||
None) and len(self.__debugdir) > 0
|
||||
|
||||
self.platforms = ["windows", "mac", "linux"]
|
||||
|
||||
cef_dir = os.path.abspath(os.path.join(self.__headerdir, os.pardir))
|
||||
|
||||
# Read the variables list from the autogenerated cef_paths.gypi file.
|
||||
cef_paths = eval_file(os.path.join(cef_dir, 'cef_paths.gypi'))
|
||||
cef_paths = cef_paths['variables']
|
||||
|
||||
# Read the variables list from the manually edited cef_paths2.gypi file.
|
||||
cef_paths2 = eval_file(os.path.join(cef_dir, 'cef_paths2.gypi'))
|
||||
cef_paths2 = cef_paths2['variables']
|
||||
|
||||
# Excluded files (paths relative to the include/ directory).
|
||||
excluded_files = []
|
||||
|
||||
# List of platform-specific C API include/ files.
|
||||
self.platform_files = {
|
||||
# List of includes_win_capi from cef_paths2.gypi.
|
||||
"windows": [
|
||||
"internal/cef_app_win.h",
|
||||
"internal/cef_types_win.h",
|
||||
],
|
||||
# List of includes_mac_capi from cef_paths2.gypi.
|
||||
"mac": [
|
||||
"internal/cef_types_mac.h",
|
||||
],
|
||||
# List of includes_linux_capi from cef_paths2.gypi.
|
||||
"linux": [
|
||||
"internal/cef_types_linux.h",
|
||||
]
|
||||
"windows":
|
||||
self.__get_filenames(cef_dir, cef_paths2['includes_win_capi'],
|
||||
excluded_files),
|
||||
"mac":
|
||||
self.__get_filenames(cef_dir, cef_paths2['includes_mac_capi'],
|
||||
excluded_files),
|
||||
"linux":
|
||||
self.__get_filenames(cef_dir, cef_paths2['includes_linux_capi'],
|
||||
excluded_files)
|
||||
}
|
||||
|
||||
self.included_files = []
|
||||
# List of all C API include/ files.
|
||||
paths = cef_paths2['includes_capi'] + cef_paths2['includes_common_capi'] + \
|
||||
cef_paths2['includes_linux_capi'] + cef_paths2['includes_mac_capi'] + \
|
||||
cef_paths2['includes_win_capi'] + cef_paths['autogen_capi_includes']
|
||||
self.filenames = self.__get_filenames(cef_dir, paths, excluded_files)
|
||||
|
||||
# List of include/ and include/internal/ files from cef_paths2.gypi.
|
||||
self.excluded_files = [
|
||||
# includes_common
|
||||
"cef_api_hash.h",
|
||||
"cef_base.h",
|
||||
"cef_version.h",
|
||||
"internal/cef_export.h",
|
||||
"internal/cef_ptr.h",
|
||||
"internal/cef_string_wrappers.h",
|
||||
"internal/cef_time_wrappers.h",
|
||||
"internal/cef_types_wrappers.h",
|
||||
# includes_win
|
||||
"cef_sandbox_win.h",
|
||||
"internal/cef_win.h",
|
||||
# includes_mac
|
||||
"cef_application_mac.h",
|
||||
"cef_sandbox_mac.h",
|
||||
"internal/cef_mac.h",
|
||||
# includes_linux
|
||||
"internal/cef_linux.h",
|
||||
]
|
||||
self.filecontents = {}
|
||||
self.filecontentobjs = {}
|
||||
|
||||
def calculate(self):
|
||||
filenames = [
|
||||
filename for filename in self.__get_filenames()
|
||||
if not filename in self.excluded_files
|
||||
]
|
||||
|
||||
objects = []
|
||||
for filename in filenames:
|
||||
# Cache values that will not change between calls to calculate().
|
||||
for filename in self.filenames:
|
||||
if self.__verbose:
|
||||
print("Processing " + filename + "...")
|
||||
|
||||
assert not filename in self.filecontents, filename
|
||||
assert not filename in self.filecontentobjs, filename
|
||||
|
||||
content = read_file(os.path.join(self.__headerdir, filename), True)
|
||||
platforms = list([
|
||||
p for p in self.platforms if self.__is_platform_filename(filename, p)
|
||||
])
|
||||
content_objects = None
|
||||
|
||||
# Parse cef_string.h happens in special case: grab only defined CEF_STRING_TYPE_xxx declaration
|
||||
content_objects = None
|
||||
if filename == "internal/cef_string.h":
|
||||
content_objects = self.__parse_string_type(content)
|
||||
elif content.find('#if CEF_API') >= 0:
|
||||
# Needs to be passed to clang with version-specific defines.
|
||||
self.filecontents[filename] = content
|
||||
else:
|
||||
content_objects = self.__parse_objects(content)
|
||||
|
||||
for o in content_objects:
|
||||
o["text"] = self.__prepare_text(o["text"])
|
||||
o["platforms"] = platforms
|
||||
o["filename"] = filename
|
||||
objects.append(o)
|
||||
if not content_objects is None:
|
||||
self.__prepare_objects(filename, content_objects)
|
||||
self.filecontentobjs[filename] = content_objects
|
||||
|
||||
def calculate(self, api_version, debug_dir=None, added_defines=None):
|
||||
debug_enabled = not (debug_dir is None) and len(debug_dir) > 0
|
||||
|
||||
objects = []
|
||||
for filename in self.filenames:
|
||||
if self.__verbose:
|
||||
print("Processing " + filename + "...")
|
||||
|
||||
content = self.filecontents.get(filename, None)
|
||||
if not content is None:
|
||||
assert content.find('#if CEF_API') >= 0, filename
|
||||
content = _run_clang_eval(filename, content, api_version, added_defines,
|
||||
self.__verbose)
|
||||
if content is None:
|
||||
sys.stderr.write(
|
||||
'ERROR: Failed to compute API hash for %s\n' % filename)
|
||||
return False
|
||||
if debug_enabled:
|
||||
self.__write_debug_file(
|
||||
debug_dir, 'clang-' + filename.replace('/', '-'), content)
|
||||
content_objects = self.__parse_objects(content)
|
||||
self.__prepare_objects(filename, content_objects)
|
||||
else:
|
||||
content_objects = self.filecontentobjs.get(filename, None)
|
||||
|
||||
assert not content_objects is None, filename
|
||||
objects.extend(content_objects)
|
||||
|
||||
# objects will be sorted including filename, to make stable universal hashes
|
||||
objects = sorted(objects, key=lambda o: o["name"] + "@" + o["filename"])
|
||||
|
||||
if self.__debug_enabled:
|
||||
if debug_enabled:
|
||||
namelen = max([len(o["name"]) for o in objects])
|
||||
filenamelen = max([len(o["filename"]) for o in objects])
|
||||
dumpsig = []
|
||||
@ -116,14 +195,14 @@ class cef_api_hash:
|
||||
dumpsig.append(
|
||||
format(o["name"], str(namelen) + "s") + "|" + format(
|
||||
o["filename"], "" + str(filenamelen) + "s") + "|" + o["text"])
|
||||
self.__write_debug_file("objects.txt", dumpsig)
|
||||
self.__write_debug_file(debug_dir, "objects.txt", dumpsig)
|
||||
|
||||
revisions = {}
|
||||
|
||||
for platform in itertools.chain(["universal"], self.platforms):
|
||||
sig = self.__get_final_sig(objects, platform)
|
||||
if self.__debug_enabled:
|
||||
self.__write_debug_file(platform + ".sig", sig)
|
||||
if debug_enabled:
|
||||
self.__write_debug_file(debug_dir, platform + ".sig", sig)
|
||||
revstr = hashlib.sha1(sig.encode('utf-8')).hexdigest()
|
||||
revisions[platform] = revstr
|
||||
|
||||
@ -152,7 +231,8 @@ class cef_api_hash:
|
||||
|
||||
# enums
|
||||
for m in re.finditer(
|
||||
r"\ntypedef\s+?enum\s+?\{.*?\}\s+?(\w+)\s*?;", content, flags=re.DOTALL):
|
||||
r"\ntypedef\s+?enum\s+?\{.*?\}\s+?(\w+)\s*?;", content,
|
||||
flags=re.DOTALL):
|
||||
object = {"name": m.group(1), "text": m.group(0).strip()}
|
||||
objects.append(object)
|
||||
|
||||
@ -163,11 +243,20 @@ class cef_api_hash:
|
||||
|
||||
return objects
|
||||
|
||||
def __prepare_objects(self, filename, objects):
|
||||
platforms = list(
|
||||
[p for p in self.platforms if self.__is_platform_filename(filename, p)])
|
||||
for o in objects:
|
||||
o["text"] = self.__prepare_text(o["text"])
|
||||
o["platforms"] = platforms
|
||||
o["filename"] = filename
|
||||
|
||||
def __parse_string_type(self, content):
|
||||
""" Grab defined CEF_STRING_TYPE_xxx """
|
||||
objects = []
|
||||
for m in re.finditer(
|
||||
r"\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n", content,
|
||||
r"\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n",
|
||||
content,
|
||||
flags=0):
|
||||
object = {
|
||||
"name": m.group(1),
|
||||
@ -191,35 +280,20 @@ class cef_api_hash:
|
||||
|
||||
return "\n".join(sig)
|
||||
|
||||
def __get_filenames(self):
|
||||
def __get_filenames(self, cef_dir, paths, excluded_files):
|
||||
""" Returns file names to be processed, relative to headerdir """
|
||||
headers = [
|
||||
os.path.join(self.__headerdir, filename)
|
||||
for filename in self.included_files
|
||||
filenames = [
|
||||
os.path.relpath(os.path.join(cef_dir, filename),
|
||||
self.__headerdir).replace('\\', '/').lower()
|
||||
for filename in paths
|
||||
]
|
||||
|
||||
capi_dir = os.path.join(self.__headerdir, "capi")
|
||||
headers = itertools.chain(headers, get_files(os.path.join(capi_dir, "*.h")))
|
||||
if len(excluded_files) == 0:
|
||||
return filenames
|
||||
|
||||
# Also include capi sub-directories.
|
||||
for root, dirs, files in os.walk(capi_dir):
|
||||
for name in dirs:
|
||||
headers = itertools.chain(headers,
|
||||
get_files(os.path.join(root, name, "*.h")))
|
||||
|
||||
headers = itertools.chain(
|
||||
headers, get_files(os.path.join(self.__headerdir, "internal", "*.h")))
|
||||
|
||||
for v in self.platform_files.values():
|
||||
headers = itertools.chain(headers,
|
||||
[os.path.join(self.__headerdir, f) for f in v])
|
||||
|
||||
normalized = [
|
||||
os.path.relpath(filename, self.__headerdir) for filename in headers
|
||||
return [
|
||||
filename for filename in filenames if not filename in excluded_files
|
||||
]
|
||||
normalized = [f.replace('\\', '/').lower() for f in normalized]
|
||||
|
||||
return list(set(normalized))
|
||||
|
||||
def __is_platform_filename(self, filename, platform):
|
||||
if platform == "universal":
|
||||
@ -235,9 +309,9 @@ class cef_api_hash:
|
||||
listed = True
|
||||
return not listed
|
||||
|
||||
def __write_debug_file(self, filename, content):
|
||||
make_dir(self.__debugdir)
|
||||
outfile = os.path.join(self.__debugdir, filename)
|
||||
def __write_debug_file(self, debug_dir, filename, content):
|
||||
make_dir(debug_dir)
|
||||
outfile = os.path.join(debug_dir, filename)
|
||||
dir = os.path.dirname(outfile)
|
||||
make_dir(dir)
|
||||
if not isinstance(content, string_type):
|
||||
@ -282,14 +356,16 @@ if __name__ == "__main__":
|
||||
c_start_time = time.time()
|
||||
|
||||
calc = cef_api_hash(options.cppheaderdir, options.debugdir, options.verbose)
|
||||
revisions = calc.calculate()
|
||||
revisions = calc.calculate(api_version=EXP_VERSION)
|
||||
|
||||
c_completed_in = time.time() - c_start_time
|
||||
|
||||
print("{")
|
||||
for k in sorted(revisions.keys()):
|
||||
print(format("\"" + k + "\"", ">12s") + ": \"" + revisions[k] + "\"")
|
||||
print("}")
|
||||
if bool(revisions):
|
||||
print("{")
|
||||
for k in sorted(revisions.keys()):
|
||||
print(format("\"" + k + "\"", ">12s") + ": \"" + revisions[k] + "\"")
|
||||
print("}")
|
||||
|
||||
# print
|
||||
# print 'Completed in: ' + str(c_completed_in)
|
||||
# print
|
||||
|
Reference in New Issue
Block a user