mirror of https://bitbucket.org/chromiumembedded/cef
synced 2025-06-05 21:39:12 +02:00
Apply yapf formatting to all Python files (issue #2171)
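The change below is a purely mechanical reformat. As a rough sketch of how such a pass can be reproduced with yapf's Python API (not part of this commit: the glob pattern, the 'chromium' base style, and the print output are illustrative assumptions; CEF's actual style configuration may differ):

    # Hypothetical reformatting pass, not part of this commit.
    # Assumes the yapf package is installed. The 'chromium' base style
    # (2-space indents, 80 columns) approximates the formatting visible
    # in this diff, but the project's real style file may differ.
    import glob

    from yapf.yapflib.yapf_api import FormatFile

    for path in glob.glob('tools/*.py'):
      # FormatFile returns (reformatted_source, encoding, changed);
      # with in_place=True the result is written back to the file.
      _, _, changed = FormatFile(path, style_config='chromium', in_place=True)
      if changed:
        print('reformatted ' + path)

The same pass can also be run from the command line by invoking yapf with -i on each file.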
@@ -21,7 +21,6 @@ depot_tools_archive_url = 'https://storage.googleapis.com/chrome-infra/depot_too
 
 cef_git_url = 'https://bitbucket.org/chromiumembedded/cef.git'
 
-
 ##
 # Global system variables.
 ##
@@ -29,15 +28,16 @@ cef_git_url = 'https://bitbucket.org/chromiumembedded/cef.git'
 # Script directory.
 script_dir = os.path.dirname(__file__)
 
-
 ##
 # Helper functions.
 ##
 
+
 def msg(message):
   """ Output a message. """
   sys.stdout.write('--> ' + message + "\n")
 
+
 def run(command_line, working_dir, depot_tools_dir=None, output_file=None):
   """ Runs the specified command. """
   # add depot_tools to the path
@@ -51,27 +51,34 @@ def run(command_line, working_dir, depot_tools_dir=None, output_file=None):
   args = shlex.split(command_line.replace('\\', '\\\\'))
 
   if not output_file:
-    return subprocess.check_call(args, cwd=working_dir, env=env,
-                                 shell=(sys.platform == 'win32'))
+    return subprocess.check_call(
+        args, cwd=working_dir, env=env, shell=(sys.platform == 'win32'))
   with open(output_file, "w") as f:
-    return subprocess.check_call(args, cwd=working_dir, env=env,
-                                 shell=(sys.platform == 'win32'),
-                                 stderr=subprocess.STDOUT, stdout=f)
+    return subprocess.check_call(
+        args,
+        cwd=working_dir,
+        env=env,
+        shell=(sys.platform == 'win32'),
+        stderr=subprocess.STDOUT,
+        stdout=f)
 
+
 def create_directory(path):
   """ Creates a directory if it doesn't already exist. """
   if not os.path.exists(path):
-    msg("Creating directory %s" % (path));
+    msg("Creating directory %s" % (path))
     if not options.dryrun:
       os.makedirs(path)
 
+
 def delete_directory(path):
   """ Removes an existing directory. """
   if os.path.exists(path):
-    msg("Removing directory %s" % (path));
+    msg("Removing directory %s" % (path))
    if not options.dryrun:
       shutil.rmtree(path, onerror=onerror)
 
+
 def copy_directory(source, target, allow_overwrite=False):
   """ Copies a directory from source to target. """
   if not options.dryrun and os.path.exists(target):
@@ -79,10 +86,11 @@ def copy_directory(source, target, allow_overwrite=False):
       raise Exception("Directory %s already exists" % (target))
     remove_directory(target)
   if os.path.exists(source):
-    msg("Copying directory %s to %s" % (source, target));
+    msg("Copying directory %s to %s" % (source, target))
     if not options.dryrun:
       shutil.copytree(source, target)
 
+
 def move_directory(source, target, allow_overwrite=False):
   """ Copies a directory from source to target. """
   if not options.dryrun and os.path.exists(target):
@@ -90,14 +98,16 @@ def move_directory(source, target, allow_overwrite=False):
       raise Exception("Directory %s already exists" % (target))
     remove_directory(target)
   if os.path.exists(source):
-    msg("Moving directory %s to %s" % (source, target));
+    msg("Moving directory %s to %s" % (source, target))
     if not options.dryrun:
       shutil.move(source, target)
 
+
 def is_git_checkout(path):
   """ Returns true if the path represents a git checkout. """
   return os.path.exists(os.path.join(path, '.git'))
 
+
 def exec_cmd(cmd, path):
   """ Execute the specified command and return the result. """
   out = ''
@@ -105,7 +115,9 @@ def exec_cmd(cmd, path):
   sys.stdout.write("-------- Running \"%s\" in \"%s\"...\n" % (cmd, path))
   parts = cmd.split()
   try:
-    process = subprocess.Popen(parts, cwd=path,
-                               stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE,
-                               shell=(sys.platform == 'win32'))
+    process = subprocess.Popen(
+        parts,
+        cwd=path,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        shell=(sys.platform == 'win32'))
@@ -116,6 +128,7 @@ def exec_cmd(cmd, path):
     raise
   return {'out': out, 'err': err}
 
+
 def get_git_hash(path, branch):
   """ Returns the git hash for the specified branch/tag/hash. """
   cmd = "%s rev-parse %s" % (git_exe, branch)
@@ -124,6 +137,7 @@ def get_git_hash(path, branch):
     return result['out'].strip()
   return 'Unknown'
 
+
 def get_git_url(path):
   """ Returns the origin url for the specified path. """
   cmd = "%s config --get remote.origin.url" % (git_exe)
@@ -132,6 +146,7 @@ def get_git_url(path):
     return result['out'].strip()
   return 'Unknown'
 
+
 def download_and_extract(src, target):
   """ Extracts the contents of src, which may be a URL or local file, to the
       target directory. """
@@ -169,6 +184,7 @@ def download_and_extract(src, target):
   if temporary and os.path.exists(archive_path):
     os.remove(archive_path)
 
+
 def read_file(path):
   """ Read a file. """
   if os.path.exists(path):
@@ -179,11 +195,13 @@ def read_file(path):
   else:
     raise Exception("Path does not exist: %s" % (path))
 
+
 def read_config_file(path):
   """ Read a configuration file. """
   # Parse the contents.
   return eval(read_file(path), {'__builtins__': None}, None)
 
+
 def write_config_file(path, contents):
   """ Write a configuration file. """
   msg('Writing file: %s' % path)
@@ -195,6 +213,7 @@ def write_config_file(path, contents):
     fp.write("}\n")
     fp.close()
 
+
 def read_branch_config_file(path):
   """ Read the CEF branch from the specified path. """
   config_file = os.path.join(path, 'cef.branch')
@@ -204,12 +223,14 @@ def read_branch_config_file(path):
     return contents['branch']
   return ''
 
+
 def write_branch_config_file(path, branch):
   """ Write the CEF branch to the specified path. """
   config_file = os.path.join(path, 'cef.branch')
   if not os.path.isfile(config_file):
     write_config_file(config_file, {'branch': branch})
 
+
 def remove_deps_entry(path, entry):
   """ Remove an entry from the Chromium DEPS file at the specified path. """
   msg('Updating DEPS file: %s' % path)
@@ -233,6 +254,7 @@ def remove_deps_entry(path, entry):
       fp.write(line)
   fp.close()
 
+
 def apply_deps_patch():
   """ Patch the Chromium DEPS file if necessary. """
   # Starting with 43.0.2357.126 the DEPS file is now 100% Git and the .DEPS.git
@@ -251,8 +273,8 @@ def apply_deps_patch():
     # Attempt to apply the DEPS patch file that may exist with newer branches.
     patch_tool = os.path.join(cef_dir, 'tools', 'patcher.py')
     run('%s %s --patch-file "%s" --patch-dir "%s"' %
-        (python_exe, patch_tool, patch_file, chromium_src_dir),
-        chromium_src_dir, depot_tools_dir)
+        (python_exe, patch_tool, patch_file,
+         chromium_src_dir), chromium_src_dir, depot_tools_dir)
   elif cef_branch != 'trunk' and int(cef_branch) <= 1916:
     # Release branch DEPS files older than 37.0.2007.0 may include a 'src'
     # entry. This entry needs to be removed otherwise `gclient sync` will
@@ -261,6 +283,7 @@ def apply_deps_patch():
   else:
     raise Exception("Path does not exist: %s" % (deps_path))
 
+
 def onerror(func, path, exc_info):
   """
   Error handler for ``shutil.rmtree``.
@@ -299,10 +322,17 @@ distribution of CEF.
 parser = OptionParser(description=disc)
 
 # Setup options.
-parser.add_option('--download-dir', dest='downloaddir', metavar='DIR',
-                  help='Download directory with no spaces [required].')
-parser.add_option('--depot-tools-dir', dest='depottoolsdir', metavar='DIR',
-                  help='Download directory for depot_tools.', default='')
+parser.add_option(
+    '--download-dir',
+    dest='downloaddir',
+    metavar='DIR',
+    help='Download directory with no spaces [required].')
+parser.add_option(
+    '--depot-tools-dir',
+    dest='depottoolsdir',
+    metavar='DIR',
+    help='Download directory for depot_tools.',
+    default='')
 parser.add_option('--depot-tools-archive', dest='depottoolsarchive',
                   help='Zip archive file that contains a single top-level '+\
                        'depot_tools directory.', default='')
@@ -331,8 +361,11 @@ parser.add_option('--chromium-checkout', dest='chromiumcheckout',
                   default='')
 
 # Miscellaneous options.
-parser.add_option('--force-config',
-                  action='store_true', dest='forceconfig', default=False,
-                  help='Force creation of a new gclient config file.')
+parser.add_option(
+    '--force-config',
+    action='store_true',
+    dest='forceconfig',
+    default=False,
+    help='Force creation of a new gclient config file.')
 parser.add_option('--force-clean',
                   action='store_true', dest='forceclean', default=False,
@@ -342,8 +375,11 @@ parser.add_option('--force-clean-deps',
                   action='store_true', dest='forcecleandeps', default=False,
                   help='Force a clean checkout of Chromium dependencies. Used'+\
                        ' in combination with --force-clean.')
-parser.add_option('--dry-run',
-                  action='store_true', dest='dryrun', default=False,
-                  help="Output commands without executing them.")
+parser.add_option(
+    '--dry-run',
+    action='store_true',
+    dest='dryrun',
+    default=False,
+    help="Output commands without executing them.")
 parser.add_option('--dry-run-platform', dest='dryrunplatform', default=None,
                   help='Simulate a dry run on the specified platform '+\
@@ -365,11 +401,17 @@ parser.add_option('--no-cef-update',
                   help='Do not update CEF. Pass --force-build or '+\
                        '--force-distrib if you desire a new build or '+\
                        'distribution.')
-parser.add_option('--no-chromium-update',
-                  action='store_true', dest='nochromiumupdate', default=False,
-                  help='Do not update Chromium.')
-parser.add_option('--no-depot-tools-update',
-                  action='store_true', dest='nodepottoolsupdate', default=False,
-                  help='Do not update depot_tools.')
+parser.add_option(
+    '--no-chromium-update',
+    action='store_true',
+    dest='nochromiumupdate',
+    default=False,
+    help='Do not update Chromium.')
+parser.add_option(
+    '--no-depot-tools-update',
+    action='store_true',
+    dest='nodepottoolsupdate',
+    default=False,
+    help='Do not update depot_tools.')
 
 # Build-related options.
@@ -378,62 +420,113 @@ parser.add_option('--force-build',
                   help='Force CEF debug and release builds. This builds '+\
                        '[build-target] on all platforms and chrome_sandbox '+\
                        'on Linux.')
-parser.add_option('--no-build',
-                  action='store_true', dest='nobuild', default=False,
-                  help='Do not build CEF.')
-parser.add_option('--build-target', dest='buildtarget', default='cefclient',
-                  help='Target name(s) to build (defaults to "cefclient").')
-parser.add_option('--build-tests',
-                  action='store_true', dest='buildtests', default=False,
-                  help='Also build the ceftests target.')
-parser.add_option('--no-debug-build',
-                  action='store_true', dest='nodebugbuild', default=False,
-                  help="Don't perform the CEF debug build.")
-parser.add_option('--no-release-build',
-                  action='store_true', dest='noreleasebuild', default=False,
-                  help="Don't perform the CEF release build.")
-parser.add_option('--verbose-build',
-                  action='store_true', dest='verbosebuild', default=False,
-                  help='Show all command lines while building.')
+parser.add_option(
+    '--no-build',
+    action='store_true',
+    dest='nobuild',
+    default=False,
+    help='Do not build CEF.')
+parser.add_option(
+    '--build-target',
+    dest='buildtarget',
+    default='cefclient',
+    help='Target name(s) to build (defaults to "cefclient").')
+parser.add_option(
+    '--build-tests',
+    action='store_true',
+    dest='buildtests',
+    default=False,
+    help='Also build the ceftests target.')
+parser.add_option(
+    '--no-debug-build',
+    action='store_true',
+    dest='nodebugbuild',
+    default=False,
+    help="Don't perform the CEF debug build.")
+parser.add_option(
+    '--no-release-build',
+    action='store_true',
+    dest='noreleasebuild',
+    default=False,
+    help="Don't perform the CEF release build.")
+parser.add_option(
+    '--verbose-build',
+    action='store_true',
+    dest='verbosebuild',
+    default=False,
+    help='Show all command lines while building.')
 parser.add_option('--build-log-file',
                   action='store_true', dest='buildlogfile', default=False,
                   help='Write build logs to file. The file will be named '+\
                        '"build-[branch]-[debug|release].log" in the download '+\
                        'directory.')
-parser.add_option('--x64-build',
-                  action='store_true', dest='x64build', default=False,
-                  help='Create a 64-bit build.')
-parser.add_option('--arm-build',
-                  action='store_true', dest='armbuild', default=False,
-                  help='Create an ARM build.')
+parser.add_option(
+    '--x64-build',
+    action='store_true',
+    dest='x64build',
+    default=False,
+    help='Create a 64-bit build.')
+parser.add_option(
+    '--arm-build',
+    action='store_true',
+    dest='armbuild',
+    default=False,
+    help='Create an ARM build.')
 
 # Distribution-related options.
-parser.add_option('--force-distrib',
-                  action='store_true', dest='forcedistrib', default=False,
-                  help='Force creation of a CEF binary distribution.')
-parser.add_option('--no-distrib',
-                  action='store_true', dest='nodistrib', default=False,
-                  help="Don't create a CEF binary distribution.")
-parser.add_option('--minimal-distrib',
-                  action='store_true', dest='minimaldistrib', default=False,
-                  help='Create a minimal CEF binary distribution.')
-parser.add_option('--minimal-distrib-only',
-                  action='store_true', dest='minimaldistribonly', default=False,
-                  help='Create a minimal CEF binary distribution only.')
-parser.add_option('--client-distrib',
-                  action='store_true', dest='clientdistrib', default=False,
-                  help='Create a client CEF binary distribution.')
-parser.add_option('--client-distrib-only',
-                  action='store_true', dest='clientdistribonly', default=False,
-                  help='Create a client CEF binary distribution only.')
-parser.add_option('--no-distrib-docs',
-                  action='store_true', dest='nodistribdocs', default=False,
-                  help="Don't create CEF documentation.")
-parser.add_option('--no-distrib-archive',
-                  action='store_true', dest='nodistribarchive', default=False,
-                  help="Don't create archives for output directories.")
-parser.add_option('--clean-artifacts',
-                  action='store_true', dest='cleanartifacts', default=False,
-                  help='Clean the artifacts output directory.')
+parser.add_option(
+    '--force-distrib',
+    action='store_true',
+    dest='forcedistrib',
+    default=False,
+    help='Force creation of a CEF binary distribution.')
+parser.add_option(
+    '--no-distrib',
+    action='store_true',
+    dest='nodistrib',
+    default=False,
+    help="Don't create a CEF binary distribution.")
+parser.add_option(
+    '--minimal-distrib',
+    action='store_true',
+    dest='minimaldistrib',
+    default=False,
+    help='Create a minimal CEF binary distribution.')
+parser.add_option(
+    '--minimal-distrib-only',
+    action='store_true',
+    dest='minimaldistribonly',
+    default=False,
+    help='Create a minimal CEF binary distribution only.')
+parser.add_option(
+    '--client-distrib',
+    action='store_true',
+    dest='clientdistrib',
+    default=False,
+    help='Create a client CEF binary distribution.')
+parser.add_option(
+    '--client-distrib-only',
+    action='store_true',
+    dest='clientdistribonly',
+    default=False,
+    help='Create a client CEF binary distribution only.')
+parser.add_option(
+    '--no-distrib-docs',
+    action='store_true',
+    dest='nodistribdocs',
+    default=False,
+    help="Don't create CEF documentation.")
+parser.add_option(
+    '--no-distrib-archive',
+    action='store_true',
+    dest='nodistribarchive',
+    default=False,
+    help="Don't create archives for output directories.")
+parser.add_option(
+    '--clean-artifacts',
+    action='store_true',
+    dest='cleanartifacts',
+    default=False,
+    help='Clean the artifacts output directory.')
 parser.add_option('--distrib-subdir', dest='distribsubdir',
                   help='CEF distrib dir name, child of '+\
@@ -584,7 +677,6 @@ if platform == 'windows':
   # Avoid errors when the "vs_toolchain.py update" Chromium hook runs.
   os.environ['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'
 
-
 ##
 # Manage the download directory.
 ##
@@ -595,7 +687,6 @@ create_directory(download_dir)
 
 msg("Download Directory: %s" % (download_dir))
 
-
 ##
 # Manage the depot_tools directory.
 ##
@@ -629,9 +720,9 @@ if not options.nodepottoolsupdate:
   # On Windows this will download required python and git binaries.
   msg('Updating depot_tools')
   if platform == 'windows':
-    run('update_depot_tools.bat', depot_tools_dir, depot_tools_dir);
+    run('update_depot_tools.bat', depot_tools_dir, depot_tools_dir)
   else:
-    run('update_depot_tools', depot_tools_dir, depot_tools_dir);
+    run('update_depot_tools', depot_tools_dir, depot_tools_dir)
 
 # Determine the executables to use.
 if platform == 'windows':
@@ -648,7 +739,6 @@ else:
   git_exe = 'git'
   python_exe = 'python'
 
-
 ##
 # Manage the cef directory.
 ##
@@ -722,7 +812,6 @@ if not options.nocefupdate and os.path.exists(cef_dir):
 else:
   cef_checkout_changed = False
 
-
 ##
 # Manage the out directory.
 ##
@@ -735,7 +824,6 @@ if options.forceclean and os.path.exists(out_dir):
 
 msg("CEF Output Directory: %s" % (out_dir))
 
-
 ##
 # Manage the chromium directory.
 ##
@@ -749,7 +837,7 @@ cef_src_dir = os.path.join(chromium_src_dir, 'cef')
 out_src_dir = os.path.join(chromium_src_dir, 'out')
 
 if options.chromiumurl != '':
-  chromium_url = options.chromiumurl;
+  chromium_url = options.chromiumurl
 else:
   chromium_url = 'https://chromium.googlesource.com/chromium/src.git'
 
@@ -899,7 +987,6 @@ elif not out_src_dir_exists:
   # Write the config file for identifying the branch.
   write_branch_config_file(out_src_dir, cef_branch)
 
-
 ##
 # Build CEF.
 ##
@@ -995,7 +1082,6 @@ elif not options.nobuild:
   msg('Not building. The source hashes have not changed and ' +
       'the output folder "%s" already exists' % (out_src_dir))
 
-
 ##
 # Create the CEF binary distribution.
 ##

@@ -21,12 +21,13 @@ class cef_api_hash:
     if headerdir is None or len(headerdir) == 0:
       raise AssertionError("headerdir is not specified")
 
-    self.__headerdir = headerdir;
-    self.__debugdir = debugdir;
-    self.__verbose = verbose;
-    self.__debug_enabled = not (self.__debugdir is None) and len(self.__debugdir) > 0;
+    self.__headerdir = headerdir
+    self.__debugdir = debugdir
+    self.__verbose = verbose
+    self.__debug_enabled = not (self.__debugdir is
+                                None) and len(self.__debugdir) > 0
 
-    self.platforms = [ "windows", "macosx", "linux" ];
+    self.platforms = ["windows", "macosx", "linux"]
 
     self.platform_files = {
         "windows": [
@@ -38,10 +39,9 @@ class cef_api_hash:
         "linux": [
             "internal/cef_types_linux.h",
         ]
-    };
+    }
 
-    self.included_files = [
-    ];
+    self.included_files = []
 
     self.excluded_files = [
         "cef_version.h",
@@ -51,17 +51,22 @@ class cef_api_hash:
         "internal/cef_win.h",
         "internal/cef_mac.h",
         "internal/cef_linux.h",
-    ];
+    ]
 
   def calculate(self):
-    filenames = [filename for filename in self.__get_filenames() if not filename in self.excluded_files]
+    filenames = [
+        filename for filename in self.__get_filenames()
+        if not filename in self.excluded_files
+    ]
 
     objects = []
     for filename in filenames:
       if self.__verbose:
         print "Processing " + filename + "..."
       content = read_file(os.path.join(self.__headerdir, filename), True)
-      platforms = list([p for p in self.platforms if self.__is_platform_filename(filename, p)])
+      platforms = list([
+          p for p in self.platforms if self.__is_platform_filename(filename, p)
+      ])
 
       # Parse cef_string.h happens in special case: grab only defined CEF_STRING_TYPE_xxx declaration
       content_objects = None
@@ -82,18 +87,20 @@ class cef_api_hash:
     if self.__debug_enabled:
       namelen = max([len(o["name"]) for o in objects])
      filenamelen = max([len(o["filename"]) for o in objects])
-      dumpsig = [];
+      dumpsig = []
       for o in objects:
-        dumpsig.append(format(o["name"], str(namelen) + "s") + "|" + format(o["filename"], "" + str(filenamelen) + "s") + "|" + o["text"]);
+        dumpsig.append(
+            format(o["name"], str(namelen) + "s") + "|" + format(
+                o["filename"], "" + str(filenamelen) + "s") + "|" + o["text"])
       self.__write_debug_file("objects.txt", dumpsig)
 
-    revisions = { };
+    revisions = {}
 
     for platform in itertools.chain(["universal"], self.platforms):
       sig = self.__get_final_sig(objects, platform)
       if self.__debug_enabled:
         self.__write_debug_file(platform + ".sig", sig)
-      rev = hashlib.sha1(sig).digest();
+      rev = hashlib.sha1(sig).digest()
       revstr = ''.join(format(ord(i), '0>2x') for i in rev)
       revisions[platform] = revstr
 
@@ -105,35 +112,30 @@ class cef_api_hash:
     content = re.sub("//.*\n", "", content)
 
     # function declarations
-    for m in re.finditer("\nCEF_EXPORT\s+?.*?\s+?(\w+)\s*?\(.*?\)\s*?;", content, flags = re.DOTALL):
-      object = {
-        "name": m.group(1),
-        "text": m.group(0).strip()
-      }
+    for m in re.finditer(
+        "\nCEF_EXPORT\s+?.*?\s+?(\w+)\s*?\(.*?\)\s*?;",
+        content,
+        flags=re.DOTALL):
+      object = {"name": m.group(1), "text": m.group(0).strip()}
       objects.append(object)
 
     # structs
-    for m in re.finditer("\ntypedef\s+?struct\s+?(\w+)\s+?\{.*?\}\s+?(\w+)\s*?;", content, flags = re.DOTALL):
-      object = {
-        "name": m.group(2),
-        "text": m.group(0).strip()
-      }
+    for m in re.finditer(
+        "\ntypedef\s+?struct\s+?(\w+)\s+?\{.*?\}\s+?(\w+)\s*?;",
+        content,
+        flags=re.DOTALL):
+      object = {"name": m.group(2), "text": m.group(0).strip()}
      objects.append(object)
 
     # enums
-    for m in re.finditer("\nenum\s+?(\w+)\s+?\{.*?\}\s*?;", content, flags = re.DOTALL):
-      object = {
-        "name": m.group(1),
-        "text": m.group(0).strip()
-      }
+    for m in re.finditer(
+        "\nenum\s+?(\w+)\s+?\{.*?\}\s*?;", content, flags=re.DOTALL):
+      object = {"name": m.group(1), "text": m.group(0).strip()}
       objects.append(object)
 
     # typedefs
     for m in re.finditer("\ntypedef\s+?.*?\s+(\w+);", content, flags=0):
-      object = {
-        "name": m.group(1),
-        "text": m.group(0).strip()
-      }
+      object = {"name": m.group(1), "text": m.group(0).strip()}
       objects.append(object)
 
     return objects
@@ -141,7 +143,9 @@ class cef_api_hash:
   def __parse_string_type(self, content):
     """ Grab defined CEF_STRING_TYPE_xxx """
     objects = []
-    for m in re.finditer("\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n", content, flags = 0):
+    for m in re.finditer(
+        "\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n", content,
+        flags=0):
       object = {
           "name": m.group(1),
          "text": m.group(0),
@@ -151,8 +155,8 @@ class cef_api_hash:
 
   def __prepare_text(self, text):
     text = text.strip()
-    text = re.sub("\s+", " ", text);
-    text = re.sub("\(\s+", "(", text);
+    text = re.sub("\s+", " ", text)
+    text = re.sub("\(\s+", "(", text)
     return text
 
   def __get_final_sig(self, objects, platform):
@@ -166,17 +170,25 @@ class cef_api_hash:
 
   def __get_filenames(self):
     """ Returns file names to be processed, relative to headerdir """
-    headers = [os.path.join(self.__headerdir, filename) for filename in self.included_files];
-    headers = itertools.chain(headers, get_files(os.path.join(self.__headerdir, "capi", "*.h")))
-    headers = itertools.chain(headers, get_files(os.path.join(self.__headerdir, "internal", "*.h")))
+    headers = [
+        os.path.join(self.__headerdir, filename)
+        for filename in self.included_files
+    ]
+    headers = itertools.chain(
+        headers, get_files(os.path.join(self.__headerdir, "capi", "*.h")))
+    headers = itertools.chain(
+        headers, get_files(os.path.join(self.__headerdir, "internal", "*.h")))
 
     for v in self.platform_files.values():
-      headers = itertools.chain(headers, [os.path.join(self.__headerdir, f) for f in v])
+      headers = itertools.chain(headers,
+                                [os.path.join(self.__headerdir, f) for f in v])
 
-    normalized = [os.path.relpath(filename, self.__headerdir) for filename in headers];
-    normalized = [f.replace('\\', '/').lower() for f in normalized];
+    normalized = [
+        os.path.relpath(filename, self.__headerdir) for filename in headers
+    ]
+    normalized = [f.replace('\\', '/').lower() for f in normalized]
 
-    return list(set(normalized));
+    return list(set(normalized))
 
   def __is_platform_filename(self, filename, platform):
     if platform == "universal":
@@ -193,10 +205,10 @@ class cef_api_hash:
     return not listed
 
   def __write_debug_file(self, filename, content):
-    make_dir(self.__debugdir);
-    outfile = os.path.join(self.__debugdir, filename);
-    dir = os.path.dirname(outfile);
-    make_dir(dir);
+    make_dir(self.__debugdir)
+    outfile = os.path.join(self.__debugdir, filename)
+    dir = os.path.dirname(outfile)
+    make_dir(dir)
     if not isinstance(content, basestring):
       content = "\n".join(content)
     write_file(outfile, content)
@@ -211,12 +223,22 @@ if __name__ == "__main__":
   """
 
   parser = OptionParser(description=disc)
-  parser.add_option('--cpp-header-dir', dest='cppheaderdir', metavar='DIR',
-                    help='input directory for C++ header files [required]')
-  parser.add_option('--debug-dir', dest='debugdir', metavar='DIR',
-                    help='intermediate directory for easy debugging')
-  parser.add_option('-v', '--verbose',
-                    action='store_true', dest='verbose', default=False,
-                    help='output detailed status information')
+  parser.add_option(
+      '--cpp-header-dir',
+      dest='cppheaderdir',
+      metavar='DIR',
+      help='input directory for C++ header files [required]')
+  parser.add_option(
+      '--debug-dir',
+      dest='debugdir',
+      metavar='DIR',
+      help='intermediate directory for easy debugging')
+  parser.add_option(
+      '-v',
+      '--verbose',
+      action='store_true',
+      dest='verbose',
+      default=False,
+      help='output detailed status information')
   (options, args) = parser.parse_args()
 
@@ -228,8 +250,8 @@ if __name__ == "__main__":
   # calculate
   c_start_time = time.time()
 
-  calc = cef_api_hash(options.cppheaderdir, options.debugdir, options.verbose);
-  revisions = calc.calculate();
+  calc = cef_api_hash(options.cppheaderdir, options.debugdir, options.verbose)
+  revisions = calc.calculate()
 
   c_completed_in = time.time() - c_start_time
 

@ -17,6 +17,7 @@ def notify(msg):
|
|||||||
""" Display a message. """
|
""" Display a message. """
|
||||||
sys.stdout.write(' NOTE: ' + msg + '\n')
|
sys.stdout.write(' NOTE: ' + msg + '\n')
|
||||||
|
|
||||||
|
|
||||||
def wrap_text(text, indent='', maxchars=80):
|
def wrap_text(text, indent='', maxchars=80):
|
||||||
""" Wrap the text to the specified number of characters. If
|
""" Wrap the text to the specified number of characters. If
|
||||||
necessary a line will be broken and wrapped after a word.
|
necessary a line will be broken and wrapped after a word.
|
||||||
@ -27,16 +28,19 @@ def wrap_text(text, indent = '', maxchars = 80):
|
|||||||
result += indent + line + '\n'
|
result += indent + line + '\n'
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def is_base_class(clsname):
|
def is_base_class(clsname):
|
||||||
""" Returns true if |clsname| is a known base (root) class in the object
|
""" Returns true if |clsname| is a known base (root) class in the object
|
||||||
hierarchy.
|
hierarchy.
|
||||||
"""
|
"""
|
||||||
return clsname == 'CefBaseRefCounted' or clsname == 'CefBaseScoped'
|
return clsname == 'CefBaseRefCounted' or clsname == 'CefBaseScoped'
|
||||||
|
|
||||||
|
|
||||||
def get_capi_file_name(cppname):
|
def get_capi_file_name(cppname):
|
||||||
""" Convert a C++ header file name to a C API header file name. """
|
""" Convert a C++ header file name to a C API header file name. """
|
||||||
return cppname[:-2] + '_capi.h'
|
return cppname[:-2] + '_capi.h'
|
||||||
|
|
||||||
|
|
||||||
def get_capi_name(cppname, isclassname, prefix=None):
|
def get_capi_name(cppname, isclassname, prefix=None):
|
||||||
""" Convert a C++ CamelCaps name to a C API underscore name. """
|
""" Convert a C++ CamelCaps name to a C API underscore name. """
|
||||||
result = ''
|
result = ''
|
||||||
@ -66,11 +70,13 @@ def get_capi_name(cppname, isclassname, prefix = None):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def get_wrapper_type_enum(cppname):
|
def get_wrapper_type_enum(cppname):
|
||||||
""" Returns the wrapper type enumeration value for the specified C++ class
|
""" Returns the wrapper type enumeration value for the specified C++ class
|
||||||
name. """
|
name. """
|
||||||
return 'WT_' + get_capi_name(cppname, False)[4:].upper()
|
return 'WT_' + get_capi_name(cppname, False)[4:].upper()
|
||||||
|
|
||||||
|
|
||||||
def get_prev_line(body, pos):
|
def get_prev_line(body, pos):
|
||||||
""" Retrieve the start and end positions and value for the line immediately
|
""" Retrieve the start and end positions and value for the line immediately
|
||||||
before the line containing the specified position.
|
before the line containing the specified position.
|
||||||
@ -80,6 +86,7 @@ def get_prev_line(body, pos):
|
|||||||
line = body[start:end]
|
line = body[start:end]
|
||||||
return {'start': start, 'end': end, 'line': line}
|
return {'start': start, 'end': end, 'line': line}
|
||||||
|
|
||||||
|
|
||||||
def get_comment(body, name):
|
def get_comment(body, name):
|
||||||
""" Retrieve the comment for a class or function. """
|
""" Retrieve the comment for a class or function. """
|
||||||
result = []
|
result = []
|
||||||
@ -107,6 +114,7 @@ def get_comment(body, name):
|
|||||||
result.reverse()
|
result.reverse()
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def validate_comment(file, name, comment):
|
def validate_comment(file, name, comment):
|
||||||
""" Validate the comment array returned by get_comment(). """
|
""" Validate the comment array returned by get_comment(). """
|
||||||
# Verify that the comment contains beginning and ending '///' as required by
|
# Verify that the comment contains beginning and ending '///' as required by
|
||||||
@ -122,6 +130,7 @@ def validate_comment(file, name, comment):
|
|||||||
raise Exception('Missing or incorrect comment in %s for: %s' % \
|
raise Exception('Missing or incorrect comment in %s for: %s' % \
|
||||||
(file, name))
|
(file, name))
|
||||||
|
|
||||||
|
|
||||||
def format_comment(comment, indent, translate_map=None, maxchars=80):
|
def format_comment(comment, indent, translate_map=None, maxchars=80):
|
||||||
""" Return the comments array as a formatted string. """
|
""" Return the comments array as a formatted string. """
|
||||||
result = ''
|
result = ''
|
||||||
@ -180,6 +189,7 @@ def format_comment(comment, indent, translate_map = None, maxchars = 80):
|
|||||||
result = '\n' + result
|
result = '\n' + result
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def format_translation_changes(old, new):
|
def format_translation_changes(old, new):
|
||||||
""" Return a comment stating what is different between the old and new
|
""" Return a comment stating what is different between the old and new
|
||||||
function prototype parts.
|
function prototype parts.
|
||||||
@ -223,6 +233,7 @@ def format_translation_changes(old, new):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def format_translation_includes(header, body):
|
def format_translation_includes(header, body):
|
||||||
""" Return the necessary list of includes based on the contents of the
|
""" Return the necessary list of includes based on the contents of the
|
||||||
body.
|
body.
|
||||||
@ -267,6 +278,7 @@ def format_translation_includes(header, body):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def str_to_dict(str):
|
def str_to_dict(str):
|
||||||
""" Convert a string to a dictionary. If the same key has multiple values
|
""" Convert a string to a dictionary. If the same key has multiple values
|
||||||
the values will be stored in a list. """
|
the values will be stored in a list. """
|
||||||
@ -295,6 +307,7 @@ def str_to_dict(str):
|
|||||||
dict[name] = val
|
dict[name] = val
|
||||||
return dict
|
return dict
|
||||||
|
|
||||||
|
|
||||||
def dict_to_str(dict):
|
def dict_to_str(dict):
|
||||||
""" Convert a dictionary to a string. """
|
""" Convert a dictionary to a string. """
|
||||||
str = []
|
str = []
|
||||||
@ -355,8 +368,9 @@ _simpletypes = {
|
|||||||
'cef_json_parser_error_t': ['cef_json_parser_error_t', 'JSON_NO_ERROR'],
|
'cef_json_parser_error_t': ['cef_json_parser_error_t', 'JSON_NO_ERROR'],
|
||||||
'cef_plugin_policy_t': ['cef_plugin_policy_t', 'PLUGIN_POLICY_ALLOW'],
|
'cef_plugin_policy_t': ['cef_plugin_policy_t', 'PLUGIN_POLICY_ALLOW'],
|
||||||
'CefCursorHandle': ['cef_cursor_handle_t', 'kNullCursorHandle'],
|
'CefCursorHandle': ['cef_cursor_handle_t', 'kNullCursorHandle'],
|
||||||
'CefCompositionUnderline' : ['cef_composition_underline_t',
|
'CefCompositionUnderline': [
|
||||||
'CefCompositionUnderline()'],
|
'cef_composition_underline_t', 'CefCompositionUnderline()'
|
||||||
|
],
|
||||||
'CefEventHandle': ['cef_event_handle_t', 'kNullEventHandle'],
|
'CefEventHandle': ['cef_event_handle_t', 'kNullEventHandle'],
|
||||||
'CefWindowHandle': ['cef_window_handle_t', 'kNullWindowHandle'],
|
'CefWindowHandle': ['cef_window_handle_t', 'kNullWindowHandle'],
|
||||||
'CefPoint': ['cef_point_t', 'CefPoint()'],
|
'CefPoint': ['cef_point_t', 'CefPoint()'],
|
||||||
@ -368,14 +382,15 @@ _simpletypes = {
|
|||||||
'CefTime': ['cef_time_t', 'CefTime()'],
|
'CefTime': ['cef_time_t', 'CefTime()'],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_function_impls(content, ident):
|
def get_function_impls(content, ident):
|
||||||
""" Retrieve the function parts from the specified contents as a set of
|
""" Retrieve the function parts from the specified contents as a set of
|
||||||
return value, name, arguments and body. Ident must occur somewhere in
|
return value, name, arguments and body. Ident must occur somewhere in
|
||||||
the value.
|
the value.
|
||||||
"""
|
"""
|
||||||
# extract the functions
|
# extract the functions
|
||||||
p = re.compile('\n'+_cre_func+'\((.*?)\)([A-Za-z0-9_\s]{0,})'+
|
p = re.compile(
|
||||||
'\{(.*?)\n\}',
|
'\n' + _cre_func + '\((.*?)\)([A-Za-z0-9_\s]{0,})' + '\{(.*?)\n\}',
|
||||||
re.MULTILINE | re.DOTALL)
|
re.MULTILINE | re.DOTALL)
|
||||||
list = p.findall(content)
|
list = p.findall(content)
|
||||||
|
|
||||||
@ -413,6 +428,7 @@ def get_function_impls(content, ident):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def get_next_function_impl(existing, name):
|
def get_next_function_impl(existing, name):
|
||||||
result = None
|
result = None
|
||||||
for item in existing:
|
for item in existing:
|
||||||
@ -422,6 +438,7 @@ def get_next_function_impl(existing, name):
|
|||||||
break
|
break
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def get_copyright():
|
def get_copyright():
|
||||||
result = \
|
result = \
|
||||||
"""// Copyright (c) $YEAR$ The Chromium Embedded Framework Authors. All rights
|
"""// Copyright (c) $YEAR$ The Chromium Embedded Framework Authors. All rights
|
||||||
@ -510,8 +527,8 @@ class obj_header:
|
|||||||
for attrib, retval, argval in list:
|
for attrib, retval, argval in list:
|
||||||
comment = get_comment(data, retval + '(' + argval + ');')
|
comment = get_comment(data, retval + '(' + argval + ');')
|
||||||
validate_comment(filename, retval, comment)
|
validate_comment(filename, retval, comment)
|
||||||
self.funcs.append(obj_function(self, filename, attrib, retval,
|
self.funcs.append(
|
||||||
argval, comment))
|
obj_function(self, filename, attrib, retval, argval, comment))
|
||||||
|
|
||||||
# extract includes
|
# extract includes
|
||||||
p = re.compile('\n#include \"include/' + _cre_cfnameorpath + '.h')
|
p = re.compile('\n#include \"include/' + _cre_cfnameorpath + '.h')
|
||||||
@ -522,11 +539,10 @@ class obj_header:
|
|||||||
forward_declares = p.findall(data)
|
forward_declares = p.findall(data)
|
||||||
|
|
||||||
# extract classes
|
# extract classes
|
||||||
p = re.compile('\n'+_cre_attrib+
|
p = re.compile('\n' + _cre_attrib + '\nclass' + _cre_space + _cre_cfname +
|
||||||
'\nclass'+_cre_space+_cre_cfname+_cre_space+
|
_cre_space + ':' + _cre_space + 'public' + _cre_virtual +
|
||||||
':'+_cre_space+'public'+_cre_virtual+
|
_cre_space + _cre_cfname + _cre_space + '{(.*?)\n};',
|
||||||
_cre_space+_cre_cfname+_cre_space+
|
re.MULTILINE | re.DOTALL)
|
||||||
'{(.*?)\n};', re.MULTILINE | re.DOTALL)
|
|
||||||
list = p.findall(data)
|
list = p.findall(data)
|
||||||
if len(list) > 0:
|
if len(list) > 0:
|
||||||
added = True
|
added = True
|
||||||
@ -539,15 +555,14 @@ class obj_header:
|
|||||||
comment = get_comment(data, name + "\n")
|
comment = get_comment(data, name + "\n")
|
||||||
validate_comment(filename, name, comment)
|
validate_comment(filename, name, comment)
|
||||||
self.classes.append(
|
self.classes.append(
|
||||||
obj_class(self, filename, attrib, name, parent_name, body,
|
obj_class(self, filename, attrib, name, parent_name, body, comment,
|
||||||
comment, includes, forward_declares))
|
includes, forward_declares))
|
||||||
|
|
||||||
# extract empty classes
|
# extract empty classes
|
||||||
p = re.compile('\n'+_cre_attrib+
|
p = re.compile('\n' + _cre_attrib + '\nclass' + _cre_space + _cre_cfname +
|
||||||
'\nclass'+_cre_space+_cre_cfname+_cre_space+
|
_cre_space + ':' + _cre_space + 'public' + _cre_virtual +
|
||||||
':'+_cre_space+'public'+_cre_virtual+
|
_cre_space + _cre_cfname + _cre_space + '{};',
|
||||||
_cre_space+_cre_cfname+_cre_space+
|
re.MULTILINE | re.DOTALL)
|
||||||
'{};', re.MULTILINE | re.DOTALL)
|
|
||||||
list = p.findall(data)
|
list = p.findall(data)
|
||||||
if len(list) > 0:
|
if len(list) > 0:
|
||||||
added = True
|
added = True
|
||||||
@ -560,8 +575,8 @@ class obj_header:
|
|||||||
comment = get_comment(data, name + "\n")
|
comment = get_comment(data, name + "\n")
|
||||||
validate_comment(filename, name, comment)
|
validate_comment(filename, name, comment)
|
||||||
self.classes.append(
|
self.classes.append(
|
||||||
obj_class(self, filename, attrib, name, parent_name, "",
|
obj_class(self, filename, attrib, name, parent_name, "", comment,
|
||||||
comment, includes, forward_declares))
|
includes, forward_declares))
|
||||||
|
|
||||||
if added:
|
if added:
|
||||||
# a global function or class was read from the header file
|
# a global function or class was read from the header file
|
||||||
@ -674,7 +689,10 @@ class obj_header:
|
|||||||
|
|
||||||
def get_defined_structs(self):
|
def get_defined_structs(self):
|
||||||
""" Return a list of already defined structure names. """
|
""" Return a list of already defined structure names. """
|
||||||
return ['cef_print_info_t', 'cef_window_info_t', 'cef_base_ref_counted_t', 'cef_base_scoped_t']
|
return [
|
||||||
|
'cef_print_info_t', 'cef_window_info_t', 'cef_base_ref_counted_t',
|
||||||
|
'cef_base_scoped_t'
|
||||||
|
]
|
||||||
|
|
||||||
def get_capi_translations(self):
|
def get_capi_translations(self):
|
||||||
""" Return a dictionary that maps C++ terminology to C API terminology.
|
""" Return a dictionary that maps C++ terminology to C API terminology.
|
||||||
@ -714,8 +732,8 @@ class obj_header:
|
|||||||
class obj_class:
|
class obj_class:
|
||||||
""" Class representing a C++ class. """
|
""" Class representing a C++ class. """
|
||||||
|
|
||||||
def __init__(self, parent, filename, attrib, name, parent_name, body,
|
def __init__(self, parent, filename, attrib, name, parent_name, body, comment,
|
||||||
comment, includes, forward_declares):
|
includes, forward_declares):
|
||||||
if not isinstance(parent, obj_header):
|
if not isinstance(parent, obj_header):
|
||||||
raise Exception('Invalid parent object type')
|
raise Exception('Invalid parent object type')
|
||||||
|
|
||||||
@ -729,7 +747,8 @@ class obj_class:
|
|||||||
self.forward_declares = forward_declares
|
self.forward_declares = forward_declares
|
||||||
|
|
||||||
# extract typedefs
|
# extract typedefs
|
||||||
p = re.compile('\n'+_cre_space+'typedef'+_cre_space+_cre_typedef+';',
|
p = re.compile(
|
||||||
|
'\n' + _cre_space + 'typedef' + _cre_space + _cre_typedef + ';',
|
||||||
re.MULTILINE | re.DOTALL)
|
re.MULTILINE | re.DOTALL)
|
||||||
list = p.findall(body)
|
list = p.findall(body)
|
||||||
|
|
||||||
@ -744,8 +763,8 @@ class obj_class:
|
|||||||
self.typedefs.append(obj_typedef(self, filename, value, alias))
|
self.typedefs.append(obj_typedef(self, filename, value, alias))
|
||||||
|
|
||||||
# extract static functions
|
# extract static functions
|
||||||
p = re.compile('\n'+_cre_space+_cre_attrib+'\n'+_cre_space+'static'+
|
p = re.compile('\n' + _cre_space + _cre_attrib + '\n' + _cre_space +
|
||||||
_cre_space+_cre_func+'\((.*?)\)',
|
'static' + _cre_space + _cre_func + '\((.*?)\)',
|
||||||
re.MULTILINE | re.DOTALL)
|
re.MULTILINE | re.DOTALL)
|
||||||
list = p.findall(body)
|
list = p.findall(body)
|
||||||

@@ -758,7 +777,8 @@ class obj_class:
           obj_function_static(self, attrib, retval, argval, comment))

     # extract virtual functions
-    p = re.compile('\n'+_cre_space+_cre_attrib+'\n'+_cre_space+'virtual'+
+    p = re.compile(
+        '\n' + _cre_space + _cre_attrib + '\n' + _cre_space + 'virtual' +
         _cre_space + _cre_func + '\((.*?)\)' + _cre_vfmod,
         re.MULTILINE | re.DOTALL)
     list = p.findall(body)
@@ -773,7 +793,8 @@ class obj_class:
               vfmod.strip()))

   def __repr__(self):
-    result = '/* '+dict_to_str(self.attribs)+' */ class '+self.name+"\n{"
+    result = '/* ' + dict_to_str(
+        self.attribs) + ' */ class ' + self.name + "\n{"

     if len(self.typedefs) > 0:
       result += "\n\t"
@@ -1210,8 +1231,8 @@ class obj_function_static(obj_function):
   def __init__(self, parent, attrib, retval, argval, comment):
     if not isinstance(parent, obj_class):
       raise Exception('Invalid parent object type')
-    obj_function.__init__(self, parent, parent.filename, attrib, retval,
-                          argval, comment)
+    obj_function.__init__(self, parent, parent.filename, attrib, retval, argval,
+                          comment)

   def __repr__(self):
     return 'static ' + obj_function.__repr__(self) + ';'
@@ -1223,14 +1244,15 @@ class obj_function_static(obj_function):
     prefix = get_capi_name(self.parent.get_name(), False)
     return obj_function.get_capi_name(self, prefix)

+
 class obj_function_virtual(obj_function):
   """ Class representing a virtual function. """

   def __init__(self, parent, attrib, retval, argval, comment, vfmod):
     if not isinstance(parent, obj_class):
       raise Exception('Invalid parent object type')
-    obj_function.__init__(self, parent, parent.filename, attrib, retval,
-                          argval, comment)
+    obj_function.__init__(self, parent, parent.filename, attrib, retval, argval,
+                          comment)
     if vfmod == 'const':
       self.isconst = True
     else:
@@ -1500,6 +1522,7 @@ class obj_argument:

     return ''

+
 class obj_analysis:
   """ Class representing an analysis of a data type value. """

@@ -1579,9 +1602,7 @@ class obj_analysis:
     if value.find('std::vector') == 0:
       self.result_type = 'vector'
       val = string.strip(value[12:-1])
-      self.result_value = [
-          self._get_basic(val)
-      ]
+      self.result_value = [self._get_basic(val)]
       self.result_value[0]['vector_type'] = val
       return True

@@ -1623,10 +1644,7 @@ class obj_analysis:
   def _get_basic(self, value):
     # check for string values
     if value == "CefString":
-      return {
-          'result_type' : 'string',
-          'result_value' : None
-      }
+      return {'result_type': 'string', 'result_value': None}

     # check for simple direct translations
     if value in _simpletypes.keys():
@@ -1638,10 +1656,7 @@ class obj_analysis:

     # check if already a C API structure
     if value[-2:] == '_t':
-      return {
-          'result_type' : 'structure',
-          'result_value' : value
-      }
+      return {'result_type': 'structure', 'result_value': value}

     # check for CEF reference pointers
     p = re.compile('^CefRefPtr<(.*?)>$', re.DOTALL)
@@ -1933,15 +1948,9 @@ class obj_analysis:
     if self.result_value[0]['result_type'] == 'string' \
         and self.result_value[1]['result_type'] == 'string':
       if self.result_type == 'map':
-        return {
-            'value' : 'cef_string_map_t',
-            'format' : 'single'
-        }
+        return {'value': 'cef_string_map_t', 'format': 'single'}
       elif self.result_type == 'multimap':
-        return {
-            'value' : 'cef_string_multimap_t',
-            'format' : 'multi'
-        }
+        return {'value': 'cef_string_multimap_t', 'format': 'multi'}
       raise Exception('Only mappings of strings to strings are supported')

   def get_capi(self, defined_structs=[]):
@@ -36,6 +36,7 @@ import sys
 # - Some global variables like "$year$" will be replaced in the whole template
 #   before further parsing occurs.

+
 class cef_html_builder:
   """ Class used to build the cefbuilds HTML file. """

@@ -47,7 +48,7 @@ class cef_html_builder:
   def clear(self):
     """ Clear the contents of this object. """
     self._parts = {}
-    return;
+    return

   @staticmethod
   def _token(key):
@@ -87,7 +88,9 @@ class cef_html_builder:
     top = str[:start_pos]
     middle = str[start_pos + len(start_tag):end_pos]
     bottom = str[end_pos + len(end_tag):]
-    return (top + cef_html_builder._token(cef_html_builder._section_key(section)) + bottom, middle)
+    return (
+        top + cef_html_builder._token(cef_html_builder._section_key(section)) +
+        bottom, middle)

   def load(self, html_template):
     """ Load the specified |html_template| string. """
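
The _extract helper reformatted above cuts a tagged section out of the
template and leaves a single placeholder token behind. A standalone sketch of
the same idea (tag and token formats invented for illustration):

def extract_section(s, start_tag, end_tag, token):
  # Everything between the tags becomes |middle|; the tags and the section
  # body are replaced by a single placeholder token.
  start_pos = s.find(start_tag)
  end_pos = s.find(end_tag)
  top = s[:start_pos]
  middle = s[start_pos + len(start_tag):end_pos]
  bottom = s[end_pos + len(end_tag):]
  return (top + token + bottom, middle)

html, row = extract_section('<table>[[ROW]]<tr>...</tr>[[/ROW]]</table>',
                            '[[ROW]]', '[[/ROW]]', '$row$')
# html == '<table>$row$</table>'; row == '<tr>...</tr>'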
@@ -175,11 +178,17 @@ class cef_html_builder:
     else:
       sample_app = 'cefclient'
     return {
-        'standard': 'Standard binary distribution. Includes header files, libcef_dll_wrapper source code, binary files, CMake configuration files and source code for the cefclient and cefsimple sample applications. See the included README.txt file for usage and build requirements.',
-        'minimal': 'Minimal binary distribution. Includes header files, libcef_dll_wrapper source code, Release build binary files and CMake configuration files. Does not include Debug build binary files or sample application source code. See the included README.txt file for usage and build requirements.',
-        'client': 'Release build of the ' + sample_app + ' sample application. See the included README.txt file for usage requirements.',
-        'debug_symbols': 'Debug build symbols. Must be extracted and placed next to the CEF Debug binary file with the same name and version.',
-        'release_symbols': 'Release build symbols. Must be extracted and placed next to the CEF Release binary file with the same name and version.'
+        'standard':
+            'Standard binary distribution. Includes header files, libcef_dll_wrapper source code, binary files, CMake configuration files and source code for the cefclient and cefsimple sample applications. See the included README.txt file for usage and build requirements.',
+        'minimal':
+            'Minimal binary distribution. Includes header files, libcef_dll_wrapper source code, Release build binary files and CMake configuration files. Does not include Debug build binary files or sample application source code. See the included README.txt file for usage and build requirements.',
+        'client':
+            'Release build of the ' + sample_app +
+            ' sample application. See the included README.txt file for usage requirements.',
+        'debug_symbols':
+            'Debug build symbols. Must be extracted and placed next to the CEF Debug binary file with the same name and version.',
+        'release_symbols':
+            'Release build symbols. Must be extracted and placed next to the CEF Release binary file with the same name and version.'
     }[file['type']]

   def generate(self, json_builder):
@@ -210,9 +219,12 @@ class cef_html_builder:
       for version in json_builder.get_versions(platform):
         subs['cef_version'] = version['cef_version']
         subs['chromium_version'] = version['chromium_version']
-        subs['last_modified'] = self._get_date(version['files'][0]['last_modified'])
-        subs['cef_source_url'] = self._get_cef_source_url(version['cef_version'])
-        subs['chromium_source_url'] = self._get_chromium_source_url(version['chromium_version'])
+        subs['last_modified'] = self._get_date(
+            version['files'][0]['last_modified'])
+        subs['cef_source_url'] = self._get_cef_source_url(
+            version['cef_version'])
+        subs['chromium_source_url'] = self._get_chromium_source_url(
+            version['chromium_version'])

         # Substitute variables.
         version_str = self._replace_all(self._parts['version'], subs)
@@ -225,9 +237,12 @@ class cef_html_builder:
           subs['size'] = self._get_file_size(file['size'])
           subs['type'] = file['type']
           subs['type_name'] = self._get_type_name(file['type'])
-          subs['file_url'] = self._get_file_url(platform, version['cef_version'], file)
-          subs['sha1_url'] = self._get_sha1_url(platform, version['cef_version'], file)
-          subs['tooltip_text'] = self._get_tooltip_text(platform, version['cef_version'], file)
+          subs['file_url'] = self._get_file_url(platform,
+                                                version['cef_version'], file)
+          subs['sha1_url'] = self._get_sha1_url(platform,
+                                                version['cef_version'], file)
+          subs['tooltip_text'] = self._get_tooltip_text(
+              platform, version['cef_version'], file)

           # Substitute variables.
           file_str = self._replace_all(self._parts['file'], subs)
@@ -236,25 +251,35 @@ class cef_html_builder:
       if len(file_strs) > 0:
         # Always output file types in the same order.
         file_out = ''
-        type_order = ['standard', 'minimal', 'client', 'debug_symbols', 'release_symbols']
+        type_order = [
+            'standard', 'minimal', 'client', 'debug_symbols',
+            'release_symbols'
+        ]
         for type in type_order:
           if type in file_strs:
             file_out = file_out + file_strs[type]

         # Insert files.
-        version_str = self._replace(version_str, self._section_key('file'), file_out)
+        version_str = self._replace(version_str,
+                                    self._section_key('file'), file_out)
         version_strs.append(version_str)

       if len(version_strs) > 0:
         # Insert versions.
-        platform_str = self._replace(platform_str, self._section_key('version'), "".join(version_strs))
+        platform_str = self._replace(platform_str,
+                                     self._section_key('version'),
+                                     "".join(version_strs))
         platform_strs.append(platform_str)
         platform_link_strs.append(platform_link_str)

     if len(platform_strs) > 0:
       # Insert platforms.
-      root_str = self._replace(root_str, self._section_key('platform_link'), "".join(platform_link_strs))
-      root_str = self._replace(root_str, self._section_key('platform'), "".join(platform_strs))
+      root_str = self._replace(root_str,
+                               self._section_key('platform_link'),
+                               "".join(platform_link_strs))
+      root_str = self._replace(root_str,
+                               self._section_key('platform'),
+                               "".join(platform_strs))

     return root_str

@@ -263,7 +288,8 @@ class cef_html_builder:
 if __name__ == '__main__':
   # Verify command-line arguments.
   if len(sys.argv) < 4:
-    sys.stderr.write('Usage: %s <json_file_in> <html_file_in> <html_file_out>' % sys.argv[0])
+    sys.stderr.write(
+        'Usage: %s <json_file_in> <html_file_in> <html_file_out>' % sys.argv[0])
     sys.exit()

   json_file_in = sys.argv[1]
@@ -43,19 +43,24 @@ import urllib
 # directory listings.
 _CEF_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"

+
 def parse_date(date):
   return datetime.datetime.strptime(date, _CEF_DATE_FORMAT)

+
 def format_date(date):
   return date.strftime(_CEF_DATE_FORMAT)

+
 # Helpers to format datetime values on JSON read/write.
 def cef_from_json(json_object):
   if 'last_modified' in json_object:
     json_object['last_modified'] = parse_date(json_object['last_modified'])
   return json_object

+
 class cef_json_encoder(json.JSONEncoder):
+
   def default(self, o):
     if isinstance(o, datetime.datetime):
       return format_date(o)
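
Taken together, the encoder and the object hook above give a lossless round
trip for last_modified values. A minimal sketch, assuming the definitions
above are in scope:

import datetime
import json

record = {'name': 'cef.tar.gz', 'last_modified': datetime.datetime.now()}
encoded = json.dumps(record, cls=cef_json_encoder)        # datetime -> string
decoded = json.loads(encoded, object_hook=cef_from_json)  # string -> datetime
assert decoded['last_modified'] == record['last_modified']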
@@ -75,7 +80,8 @@ class cef_json_builder:
   @staticmethod
   def get_platforms():
     """ Returns the list of supported platforms. """
-    return ('linux32', 'linux64', 'linuxarm', 'macosx64', 'windows32', 'windows64')
+    return ('linux32', 'linux64', 'linuxarm', 'macosx64', 'windows32',
+            'windows64')

   @staticmethod
   def get_distrib_types():
@@ -85,7 +91,8 @@ class cef_json_builder:
   @staticmethod
   def is_valid_version(version):
     """ Returns true if the specified CEF version is fully qualified and valid. """
-    return bool(re.compile('^3.[0-9]{4,5}.[0-9]{4,5}.g[0-9a-f]{7}$').match(version))
+    return bool(
+        re.compile('^3.[0-9]{4,5}.[0-9]{4,5}.g[0-9a-f]{7}$').match(version))

   @staticmethod
   def is_valid_chromium_version(version):
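
For reference, the is_valid_version pattern above only accepts fully
qualified CEF 3.x version strings; a quick standalone check:

import re

p = re.compile('^3.[0-9]{4,5}.[0-9]{4,5}.g[0-9a-f]{7}$')
assert p.match('3.2704.1414.g185cd6c')  # fully qualified: accepted
assert not p.match('3.2704')            # partial version: rejected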
@@ -117,8 +124,12 @@ class cef_json_builder:
     # Return a string representation of this object.
     self._sort_versions()
     if self._prettyprint:
-      return json.dumps(self._data, cls=cef_json_encoder, sort_keys=True,
-                        indent=2, separators=(',', ': '))
+      return json.dumps(
+          self._data,
+          cls=cef_json_encoder,
+          sort_keys=True,
+          indent=2,
+          separators=(',', ': '))
     else:
       return json.dumps(self._data, cls=cef_json_encoder, sort_keys=True)

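The two json.dumps branches above differ only in layout; with the same
sort_keys ordering, the pretty-printed form adds indentation and stable
separators without changing content:

import json

data = {'macosx64': {'versions': []}, 'linux64': {'versions': []}}
compact = json.dumps(data, sort_keys=True)
pretty = json.dumps(data, sort_keys=True, indent=2, separators=(',', ': '))
assert json.loads(compact) == json.loads(pretty)  # same content, new layout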
@@ -224,9 +235,11 @@ class cef_json_builder:
             not 'size' in file or \
             not 'last_modified' in file or \
             not 'sha1' in file:
-          self._print('load: Missing file key(s) for %s %s' % (platform, version['cef_version']))
+          self._print('load: Missing file key(s) for %s %s' %
+                      (platform, version['cef_version']))
           continue
-        (expected_platform, expected_version, expected_type) = self._parse_name(file['name'])
+        (expected_platform, expected_version,
+         expected_type) = self._parse_name(file['name'])
         if expected_platform != platform or \
            expected_version != version['cef_version'] or \
            expected_type != file['type']:
@@ -236,7 +249,8 @@ class cef_json_builder:
         self._validate_args(platform, version['cef_version'], file['type'],
                             file['size'], file['last_modified'], file['sha1'])
         if file['type'] in found_types:
-          self._print('load: Duplicate %s type for %s %s' % (file['type'], platform, version['cef_version']))
+          self._print('load: Duplicate %s type for %s %s' %
+                      (file['type'], platform, version['cef_version']))
           continue
         found_types.append(file['type'])
         valid_files.append({
@@ -249,9 +263,13 @@ class cef_json_builder:

       if len(valid_files) > 0:
         valid_versions.append({
-            'cef_version': version['cef_version'],
-            'chromium_version': self.set_chromium_version(version['cef_version'], version['chromium_version']),
-            'files': self._sort_files(valid_files)
+            'cef_version':
+                version['cef_version'],
+            'chromium_version':
+                self.set_chromium_version(version['cef_version'],
+                                          version['chromium_version']),
+            'files':
+                self._sort_files(valid_files)
         })

       if len(valid_versions) > 0:
@@ -280,7 +298,8 @@ class cef_json_builder:
     if name_no_ext[-4:] == '.tar':
       name_no_ext = name_no_ext[:-4]
     name_parts = name_no_ext.split('_')
-    if len(name_parts) < 4 or name_parts[0] != 'cef' or name_parts[1] != 'binary':
+    if len(
+        name_parts) < 4 or name_parts[0] != 'cef' or name_parts[1] != 'binary':
       raise Exception('Invalid filename: %s' % name)

     # Remove 'cef' and 'binary'.
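
The file names parsed here follow a fixed underscore-delimited layout. A
worked example with an invented name matching the pattern (the real
_parse_name also strips the extension first and defaults the type to
'standard' when the trailing part is absent):

name_no_ext = 'cef_binary_3.2704.1414.g185cd6c_linux32_minimal'
name_parts = name_no_ext.split('_')
# ['cef', 'binary', '3.2704.1414.g185cd6c', 'linux32', 'minimal']
version, platform, type = name_parts[2], name_parts[3], name_parts[4]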
@@ -356,7 +375,7 @@ class cef_json_builder:
     self._validate_args(platform, version, type, size, last_modified, sha1)

     # Find the existing version record.
-    version_idx = -1;
+    version_idx = -1
     for i in range(0, len(self._data[platform]['versions'])):
       if self._data[platform]['versions'][i]['cef_version'] == version:
         # Check the version record.
@@ -376,9 +395,12 @@ class cef_json_builder:

     # Find the existing file record with matching type.
     file_changed = True
-    for i in range(0, len(self._data[platform]['versions'][version_idx]['files'])):
-      if self._data[platform]['versions'][version_idx]['files'][i]['type'] == type:
-        existing_sha1 = self._data[platform]['versions'][version_idx]['files'][i]['sha1']
+    for i in range(0,
+                   len(self._data[platform]['versions'][version_idx]['files'])):
+      if self._data[platform]['versions'][version_idx]['files'][i][
+          'type'] == type:
+        existing_sha1 = self._data[platform]['versions'][version_idx]['files'][
+            i]['sha1']
         if existing_sha1 != sha1:
           # Remove the existing file record.
           self._print(' Remove %s %s' % (name, existing_sha1))
@@ -420,7 +442,7 @@ class cef_json_builder:
       if version is None or version_obj['cef_version'].find(version) == 0:
         for file_obj in version_obj['files']:
           if type is None or type == file_obj['type']:
-            result_obj = file_obj;
+            result_obj = file_obj
             # Add additional metadata.
             result_obj['platform'] = platform
             result_obj['cef_version'] = version_obj['cef_version']
@@ -23,23 +23,32 @@ import random
 import string
 import sys

+
 # Create a fake sha1 checksum value.
 def make_fake_sha1():
-  return ''.join(random.SystemRandom().choice('abcdef' + string.digits) for _ in range(40))
+  return ''.join(random.SystemRandom().choice('abcdef' + string.digits)
+                 for _ in range(40))

+
 # Create a fake file size value.
 def make_fake_size():
   return random.randint(30000000, 60000000)

+
 # Create fake file info based on |platform| and |version|.
 def make_fake_file_info(platform, version, type):
   return {
-      'name': cef_json_builder.get_file_name(version, platform, type) + '.tar.gz',
-      'size': make_fake_size(),
-      'lastModified': datetime.datetime.now(),
-      'sha1': make_fake_sha1()
+      'name':
+          cef_json_builder.get_file_name(version, platform, type) + '.tar.gz',
+      'size':
+          make_fake_size(),
+      'lastModified':
+          datetime.datetime.now(),
+      'sha1':
+          make_fake_sha1()
   }

+
 # Returns a list of fake files based on |platform| and |version|.
 def create_fake_files(platform, version):
   files = []
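
A quick look at what the fake-value helpers above produce, assuming the
functions are in scope (output is random by design):

import string

sha1 = make_fake_sha1()
assert len(sha1) == 40
assert all(c in 'abcdef' + string.digits for c in sha1)

info = make_fake_file_info('linux32', '3.2704.1414.g185cd6c', 'standard')
assert info['name'].endswith('.tar.gz')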
@@ -61,7 +70,8 @@ def create_fake_files(platform, version):
 if __name__ == '__main__':
   # Verify command-line arguments.
   if len(sys.argv) < 5 or sys.argv[1] != 'add':
-    sys.stderr.write('Usage: %s add <platform> <cef_version> <chromium_version>' % sys.argv[0])
+    sys.stderr.write('Usage: %s add <platform> <cef_version> <chromium_version>'
+                     % sys.argv[0])
     sys.exit()

   # Requested platform.
@@ -124,7 +134,8 @@ if __name__ == '__main__':
   # Add new files to the builder.
   changed_files = []
   for file in new_files:
-    if builder.add_file(file['name'], file['size'], file['lastModified'], file['sha1']):
+    if builder.add_file(file['name'], file['size'], file['lastModified'],
+                        file['sha1']):
       changed_files.append(file)

   if len(changed_files) > 0:
@@ -6,6 +6,7 @@ from cef_json_builder import cef_json_builder
 import datetime
 import unittest

+
 class TestCefJSONBuilder(unittest.TestCase):

   # Write builder contents to string and then read in.
@@ -16,33 +17,35 @@ class TestCefJSONBuilder(unittest.TestCase):
     self.assertEqual(output, str(builder2))

   # Add a file record for testing purposes.
-  def _add_test_file(self, builder, platform='linux32', version='3.2704.1414.g185cd6c',
-                     type='standard', attrib_idx=0, shouldfail=False):
+  def _add_test_file(self,
+                     builder,
+                     platform='linux32',
+                     version='3.2704.1414.g185cd6c',
+                     type='standard',
+                     attrib_idx=0,
+                     shouldfail=False):
     name = cef_json_builder.get_file_name(version, platform, type) + '.tar.gz'

     # Some random attribute information. sha1 must be different to trigger replacement.
-    attribs = [
-        {
-            'date_str': '2016-05-18T22:42:15.487Z',
-            'date_val': datetime.datetime(2016, 5, 18, 22, 42, 15, 487000),
-            'sha1': '2d48ee05ea6385c8fe80879c98c5dd505ad4b100',
-            'size': 48395610
-        },
-        {
-            'date_str': '2016-05-14T22:42:15.487Z',
-            'date_val': datetime.datetime(2016, 5, 14, 22, 42, 15, 487000),
-            'sha1': '2d48ee05ea6385c8fe80879c98c5dd505ad4b200',
-            'size': 48395620
-        }
-    ]
+    attribs = [{
+        'date_str': '2016-05-18T22:42:15.487Z',
+        'date_val': datetime.datetime(2016, 5, 18, 22, 42, 15, 487000),
+        'sha1': '2d48ee05ea6385c8fe80879c98c5dd505ad4b100',
+        'size': 48395610
+    }, {
+        'date_str': '2016-05-14T22:42:15.487Z',
+        'date_val': datetime.datetime(2016, 5, 14, 22, 42, 15, 487000),
+        'sha1': '2d48ee05ea6385c8fe80879c98c5dd505ad4b200',
+        'size': 48395620
+    }]

     # Populate the Chromium version to avoid queries.
     chromium_version = '49.0.2705.50'
-    self.assertEqual(chromium_version, builder.set_chromium_version(version, chromium_version))
+    self.assertEqual(chromium_version,
+                     builder.set_chromium_version(version, chromium_version))
     self.assertEqual(0, builder.get_query_count())

-    result = builder.add_file(name,
-                              attribs[attrib_idx]['size'],
+    result = builder.add_file(name, attribs[attrib_idx]['size'],
                               attribs[attrib_idx]['date_str'],
                               attribs[attrib_idx]['sha1'])
     # Failure should be expected when adding the same file multiple times with the same sha1.
@@ -105,8 +108,11 @@ class TestCefJSONBuilder(unittest.TestCase):
     builder = cef_json_builder()

     # Specify all values just in case the defaults change.
-    expected = self._add_test_file(builder,
-        platform='linux32', version='3.2704.1414.g185cd6c', type='standard')
+    expected = self._add_test_file(
+        builder,
+        platform='linux32',
+        version='3.2704.1414.g185cd6c',
+        type='standard')

     # No filter.
     files = builder.get_files()
@@ -140,14 +146,18 @@ class TestCefJSONBuilder(unittest.TestCase):
     self.assertEqual(len(files), 0)

     # All filters.
-    files = builder.get_files(platform='linux32', version='3.2704', type='standard')
+    files = builder.get_files(
+        platform='linux32', version='3.2704', type='standard')
     self.assertEqual(len(files), 1)
     self.assertEqual(expected, files[0])
-    files = builder.get_files(platform='linux32', version='3.2704', type='minimal')
+    files = builder.get_files(
+        platform='linux32', version='3.2704', type='minimal')
     self.assertEqual(len(files), 0)
-    files = builder.get_files(platform='linux32', version='3.2623', type='standard')
+    files = builder.get_files(
+        platform='linux32', version='3.2623', type='standard')
     self.assertEqual(len(files), 0)
-    files = builder.get_files(platform='linux64', version='3.2704', type='standard')
+    files = builder.get_files(
+        platform='linux64', version='3.2704', type='standard')
     self.assertEqual(len(files), 0)

     # Test add/get of multiple files.
@@ -162,7 +172,9 @@ class TestCefJSONBuilder(unittest.TestCase):
     for platform in platforms:
       for version in versions:
         for type in types:
-          expected.append(self._add_test_file(builder, platform=platform, type=type, version=version))
+          expected.append(
+              self._add_test_file(
+                  builder, platform=platform, type=type, version=version))

     self._verify_write_read(builder)

@@ -187,7 +199,8 @@ class TestCefJSONBuilder(unittest.TestCase):
     for platform in platforms:
       for version in versions:
         for type in types:
-          files = builder.get_files(platform=platform, type=type, version=version)
+          files = builder.get_files(
+              platform=platform, type=type, version=version)
           self.assertEqual(len(files), 1)
           self.assertEqual(expected[idx], files[0])
           idx = idx + 1
@@ -203,7 +216,8 @@ class TestCefJSONBuilder(unittest.TestCase):
     # Initial file versions.
     for platform in platforms:
       for type in types:
-        self._add_test_file(builder, platform=platform, type=type, version=version)
+        self._add_test_file(
+            builder, platform=platform, type=type, version=version)

     # No filter.
     files = builder.get_files()
@@ -214,8 +228,13 @@ class TestCefJSONBuilder(unittest.TestCase):
     # Replace all file versions (due to new sha1).
     for platform in platforms:
       for type in types:
-        expected.append(self._add_test_file(builder,
-            platform=platform, type=type, version=version, attrib_idx=1))
+        expected.append(
+            self._add_test_file(
+                builder,
+                platform=platform,
+                type=type,
+                version=version,
+                attrib_idx=1))

     # No filter.
     files = builder.get_files()
@@ -241,7 +260,8 @@ class TestCefJSONBuilder(unittest.TestCase):
     # Initial file versions.
     for platform in platforms:
       for type in types:
-        self._add_test_file(builder, platform=platform, type=type, version=version)
+        self._add_test_file(
+            builder, platform=platform, type=type, version=version)

     # No filter.
     files = builder.get_files()
@@ -252,8 +272,13 @@ class TestCefJSONBuilder(unittest.TestCase):
     # Replace no file versions (due to same sha1).
     for platform in platforms:
       for type in types:
-        expected.append(self._add_test_file(builder,
-            platform=platform, type=type, version=version, shouldfail=True))
+        expected.append(
+            self._add_test_file(
+                builder,
+                platform=platform,
+                type=type,
+                version=version,
+                shouldfail=True))

     # No filter.
     files = builder.get_files()
@@ -283,12 +308,10 @@ class TestCefJSONBuilder(unittest.TestCase):
     self.assertFalse(builder.is_valid_chromium_version('foobar'))

     # The Git hashes must exist but the rest of the CEF version can be fake.
-    versions = (
-        ('3.2704.1414.g185cd6c', '51.0.2704.47'),
-        ('3.2623.9999.gb90a3be', '49.0.2623.110'),
-        ('3.2623.9999.g2a6491b', '49.0.2623.87'),
-        ('3.9999.9999.gab2636b', 'master'),
-    )
+    versions = (('3.2704.1414.g185cd6c',
+                 '51.0.2704.47'), ('3.2623.9999.gb90a3be', '49.0.2623.110'),
+                ('3.2623.9999.g2a6491b',
+                 '49.0.2623.87'), ('3.9999.9999.gab2636b', 'master'),)

     # Test with no query.
     for (cef, chromium) in versions:
@@ -16,6 +16,7 @@ if sys.platform == 'win32':
 else:
   clang_format_exe = 'clang-format'

+
 def clang_format(file_name, file_contents):
   # -assume-filename is necessary to find the .clang-format file and determine
   # the language when specifying contents via stdin.
@@ -4,7 +4,6 @@
 # found in the LICENSE file.
-
 # TODO(slightlyoff): move to using shared version of this script.

 '''This script makes it easy to combine libs and object files to a new lib,
 optionally removing some of the object files in the input libs by regular
 expression matching.
@@ -19,10 +18,8 @@ import sys

 def Shell(*args):
   '''Runs the program and args in args, returns the output from the program.'''
-  process = subprocess.Popen(args,
-                             stdin = None,
-                             stdout = subprocess.PIPE,
-                             stderr = subprocess.STDOUT)
+  process = subprocess.Popen(
+      args, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
   output = process.stdout.readlines()
   process.wait()
   retcode = process.returncode
@@ -69,11 +66,15 @@ any object file (in the input libraries) that matches a given regular
 expression.
 '''

+
 def GetOptionParser():
   parser = optparse.OptionParser(USAGE)
-  parser.add_option('-o', '--output', dest = 'output',
-                    help = 'write to this output library')
-  parser.add_option('-r', '--remove', dest = 'remove',
+  parser.add_option(
+      '-o', '--output', dest='output', help='write to this output library')
+  parser.add_option(
+      '-r',
+      '--remove',
+      dest='remove',
       help='object files matching this regexp will be removed '
       'from the output library')
   return parser
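
A sketch of how the parser built above is typically consumed (argument values
are hypothetical):

parser = GetOptionParser()
options, args = parser.parse_args(
    ['--output', 'combined.lib', '--remove', '.*_unittest.*', 'a.lib', 'b.lib'])
# options.output == 'combined.lib'; options.remove == '.*_unittest.*'
# args == ['a.lib', 'b.lib'] (the input libraries)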
@@ -2,7 +2,6 @@
 # Copyright 2017 The Chromium Embedded Framework Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be found
 # in the LICENSE file.
-
 """
 This script implements a simple HTTP server for receiving crash report uploads
 from a Breakpad/Crashpad client (any CEF-based application). This script is
@@ -120,16 +119,20 @@ import sys
 import uuid
 import zlib

+
 def print_msg(msg):
   """ Write |msg| to stdout and flush. """
   timestr = datetime.datetime.now().strftime("%m/%d/%Y %H:%M:%S")
   sys.stdout.write("%s: %s\n" % (timestr, msg))
   sys.stdout.flush()

+
 # Key identifying the minidump file.
 minidump_key = 'upload_file_minidump'

+
 class CrashHTTPRequestHandler(BaseHTTPRequestHandler):
+
   def __init__(self, dump_directory, *args):
     self._dump_directory = dump_directory
     BaseHTTPRequestHandler.__init__(self, *args)
@@ -251,7 +254,8 @@ class CrashHTTPRequestHandler(BaseHTTPRequestHandler):
     if chunked:
       request_body = self._unchunk_request(compressed)
     else:
-      content_length = int(self.headers['Content-Length']) if 'Content-Length' in self.headers else 0
+      content_length = int(self.headers[
+          'Content-Length']) if 'Content-Length' in self.headers else 0
       if content_length > 0:
         request_body = self.rfile.read(content_length)
       else:
@@ -284,15 +288,18 @@ class CrashHTTPRequestHandler(BaseHTTPRequestHandler):
     with open(meta_file, 'w') as fp:
       json.dump(metadata, fp)

+
 def HandleRequestsUsing(dump_store):
   return lambda *args: CrashHTTPRequestHandler(dump_directory, *args)

+
 def RunCrashServer(port, dump_directory):
   """ Run the crash handler HTTP server. """
   httpd = HTTPServer(('', port), HandleRequestsUsing(dump_directory))
   print_msg('Starting httpd on port %d' % port)
   httpd.serve_forever()

+
 # Program entry point.
 if __name__ == "__main__":
   if len(sys.argv) != 3:
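
HandleRequestsUsing exists because HTTPServer instantiates the handler class
itself, so per-server state must be bound in through a closure (note the
lambda above closes over the global dump_directory rather than its dump_store
argument). A minimal sketch of the same pattern with an invented handler:

from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer

class TaggedHandler(BaseHTTPRequestHandler):

  def __init__(self, tag, *args):
    # Extra state must be set before the parent constructor, which handles
    # the request immediately.
    self._tag = tag
    BaseHTTPRequestHandler.__init__(self, *args)

  def do_GET(self):
    self.send_response(200)
    self.end_headers()
    self.wfile.write(self._tag)

def MakeHandler(tag):
  # HTTPServer calls the factory with its own arguments; the closure adds ours.
  return lambda *args: TaggedHandler(tag, *args)

httpd = HTTPServer(('', 8080), MakeHandler('hello'))
# httpd.serve_forever()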
@@ -307,4 +314,3 @@ if __name__ == "__main__":
     raise Exception('Directory does not exist: %s' % dump_directory)

   RunCrashServer(int(sys.argv[1]), dump_directory)
-
@@ -4,10 +4,12 @@

 import datetime

+
 def get_year():
   """ Returns the current year. """
   return str(datetime.datetime.now().year)

+
 def get_date():
   """ Returns the current date. """
   return datetime.datetime.now().strftime('%B %d, %Y')
@@ -5,6 +5,7 @@
 from subprocess import Popen, PIPE
 import sys

+
 def exec_cmd(cmd, path, input_string=None):
   """ Execute the specified command and return the result. """
   out = ''
@@ -12,11 +13,20 @@ def exec_cmd(cmd, path, input_string=None):
   parts = cmd.split()
   try:
     if input_string is None:
-      process = Popen(parts, cwd=path, stdout=PIPE, stderr=PIPE,
+      process = Popen(
+          parts,
+          cwd=path,
+          stdout=PIPE,
+          stderr=PIPE,
           shell=(sys.platform == 'win32'))
       out, err = process.communicate()
     else:
-      process = Popen(parts, cwd=path, stdin=PIPE, stdout=PIPE, stderr=PIPE,
+      process = Popen(
+          parts,
+          cwd=path,
+          stdin=PIPE,
+          stdout=PIPE,
+          stderr=PIPE,
           shell=(sys.platform == 'win32'))
       out, err = process.communicate(input=input_string)
   except IOError, (errno, strerror):
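
Callers elsewhere in these tools (see git_util below) treat the exec_cmd
return value as a dict with 'out' and 'err' keys; a usage sketch under that
assumption:

result = exec_cmd('git rev-parse HEAD', '.')
if result['out'] != '':
  print(result['out'].strip())  # the commit hash, when run inside a checkout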
@@ -8,6 +8,7 @@ import shutil
 import sys
 import time

+
 def read_file(name, normalize=True):
   """ Read a file. """
   try:
@@ -24,6 +25,7 @@ def read_file(name, normalize = True):
   else:
     f.close()

+
 def write_file(name, data):
   """ Write a file. """
   try:
@@ -36,14 +38,17 @@ def write_file(name, data):
   else:
     f.close()

+
 def path_exists(name):
   """ Returns true if the path currently exists. """
   return os.path.exists(name)

+
 def backup_file(name):
   """ Rename the file to a name that includes the current time stamp. """
   move_file(name, name + '.' + time.strftime('%Y-%m-%d-%H-%M-%S'))

+
 def copy_file(src, dst, quiet=True):
   """ Copy a file. """
   try:
@@ -51,9 +56,11 @@ def copy_file(src, dst, quiet = True):
     if not quiet:
       sys.stdout.write('Transferring ' + src + ' file.\n')
   except IOError, (errno, strerror):
-    sys.stderr.write('Failed to copy file from '+src+' to '+dst+': '+strerror)
+    sys.stderr.write('Failed to copy file from ' + src + ' to ' + dst + ': ' +
+                     strerror)
     raise

+
 def move_file(src, dst, quiet=True):
   """ Move a file. """
   try:
@@ -61,9 +68,11 @@ def move_file(src, dst, quiet = True):
     if not quiet:
       sys.stdout.write('Moving ' + src + ' file.\n')
   except IOError, (errno, strerror):
-    sys.stderr.write('Failed to move file from '+src+' to '+dst+': '+strerror)
+    sys.stderr.write('Failed to move file from ' + src + ' to ' + dst + ': ' +
+                     strerror)
     raise

+
 def copy_files(src_glob, dst_folder, quiet=True):
   """ Copy multiple files. """
   for fname in iglob(src_glob):
@@ -73,6 +82,7 @@ def copy_files(src_glob, dst_folder, quiet = True):
   else:
     copy_file(fname, dst, quiet)

+
 def remove_file(name, quiet=True):
   """ Remove the specified file. """
   try:
@@ -84,6 +94,7 @@ def remove_file(name, quiet = True):
     sys.stderr.write('Failed to remove file ' + name + ': ' + strerror)
     raise

+
 def copy_dir(src, dst, quiet=True):
   """ Copy a directory tree. """
   try:
@@ -92,9 +103,11 @@ def copy_dir(src, dst, quiet = True):
     if not quiet:
       sys.stdout.write('Transferring ' + src + ' directory.\n')
   except IOError, (errno, strerror):
-    sys.stderr.write('Failed to copy directory from '+src+' to '+dst+': '+strerror)
+    sys.stderr.write('Failed to copy directory from ' + src + ' to ' + dst +
+                     ': ' + strerror)
     raise

+
 def remove_dir(name, quiet=True):
   """ Remove the specified directory. """
   try:
@@ -106,6 +119,7 @@ def remove_dir(name, quiet = True):
     sys.stderr.write('Failed to remove directory ' + name + ': ' + strerror)
     raise

+
 def make_dir(name, quiet=True):
   """ Create the specified directory. """
   try:
@@ -117,11 +131,13 @@ def make_dir(name, quiet = True):
     sys.stderr.write('Failed to create directory ' + name + ': ' + strerror)
     raise

+
 def get_files(search_glob):
   """ Returns all files matching the search glob. """
   # Sort the result for consistency across platforms.
   return sorted(iglob(search_glob))

+
 def read_version_file(file, args):
   """ Read and parse a version file (key=value pairs, one per line). """
   lines = read_file(file).split("\n")
@@ -130,10 +146,12 @@ def read_version_file(file, args):
     if len(parts) == 2:
       args[parts[0]] = parts[1]

+
 def eval_file(src):
   """ Loads and evaluates the contents of the specified file. """
   return eval(read_file(src), {'__builtins__': None}, None)

+
 def normalize_path(path):
   """ Normalizes the path separator to match the Unix standard. """
   if sys.platform == 'win32':
@@ -25,13 +25,12 @@ else:
   print 'Unknown operating system platform'
   sys.exit()

-
 print "\nGenerating CEF version header file..."
-cmd = [ 'python', 'tools/make_version_header.py',
-        '--header', 'include/cef_version.h',
-        '--cef_version', 'VERSION',
-        '--chrome_version', '../chrome/VERSION',
-        '--cpp_header_dir', 'include' ]
+cmd = [
+    'python', 'tools/make_version_header.py', '--header',
+    'include/cef_version.h', '--cef_version', 'VERSION', '--chrome_version',
+    '../chrome/VERSION', '--cpp_header_dir', 'include'
+]
 RunAction(cef_dir, cmd)

 print "\nPatching build configuration and source files for CEF..."
@@ -10,7 +10,7 @@ try:
   import gclient_utils
 except ImportError, e:
   # Search the PATH environment variable to find the depot_tools folder.
-  depot_tools = None;
+  depot_tools = None
   paths = os.environ.get('PATH').split(os.pathsep)
   for path in paths:
     if os.path.exists(os.path.join(path, 'gclient_utils.py')):
@@ -25,6 +25,7 @@ except ImportError, e:
   sys.path.append(depot_tools)
   import gclient_utils

+
 # Copied from gclient.py python code.
 def RunAction(dir, command):
   """Runs the action."""
@@ -35,8 +36,7 @@ def RunAction(dir, command):
     command[0] = sys.executable

   try:
-    gclient_utils.CheckCallAndFilterAndHeader(
-        command, cwd=dir, always=True)
+    gclient_utils.CheckCallAndFilterAndHeader(command, cwd=dir, always=True)
   except gclient_utils.Error, e:
     # Use a discrete exit status code of 2 to indicate that a hook action
     # failed. Users of this script may wish to treat hook action failures
@@ -12,10 +12,12 @@ if sys.platform == 'win32':
 else:
   git_exe = 'git'

+
 def is_checkout(path):
   """ Returns true if the path represents a git checkout. """
   return os.path.isdir(os.path.join(path, '.git'))

+
 def get_hash(path='.', branch='HEAD'):
   """ Returns the git hash for the specified branch/tag/hash. """
   cmd = "%s rev-parse %s" % (git_exe, branch)
@@ -24,6 +26,7 @@ def get_hash(path = '.', branch = 'HEAD'):
     return result['out'].strip()
   return 'Unknown'

+
 def get_url(path='.'):
   """ Returns the origin url for the specified path. """
   cmd = "%s config --get remote.origin.url" % git_exe
@@ -32,6 +35,7 @@ def get_url(path = '.'):
     return result['out'].strip()
   return 'Unknown'

+
 def get_commit_number(path='.', branch='HEAD'):
   """ Returns the number of commits in the specified branch/tag/hash. """
   cmd = "%s rev-list --count %s" % (git_exe, branch)
@@ -40,6 +44,7 @@ def get_commit_number(path = '.', branch = 'HEAD'):
     return result['out'].strip()
   return '0'

+
 def get_changed_files(path, hash):
   """ Retrieves the list of changed files. """
   if hash == 'unstaged':
@@ -57,6 +62,7 @@ def get_changed_files(path, hash):
     return files.strip().split("\n")
   return []

+
 def write_indented_output(output):
   """ Apply a fixed amount of indentation to lines before printing. """
if output == '':
|
if output == '':
|
||||||
@ -67,6 +73,7 @@ def write_indented_output(output):
|
|||||||
continue
|
continue
|
||||||
sys.stdout.write('\t%s\n' % line)
|
sys.stdout.write('\t%s\n' % line)
|
||||||
|
|
||||||
|
|
||||||
def git_apply_patch_file(patch_path, patch_dir):
|
def git_apply_patch_file(patch_path, patch_dir):
|
||||||
""" Apply |patch_path| to files in |patch_dir|. """
|
""" Apply |patch_path| to files in |patch_dir|. """
|
||||||
patch_name = os.path.basename(patch_path)
|
patch_name = os.path.basename(patch_path)
|
||||||
|
@ -80,9 +80,11 @@ else:
|
|||||||
print 'Unknown operating system platform'
|
print 'Unknown operating system platform'
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
|
||||||
|
|
||||||
def msg(msg):
|
def msg(msg):
|
||||||
print 'NOTE: ' + msg
|
print 'NOTE: ' + msg
|
||||||
|
|
||||||
|
|
||||||
def NameValueListToDict(name_value_list):
|
def NameValueListToDict(name_value_list):
|
||||||
"""
|
"""
|
||||||
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
|
Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
|
||||||
@ -111,6 +113,7 @@ def NameValueListToDict(name_value_list):
|
|||||||
result[tokens[0]] = True
|
result[tokens[0]] = True
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
 def ShlexEnv(env_name):
   """
   Split an environment variable using shell-like syntax.
@@ -120,6 +123,7 @@ def ShlexEnv(env_name):
     flags = shlex.split(flags)
   return flags
 
+
 def MergeDicts(*dict_args):
   """
   Given any number of dicts, shallow copy and merge into a new dict.
@@ -130,6 +134,7 @@ def MergeDicts(*dict_args):
     result.update(dictionary)
   return result
 
+
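# Illustrative example (not part of the commit): MergeDicts() above is a
# shallow merge in which later dicts take precedence, equivalent to chained
# dict.update() calls (arg values below are hypothetical):
a = {'is_debug': True, 'target_cpu': 'x64'}
b = {'is_debug': False}
merged = dict(a)
merged.update(b)  # What MergeDicts(a, b) would return.
assert merged == {'is_debug': False, 'target_cpu': 'x64'}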
 def GetValueString(val):
   """
   Return the string representation of |val| expected by GN.
@@ -143,6 +148,7 @@ def GetValueString(val):
       return 'false'
   return val
 
+
 def GetChromiumDefaultArgs():
   """
   Return default GN args. These must match the Chromium defaults.
@@ -171,6 +177,7 @@ def GetChromiumDefaultArgs():
 
   return defaults
 
+
 def GetArgValue(args, key):
   """
   Return an existing GN arg value or the Chromium default.
@@ -179,6 +186,7 @@ def GetArgValue(args, key):
   assert key in defaults, "No default Chromium value specified for %s" % key
   return args.get(key, defaults[key])
 
+
 def GetRecommendedDefaultArgs():
   """
   Return recommended default GN args that differ from Chromium defaults.
@@ -209,12 +217,14 @@ def GetRecommendedDefaultArgs():
 
   return result
 
+
 def GetGNEnvArgs():
   """
   Return GN args specified via the GN_DEFINES env variable.
   """
   return NameValueListToDict(ShlexEnv('GN_DEFINES'))
 
+
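# Illustrative example (not part of the commit): GetGNEnvArgs() composes
# ShlexEnv() and NameValueListToDict(), so a hypothetical setting such as
#   GN_DEFINES='is_official_build=true use_sysroot=false'
# is first shell-split into NAME=VALUE tokens:
import shlex
tokens = shlex.split('is_official_build=true use_sysroot=false')
assert tokens == ['is_official_build=true', 'use_sysroot=false']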
 def GetRequiredArgs():
   """
   Return required GN args. Also enforced by assert() in //cef/BUILD.gn.
@@ -243,6 +253,7 @@ def GetRequiredArgs():
 
   return result
 
+
 def GetMergedArgs(build_args):
   """
   Return merged GN args.
@@ -258,6 +269,7 @@ def GetMergedArgs(build_args):
 
   return MergeDicts(dict, required)
 
+
 def ValidateArgs(args):
   """
   Validate GN arg combinations that we know about. Also provide suggestions
@@ -288,7 +300,8 @@ def ValidateArgs(args):
   elif platform == 'windows':
     assert target_cpu in ('x86', 'x64'), 'target_cpu must be "x86" or "x64"'
   elif platform == 'linux':
-    assert target_cpu in ('x86', 'x64', 'arm'), 'target_cpu must be "x86", "x64" or "arm"'
+    assert target_cpu in ('x86', 'x64',
+                          'arm'), 'target_cpu must be "x86", "x64" or "arm"'
 
   if platform == 'linux':
     if target_cpu == 'x86':
@@ -313,7 +326,8 @@ def ValidateArgs(args):
 
     # Non-official debug builds should use /DEBUG:FASTLINK.
     if not is_official_build and is_debug and not is_win_fastlink:
-      msg('is_official_build=false + is_debug=true recommends is_win_fastlink=true')
+      msg('is_official_build=false + is_debug=true recommends is_win_fastlink=true'
+         )
 
     # Windows custom toolchain requirements.
     #
@@ -374,6 +388,7 @@ def ValidateArgs(args):
     if (os.path.exists(vcvars_path)):
       msg('INCLUDE/LIB/PATH values will be derived from %s' % vcvars_path)
 
+
 def GetConfigArgs(args, is_debug, cpu):
   """
   Return merged GN args for the configuration and validate.
@@ -403,6 +418,7 @@ def GetConfigArgs(args, is_debug, cpu):
   ValidateArgs(result)
   return result
 
+
 def LinuxSysrootExists(cpu):
   """
   Returns true if the sysroot for the specified |cpu| architecture exists.
@@ -421,6 +437,7 @@ def LinuxSysrootExists(cpu):
 
   return os.path.isdir(os.path.join(sysroot_root, sysroot_name))
 
+
 def GetAllPlatformConfigs(build_args):
   """
   Return a map of directory name to GN args for the current platform.
@@ -447,7 +464,8 @@ def GetAllPlatformConfigs(build_args):
       if LinuxSysrootExists(cpu):
         supported_cpus.append(cpu)
       else:
-        msg('Not generating %s configuration due to missing sysroot directory' % cpu)
+        msg('Not generating %s configuration due to missing sysroot directory'
+            % cpu)
     else:
       supported_cpus = ['x64']
   elif platform == 'windows':
@@ -464,6 +482,7 @@ def GetAllPlatformConfigs(build_args):
 
   return result
 
+
 def GetConfigFileContents(args):
   """
   Generate config file contents for the arguments.
@@ -473,6 +492,7 @@ def GetConfigFileContents(args):
     pairs.append("%s=%s" % (k, GetValueString(args[k])))
   return "\n".join(pairs)
 
+
 # Program entry point.
 if __name__ == '__main__':
   import sys
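# Illustrative example (not part of the commit): GetConfigFileContents()
# joins one NAME=VALUE pair per line, with values rendered by
# GetValueString(). The key names and values below are hypothetical:
args = {'is_debug': 'true', 'target_cpu': '"x64"'}
contents = "\n".join('%s=%s' % (k, args[k]) for k in sorted(args))
assert contents == 'is_debug=true\ntarget_cpu="x64"'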
@@ -14,15 +14,37 @@ import sys
 import os
 
 module_order = [
-  "_sse", "-sse", "_ssse", "-ssse",
-  "_sse2", "-sse2", "_ssse2", "-ssse2",
-  "_sse3", "-sse3", "_ssse3", "-ssse3",
-  "_sse4", "-sse4", "_ssse4", "-ssse4",
-  "_avx", "-avx", "_savx", "-savx",
-  "_avx1", "-avx1", "_savx1", "-savx1",
-  "_avx2", "-avx2", "_savx2", "-savx2",
+    "_sse",
+    "-sse",
+    "_ssse",
+    "-ssse",
+    "_sse2",
+    "-sse2",
+    "_ssse2",
+    "-ssse2",
+    "_sse3",
+    "-sse3",
+    "_ssse3",
+    "-ssse3",
+    "_sse4",
+    "-sse4",
+    "_ssse4",
+    "-ssse4",
+    "_avx",
+    "-avx",
+    "_savx",
+    "-savx",
+    "_avx1",
+    "-avx1",
+    "_savx1",
+    "-savx1",
+    "_avx2",
+    "-avx2",
+    "_savx2",
+    "-savx2",
 ]
 
+
 def get_obj_class(item):
   item = item.lower()
   for i in range(len(module_order) - 1, -1, -1):
@@ -31,12 +53,16 @@ def get_obj_class(item):
       return 1 + i
   return 0
 
+
 def obj_compare(x, y):
   xc = get_obj_class(x)
   yc = get_obj_class(y)
-  if xc < yc: return -1
-  elif xc > yc: return 1
-  else: return 0
+  if xc < yc:
+    return -1
+  elif xc > yc:
+    return 1
+  else:
+    return 0
 
 
 def process_line(line):
@@ -70,5 +96,6 @@ def process_file(path):
     f.write("\n".join(result))
     f.write("\n")
 
+
 def apply(confpath):
   process_file(os.path.join(confpath, "obj", "cef", "libcef.ninja"))
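# Illustrative sketch (not part of the commit): get_obj_class()/obj_compare()
# above order object files by the instruction-set suffix in their names. A
# self-contained version with an abbreviated stand-in for module_order:
module_suffixes = ['_sse2', '_avx2']

def obj_class(name):
  name = name.lower()
  for i in range(len(module_suffixes) - 1, -1, -1):
    if module_suffixes[i] in name:
      return 1 + i
  return 0

names = ['foo_sse2.obj', 'foo.obj', 'foo_avx2.obj']
assert sorted(names, key=obj_class) == ['foo.obj', 'foo_sse2.obj', 'foo_avx2.obj']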
@@ -5,13 +5,14 @@
 from cef_parser import *
 from date_util import *
 
+
 def make_capi_global_funcs(funcs, defined_names, translate_map, indent):
   result = ''
   first = True
   for func in funcs:
     comment = func.get_comment()
     if first or len(comment) > 0:
-      result += '\n'+format_comment(comment, indent, translate_map);
+      result += '\n' + format_comment(comment, indent, translate_map)
     if func.get_retval().get_type().is_result_string():
       result += indent + '// The resulting string must be freed by calling cef_string_userfree_free().\n'
     result += indent + 'CEF_EXPORT ' + func.get_capi_proto(defined_names) + ';\n'
@@ -19,6 +20,7 @@ def make_capi_global_funcs(funcs, defined_names, translate_map, indent):
     first = False
   return result
 
+
 def make_capi_member_funcs(funcs, defined_names, translate_map, indent):
   result = ''
   first = True
@@ -35,6 +37,7 @@ def make_capi_member_funcs(funcs, defined_names, translate_map, indent):
     first = False
   return result
 
+
 def make_capi_header(header, filename):
   # structure names that have already been defined
   defined_names = header.get_defined_structs()
@@ -110,7 +113,8 @@ def make_capi_header(header, filename):
     if include.startswith('base/'):
       # base/ headers are C++. They should not be included by
       # translated CEF API headers.
-      raise Exception('Disallowed include of %s.h from %s' % (include, filename))
+      raise Exception('Disallowed include of %s.h from %s' % (include,
+                                                              filename))
     elif include.startswith('internal/'):
       # internal/ headers may be C or C++. Include them as-is.
       internal_includes.add(include)
@@ -155,15 +159,14 @@ extern "C" {
   for cls in classes:
     # virtual functions are inside the structure
     classname = cls.get_capi_name()
-    result += '\n'+format_comment(cls.get_comment(), '', translate_map);
+    result += '\n' + format_comment(cls.get_comment(), '', translate_map)
     result += 'typedef struct _'+classname+' {\n'+\
               ' ///\n'+\
               ' // Base structure.\n'+\
              ' ///\n'+\
              ' '+cls.get_parent_capi_name()+' base;\n'
     funcs = cls.get_virtual_funcs()
-    result += make_capi_member_funcs(funcs, defined_names,
-                                     translate_map, '  ')
+    result += make_capi_member_funcs(funcs, defined_names, translate_map, '  ')
     result += '} ' + classname + ';\n\n'
 
     defined_names.append(cls.get_capi_name())
@@ -171,8 +174,8 @@ extern "C" {
     # static functions become global
     funcs = cls.get_static_funcs()
     if len(funcs) > 0:
-      result += make_capi_global_funcs(funcs, defined_names,
-                                       translate_map, '')+'\n'
+      result += make_capi_global_funcs(funcs, defined_names, translate_map,
+                                       '') + '\n'
 
   # output global functions
   funcs = header.get_funcs(filename)
@@ -192,7 +195,8 @@ extern "C" {
   # add the copyright year
   result = result.replace('$YEAR$', get_year())
   # add the guard string
-  guard = 'CEF_INCLUDE_CAPI_'+string.upper(filename.replace('/', '_').replace('.', '_capi_'))+'_'
+  guard = 'CEF_INCLUDE_CAPI_' + string.upper(
+      filename.replace('/', '_').replace('.', '_capi_')) + '_'
   result = result.replace('$GUARD$', guard)
 
   return result
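# Illustrative example (not part of the commit): the $GUARD$ expression above
# expands as follows ('cef_browser.h' is a hypothetical input; .upper()
# stands in for the Python 2 string.upper() used by the script):
filename = 'cef_browser.h'
guard = 'CEF_INCLUDE_CAPI_' + filename.replace('/', '_').replace(
    '.', '_capi_').upper() + '_'
assert guard == 'CEF_INCLUDE_CAPI_CEF_BROWSER_CAPI_H_'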
@@ -12,6 +12,7 @@ script_dir = os.path.dirname(__file__)
 # CEF root directory.
 cef_dir = os.path.abspath(os.path.join(script_dir, os.pardir))
 
+
 def get_files_for_variable(cmake_path, variables, variable):
   """ Returns the path values associated with |variable| and relative to the
       |cmake_path| directory. """
@@ -38,12 +39,14 @@ def get_files_for_variable(cmake_path, variables, variable):
     new_paths.append(newpath)
   return new_paths
 
+
 def format_cmake_set(name, values):
   result = 'set(%s\n' % name
   for value in values:
     result += '  %s\n' % value
   return result + '  )\n'
 
+
 def format_cmake_group(cmake_path, name, files, platform_sep, append_macro):
   platforms = {}
   common = []
@@ -88,12 +91,14 @@ def format_cmake_group(cmake_path, name, files, platform_sep, append_macro):
   result += 'source_group(%s FILES ${%s})\n\n' % (folder, name)
   return result
 
+
 def format_cmake_library(name, group_names):
   result = 'add_library(%s\n' % name
   for group in group_names:
     result += '  ${%s}\n' % group
   return result + '  )\n\n'
 
+
 def process_cmake_template_segment(segment, segment_ct, cmake_path, variables):
   prefix = None
   library = None
@@ -176,6 +181,7 @@ def process_cmake_template_segment(segment, segment_ct, cmake_path, variables):
 
   return result.strip()
 
-def process_cmake_template(input, output, variables, quiet = False):
+
+def process_cmake_template(input, output, variables, quiet=False):
   """ Reads the |input| template, parses variable substitution sections and
       writes |output|. """
@@ -219,6 +225,7 @@ def process_cmake_template(input, output, variables, quiet = False):
   if changed:
     write_file(output, result)
 
+
 def read_gypi_variables(source):
   """ Read the |source| gypi file and extract the variables section. """
   path = os.path.join(cef_dir, source + '.gypi')
@@ -229,6 +236,7 @@ def read_gypi_variables(source):
     raise Exception('File %s does not have a variables section' % path)
   return contents['variables']
 
+
 # File entry point.
 if __name__ == "__main__":
   # Verify that the correct number of command-line arguments are provided.
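# Illustrative example (not part of the commit): format_cmake_set() is shown
# in full above; its output for a small input is:
def format_cmake_set(name, values):
  result = 'set(%s\n' % name
  for value in values:
    result += '  %s\n' % value
  return result + '  )\n'

assert format_cmake_set('SRCS', ['a.cc', 'b.cc']) == \
    'set(SRCS\n  a.cc\n  b.cc\n  )\n'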
@@ -4,6 +4,7 @@
 
 from cef_parser import *
 
+
 def make_cpptoc_header(header, clsname):
   cls = header.get_class(clsname)
   if cls is None:
@@ -4,6 +4,7 @@
 
 from cef_parser import *
 
+
 def make_cpptoc_impl_proto(name, func, parts):
   if isinstance(func, obj_function_virtual):
     proto = parts['retval'] + ' CEF_CALLBACK'
@@ -13,6 +14,7 @@ def make_cpptoc_impl_proto(name, func, parts):
   proto += ' ' + name + '(' + string.join(parts['args'], ', ') + ')'
   return proto
 
+
 def make_cpptoc_function_impl_existing(cls, name, func, impl, defined_names):
   notify(name + ' has manual edits')
 
@@ -23,7 +25,9 @@ def make_cpptoc_function_impl_existing(cls, name, func, impl, defined_names):
   if len(changes) > 0:
     notify(name + ' prototype changed')
 
-  return make_cpptoc_impl_proto(name, func, parts)+'{'+changes+impl['body']+'\n}\n\n'
+  return make_cpptoc_impl_proto(
+      name, func, parts) + '{' + changes + impl['body'] + '\n}\n\n'
 
+
 def make_cpptoc_function_impl_new(cls, name, func, defined_names):
   # retrieve the C API prototype parts
@@ -49,7 +53,7 @@ def make_cpptoc_function_impl_new(cls, name, func, defined_names):
   else:
     retval_default = retval.get_retval_default(True)
     if len(retval_default) > 0:
-      retval_default = ' '+retval_default;
+      retval_default = ' ' + retval_default
 
   if len(invalid) > 0:
     notify(name + ' could not be autogenerated')
@@ -285,7 +289,8 @@ def make_cpptoc_function_impl_new(cls, name, func, defined_names):
               '\n  }'
     params.append(arg_name + 'List')
   else:
-    raise Exception('Unsupported argument type %s for parameter %s in %s' % (arg_type, arg_name, name))
+    raise Exception('Unsupported argument type %s for parameter %s in %s' %
+                    (arg_type, arg_name, name))
 
   if len(result) != result_len:
     result += '\n'
@@ -310,7 +315,8 @@ def make_cpptoc_function_impl_new(cls, name, func, defined_names):
       result += func.parent.get_name() + 'CppToC::Get(self)->'
     else:
       # virtual method for a parent class
-      result += cls.get_name()+'CppToC::Get(reinterpret_cast<'+cls.get_capi_name()+'*>(self))->'
+      result += cls.get_name(
+      ) + 'CppToC::Get(reinterpret_cast<' + cls.get_capi_name() + '*>(self))->'
   else:
     result += func.parent.get_name() + '::'
   result += func.get_name() + '('
@@ -407,7 +413,7 @@ def make_cpptoc_function_impl_new(cls, name, func, defined_names):
   names = []
   for tmpcls in classes:
     if tmpcls.has_attrib('no_debugct_check'):
-      continue;
+      continue
 
     if tmpcls.is_library_side():
       names.append(tmpcls.get_name() + 'CppToC')
@@ -419,7 +425,7 @@ def make_cpptoc_function_impl_new(cls, name, func, defined_names):
   result += '\n#if DCHECK_IS_ON()'\
             '\n  // Check that all wrapper objects have been destroyed'
   for name in names:
-    result += '\n  DCHECK(base::AtomicRefCountIsZero(&'+name+'::DebugObjCt));';
+    result += '\n  DCHECK(base::AtomicRefCountIsZero(&' + name + '::DebugObjCt));'
   result += '\n#endif  // DCHECK_IS_ON()'
 
   if len(result) != result_len:
@@ -455,6 +461,7 @@ def make_cpptoc_function_impl_new(cls, name, func, defined_names):
   result += '}\n\n'
   return result
 
+
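# Illustrative example (not part of the commit): for virtual functions,
# make_cpptoc_impl_proto() above emits "<retval> CEF_CALLBACK <name>(<args>)".
# The parts dict and function name below are hypothetical, and ', '.join
# stands in for the Python 2 string.join used by the script:
parts = {'retval': 'int', 'args': ['cef_browser_t* self', 'int force']}
proto = parts['retval'] + ' CEF_CALLBACK' + ' browser_do_close(' + \
    ', '.join(parts['args']) + ')'
assert proto == 'int CEF_CALLBACK browser_do_close(cef_browser_t* self, int force)'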
 def make_cpptoc_function_impl(cls, funcs, existing, prefixname, defined_names):
   impl = ''
 
@@ -467,13 +474,16 @@ def make_cpptoc_function_impl(cls, funcs, existing, prefixname, defined_names):
     if not value is None \
         and value['body'].find('// AUTO-GENERATED CONTENT') < 0:
       # an implementation exists that was not auto-generated
-      impl += make_cpptoc_function_impl_existing(cls, name, func, value, defined_names)
+      impl += make_cpptoc_function_impl_existing(cls, name, func, value,
+                                                 defined_names)
     else:
       impl += make_cpptoc_function_impl_new(cls, name, func, defined_names)
 
   return impl
 
-def make_cpptoc_virtual_function_impl(header, cls, existing, prefixname, defined_names):
+
+def make_cpptoc_virtual_function_impl(header, cls, existing, prefixname,
+                                      defined_names):
   funcs = []
   funcs.extend(cls.get_virtual_funcs())
   cur_cls = cls
@@ -488,7 +498,9 @@ def make_cpptoc_virtual_function_impl(header, cls, existing, prefixname, defined
     funcs.extend(parent_cls.get_virtual_funcs())
     cur_cls = header.get_class(parent_name, defined_names)
 
-  return make_cpptoc_function_impl(cls, funcs, existing, prefixname, defined_names)
+  return make_cpptoc_function_impl(cls, funcs, existing, prefixname,
+                                   defined_names)
 
+
 def make_cpptoc_virtual_function_assignment_block(funcs, offset, prefixname):
   impl = ''
@@ -497,8 +509,11 @@ def make_cpptoc_virtual_function_assignment_block(funcs, offset, prefixname):
     impl += '  GetStruct()->' + offset + name + ' = ' + prefixname + '_' + name + ';\n'
   return impl
 
-def make_cpptoc_virtual_function_assignment(header, cls, prefixname, defined_names):
-  impl = make_cpptoc_virtual_function_assignment_block(cls.get_virtual_funcs(), '', prefixname)
+
+def make_cpptoc_virtual_function_assignment(header, cls, prefixname,
+                                            defined_names):
+  impl = make_cpptoc_virtual_function_assignment_block(cls.get_virtual_funcs(),
+                                                       '', prefixname)
 
   cur_cls = cls
   offset = ''
@@ -511,11 +526,13 @@ def make_cpptoc_virtual_function_assignment(header, cls, prefixname, defined_nam
     parent_cls = header.get_class(parent_name, defined_names)
     if parent_cls is None:
       raise Exception('Class does not exist: ' + parent_name)
-    impl += make_cpptoc_virtual_function_assignment_block(parent_cls.get_virtual_funcs(), offset, prefixname)
+    impl += make_cpptoc_virtual_function_assignment_block(
+        parent_cls.get_virtual_funcs(), offset, prefixname)
     cur_cls = header.get_class(parent_name, defined_names)
 
   return impl
 
+
 def make_cpptoc_unwrap_derived(header, cls, base_scoped):
   # identify all classes that derive from cls
   derived_classes = []
@@ -549,6 +566,7 @@ def make_cpptoc_unwrap_derived(header, cls, base_scoped):
          '  }\n'
   return impl
 
+
 def make_cpptoc_class_impl(header, clsname, impl):
   # structure names that have already been defined
   defined_names = header.get_defined_structs()
@@ -572,7 +590,8 @@ def make_cpptoc_class_impl(header, clsname, impl):
     template_class = 'CefCppToCRefCounted'
 
   # generate virtual functions
-  virtualimpl = make_cpptoc_virtual_function_impl(header, cls, existing, prefixname, defined_names)
+  virtualimpl = make_cpptoc_virtual_function_impl(header, cls, existing,
+                                                  prefixname, defined_names)
   if len(virtualimpl) > 0:
     virtualimpl = '\nnamespace {\n\n// MEMBER FUNCTIONS - Body may be edited by hand.\n\n' + virtualimpl + '}  // namespace'
 
@@ -583,7 +602,9 @@ def make_cpptoc_class_impl(header, clsname, impl):
   existing = get_function_impls(impl, 'CEF_EXPORT')
 
   # generate static functions
-  staticimpl = make_cpptoc_function_impl(cls, cls.get_static_funcs(), existing, None, defined_names)
+  staticimpl = make_cpptoc_function_impl(cls,
+                                         cls.get_static_funcs(), existing, None,
+                                         defined_names)
   if len(staticimpl) > 0:
     staticimpl = '\n// GLOBAL FUNCTIONS - Body may be edited by hand.\n\n' + staticimpl
 
@@ -594,7 +615,9 @@ def make_cpptoc_class_impl(header, clsname, impl):
 
   # determine what includes are required by identifying what translation
   # classes are being used
-  includes = format_translation_includes(header, resultingimpl + (unwrapderived[0] if base_scoped else unwrapderived))
+  includes = format_translation_includes(header, resultingimpl +
+                                         (unwrapderived[0]
+                                          if base_scoped else unwrapderived))
 
   # build the final output
   result = get_copyright()
@@ -605,7 +628,8 @@ def make_cpptoc_class_impl(header, clsname, impl):
 
   const = '// CONSTRUCTOR - Do not edit by hand.\n\n'+ \
           clsname+'CppToC::'+clsname+'CppToC() {\n'
-  const += make_cpptoc_virtual_function_assignment(header, cls, prefixname, defined_names)
+  const += make_cpptoc_virtual_function_assignment(header, cls, prefixname,
+                                                   defined_names)
   const += '}\n\n'
 
   if base_scoped:
@@ -635,6 +659,7 @@ def make_cpptoc_class_impl(header, clsname, impl):
 
   return result
 
+
 def make_cpptoc_global_impl(header, impl):
   # structure names that have already been defined
   defined_names = header.get_defined_structs()
@@ -643,7 +668,9 @@ def make_cpptoc_global_impl(header, impl):
   existing = get_function_impls(impl, 'CEF_EXPORT')
 
   # generate global functions
-  impl = make_cpptoc_function_impl(None, header.get_funcs(), existing, None, defined_names)
+  impl = make_cpptoc_function_impl(None,
+                                   header.get_funcs(), existing, None,
+                                   defined_names)
   if len(impl) > 0:
     impl = '\n// GLOBAL FUNCTIONS - Body may be edited by hand.\n\n' + impl
 
@@ -669,6 +696,7 @@ def make_cpptoc_global_impl(header, impl):
 
   return result
 
+
 def write_cpptoc_impl(header, clsname, dir):
   if clsname is None:
     # global file
@@ -698,7 +726,8 @@ if __name__ == "__main__":
 
   # verify that the correct number of command-line arguments are provided
   if len(sys.argv) < 4:
-    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname> <existing_impl>')
+    sys.stderr.write('Usage: ' + sys.argv[0] +
+                     ' <infile> <classname> <existing_impl>')
     sys.exit()
 
   # create the header object
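# Illustrative example (not part of the commit): the per-class DebugObjCt
# check above is assembled by string concatenation; for a hypothetical
# wrapper name it yields:
name = 'CefBrowserCppToC'
line = '\n  DCHECK(base::AtomicRefCountIsZero(&' + name + '::DebugObjCt));'
assert line == '\n  DCHECK(base::AtomicRefCountIsZero(&CefBrowserCppToC::DebugObjCt));'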
@@ -4,8 +4,9 @@
 
 from cef_parser import *
 
+
 def make_function_body_block(cls):
-  impl = '  // '+cls.get_name()+' methods.\n';
+  impl = '  // ' + cls.get_name() + ' methods.\n'
 
   funcs = cls.get_virtual_funcs()
   for func in funcs:
@@ -17,6 +18,7 @@ def make_function_body_block(cls):
 
   return impl
 
+
 def make_function_body(header, cls):
   impl = make_function_body_block(cls)
 
@@ -36,6 +38,7 @@ def make_function_body(header, cls):
 
   return impl
 
+
 def make_ctocpp_header(header, clsname):
   cls = header.get_class(clsname)
   if cls is None:
@@ -4,6 +4,7 @@
 
 from cef_parser import *
 
+
 def make_ctocpp_impl_proto(clsname, name, func, parts):
   const = ''
 
@@ -21,6 +22,7 @@ def make_ctocpp_impl_proto(clsname, name, func, parts):
   proto += name + '(' + string.join(parts['args'], ', ') + ')' + const
   return proto
 
+
 def make_ctocpp_function_impl_existing(clsname, name, func, impl):
   notify(name + ' has manual edits')
 
@@ -34,6 +36,7 @@ def make_ctocpp_function_impl_existing(clsname, name, func, impl):
   return make_ctocpp_impl_proto(clsname, name, func, parts)+'{'+ \
          changes+impl['body']+'\n}\n\n'
 
+
 def make_ctocpp_function_impl_new(clsname, name, func):
   # build the C++ prototype
   parts = func.get_cpp_parts(True)
@@ -42,7 +45,8 @@ def make_ctocpp_function_impl_new(clsname, name, func):
   if isinstance(func, obj_function_virtual):
     # determine how the struct should be referenced
     if clsname == func.parent.get_name():
-      result += '\n  '+get_capi_name(clsname, True)+'* _struct = GetStruct();'
+      result += '\n  ' + get_capi_name(clsname,
+                                       True) + '* _struct = GetStruct();'
     else:
       result += '\n  '+func.parent.get_capi_name()+'* _struct = reinterpret_cast<'+\
                 func.parent.get_capi_name()+'*>(GetStruct());'
@@ -66,7 +70,7 @@ def make_ctocpp_function_impl_new(clsname, name, func):
   else:
     retval_default = retval.get_retval_default(False)
     if len(retval_default) > 0:
-      retval_default = ' '+retval_default;
+      retval_default = ' ' + retval_default
 
   # add API hash check
   if func.has_attrib('api_hash_check'):
@@ -79,7 +83,8 @@ def make_ctocpp_function_impl_new(clsname, name, func):
 
   if isinstance(func, obj_function_virtual):
     # add the structure size check
-    result += '\n  if (CEF_MEMBER_MISSING(_struct, '+func.get_capi_name()+'))'
+    result += '\n  if (CEF_MEMBER_MISSING(_struct, ' + func.get_capi_name(
+    ) + '))'
     result += '\n    return' + retval_default + ';\n'
 
   if len(invalid) > 0:
@@ -302,7 +307,8 @@ def make_ctocpp_function_impl_new(clsname, name, func):
     params.append(arg_name + 'Count')
     params.append(arg_name + 'List')
   else:
-    raise Exception('Unsupported argument type %s for parameter %s in %s' % (arg_type, arg_name, name))
+    raise Exception('Unsupported argument type %s for parameter %s in %s' %
+                    (arg_type, arg_name, name))
 
   if len(result) != result_len:
     result += '\n'
@@ -447,7 +453,7 @@ def make_ctocpp_function_impl_new(clsname, name, func):
   names = []
   for cls in classes:
     if cls.has_attrib('no_debugct_check'):
-      continue;
+      continue
 
     if cls.is_library_side():
       names.append(cls.get_name() + 'CToCpp')
@@ -459,7 +465,7 @@ def make_ctocpp_function_impl_new(clsname, name, func):
   result += '\n#if DCHECK_IS_ON()'\
             '\n  // Check that all wrapper objects have been destroyed'
   for name in names:
-    result += '\n  DCHECK(base::AtomicRefCountIsZero(&'+name+'::DebugObjCt));';
+    result += '\n  DCHECK(base::AtomicRefCountIsZero(&' + name + '::DebugObjCt));'
   result += '\n#endif  // DCHECK_IS_ON()'
 
   if len(result) != result_len:
@@ -496,6 +502,7 @@ def make_ctocpp_function_impl_new(clsname, name, func):
   result += '}\n\n'
   return result
 
+
 def make_ctocpp_function_impl(clsname, funcs, existing):
   impl = ''
 
@@ -511,8 +518,10 @@ def make_ctocpp_function_impl(clsname, funcs, existing):
 
   return impl
 
+
 def make_ctocpp_virtual_function_impl(header, cls, existing):
-  impl = make_ctocpp_function_impl(cls.get_name(), cls.get_virtual_funcs(), existing)
+  impl = make_ctocpp_function_impl(cls.get_name(),
+                                   cls.get_virtual_funcs(), existing)
 
   cur_cls = cls
   while True:
@@ -523,11 +532,14 @@ def make_ctocpp_virtual_function_impl(header, cls, existing):
     parent_cls = header.get_class(parent_name)
     if parent_cls is None:
       raise Exception('Class does not exist: ' + parent_name)
-    impl += make_ctocpp_function_impl(cls.get_name(), parent_cls.get_virtual_funcs(), existing)
+    impl += make_ctocpp_function_impl(cls.get_name(),
+                                      parent_cls.get_virtual_funcs(),
+                                      existing)
     cur_cls = header.get_class(parent_name)
 
   return impl
 
+
 def make_ctocpp_unwrap_derived(header, cls, base_scoped):
   # identify all classes that derive from cls
   derived_classes = []
@@ -561,6 +573,7 @@ def make_ctocpp_unwrap_derived(header, cls, base_scoped):
          '  }\n'
   return impl
 
+
 def make_ctocpp_class_impl(header, clsname, impl):
   cls = header.get_class(clsname)
   if cls is None:
@@ -587,7 +600,8 @@ def make_ctocpp_class_impl(header, clsname, impl):
   existing = get_function_impls(impl, clsname + '::')
 
   # generate static functions
-  staticimpl = make_ctocpp_function_impl(clsname, cls.get_static_funcs(), existing)
+  staticimpl = make_ctocpp_function_impl(clsname,
+                                         cls.get_static_funcs(), existing)
   if len(staticimpl) > 0:
     staticimpl = '\n// STATIC METHODS - Body may be edited by hand.\n\n' + staticimpl
 
@@ -598,7 +612,9 @@ def make_ctocpp_class_impl(header, clsname, impl):
 
   # determine what includes are required by identifying what translation
   # classes are being used
-  includes = format_translation_includes(header, resultingimpl + (unwrapderived[0] if base_scoped else unwrapderived))
+  includes = format_translation_includes(header, resultingimpl +
+                                         (unwrapderived[0]
+                                          if base_scoped else unwrapderived))
 
   # build the final output
   result = get_copyright()
@@ -638,6 +654,7 @@ def make_ctocpp_class_impl(header, clsname, impl):
 
   return result
 
+
 def make_ctocpp_global_impl(header, impl):
   # retrieve the existing global function implementations
   existing = get_function_impls(impl, 'CEF_GLOBAL')
@@ -669,6 +686,7 @@ def make_ctocpp_global_impl(header, impl):
 
   return result
 
+
 def write_ctocpp_impl(header, clsname, dir):
   if clsname is None:
     # global file
@@ -698,7 +716,8 @@ if __name__ == "__main__":
 
   # verify that the correct number of command-line arguments are provided
   if len(sys.argv) < 4:
-    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname> <existing_impl>')
+    sys.stderr.write('Usage: ' + sys.argv[0] +
+                     ' <infile> <classname> <existing_impl>')
     sys.exit()
 
   # create the header object
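# Illustrative example (not part of the commit): the structure-size guard
# above expands, for a hypothetical capi member name, to a C++ line of the
# form:
capi_name = 'do_close'
line = '\n  if (CEF_MEMBER_MISSING(_struct, ' + capi_name + '))'
assert line == '\n  if (CEF_MEMBER_MISSING(_struct, do_close))'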
@@ -15,10 +15,12 @@ import sys
 import tarfile
 import zipfile
 
+
 def create_zip_archive(input_dir):
   """ Creates a zip archive of the specified input directory. """
   zip_file = input_dir + '.zip'
   zf = zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED, True)
+
   def addDir(dir):
     for f in os.listdir(dir):
       full_path = os.path.join(dir, f)
@@ -27,9 +29,11 @@ def create_zip_archive(input_dir):
       else:
         zf.write(full_path, os.path.relpath(full_path, \
                  os.path.join(input_dir, os.pardir)))
 
   addDir(input_dir)
   zf.close()
 
+
 def create_tar_archive(input_dir, format):
   """ Creates a tar archive of the specified input directory. """
   # Supported formats include "gz" and "bz2".
@@ -38,6 +42,7 @@ def create_tar_archive(input_dir, format):
   tf.add(input_dir, arcname=os.path.basename(input_dir))
   tf.close()
 
+
 def create_7z_archive(input_dir, format):
   """ Creates a 7z archive of the specified input directory. """
   # CEF_COMMAND_7ZIP might be "c:\Program Files (x86)\7Zip\7z.exe" or /usr/bin/7za
@@ -58,11 +63,13 @@ def create_7z_archive(input_dir, format):
     zip_input = input_dir
 
   # Create the compressed archive.
-  run('"%s" a -t%s -y %s %s' % (command, format, zip_file, zip_input), working_dir)
+  run('"%s" a -t%s -y %s %s' % (command, format, zip_file, zip_input),
+      working_dir)
 
   if not tar_file is None:
     remove_file(tar_file)
 
+
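# Illustrative example (not part of the commit): create_tar_archive() above
# relies on tarfile's "w:<format>" mode strings, so "gz" and "bz2" select the
# compression. An in-memory sketch that exercises the same mode scheme:
import io
import tarfile
buf = io.BytesIO()
tf = tarfile.open(fileobj=buf, mode='w:gz')  # 'w:' + format, as above
tf.close()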
 def create_output_dir(name, parent_dir):
   """ Creates an output directory and adds the path to the archive list. """
   output_dir = os.path.abspath(os.path.join(parent_dir, name))
@@ -71,6 +78,7 @@ def create_output_dir(name, parent_dir):
   archive_dirs.append(output_dir)
   return output_dir
 
+
 def get_readme_component(name):
   """ Loads a README file component. """
   paths = []
@@ -94,6 +102,7 @@ def get_readme_component(name):
 
   raise Exception('Readme component not found: ' + name)
 
+
 def create_readme():
   """ Creates the README.TXT file. """
   # gather the components
@@ -148,6 +157,7 @@ def create_readme():
   if not options.quiet:
     sys.stdout.write('Creating README.TXT file.\n')
 
+
 def create_fuzed_gtest(tests_dir):
   """ Generate a fuzed version of gtest and build the expected directory structure. """
   src_gtest_dir = os.path.join(src_dir, 'testing', 'gtest')
@@ -177,12 +187,15 @@ def create_fuzed_gtest(tests_dir):
     move_file(gtest_cpp, target_gtest_cpp_dir, options.quiet)
 
   # gtest LICENSE file at tests/gtest/LICENSE
-  copy_file(os.path.join(src_gtest_dir, 'LICENSE'), target_gtest_dir, options.quiet)
+  copy_file(
+      os.path.join(src_gtest_dir, 'LICENSE'), target_gtest_dir, options.quiet)
 
   # CEF README file at tests/gtest/README.cef
-  copy_file(os.path.join(cef_dir, 'tests', 'gtest', 'README.cef.in'),
-            os.path.join(target_gtest_dir, 'README.cef'), options.quiet)
+  copy_file(
+      os.path.join(cef_dir, 'tests', 'gtest', 'README.cef.in'),
+      os.path.join(target_gtest_dir, 'README.cef'), options.quiet)
 
+
 def transfer_gypi_files(src_dir, gypi_paths, gypi_path_prefix, dst_dir, quiet):
   """ Transfer files from one location to another. """
   for path in gypi_paths:
@@ -192,6 +205,7 @@ def transfer_gypi_files(src_dir, gypi_paths, gypi_path_prefix, dst_dir, quiet):
     make_dir(dst_path, quiet)
     copy_file(src, dst, quiet)
 
-def normalize_headers(file, new_path = ''):
+
+def normalize_headers(file, new_path=''):
   """ Normalize headers post-processing. Remove the path component from any
       project include directives. """
@@ -200,6 +214,7 @@ def normalize_headers(file, new_path = ''):
                "// Include path modified for CEF Binary Distribution.\n#include \""+new_path+"\\1\"", data)
   write_file(file, data)
 
+
 def eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
   """ Transfer files based on the specified configuration. """
   if not path_exists(transfer_cfg):
@@ -219,7 +234,8 @@ def eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
       # place a readme file in the destination directory
       readme = os.path.join(dst_path, 'README-TRANSFER.txt')
       if not path_exists(readme):
-        copy_file(os.path.join(script_dir, 'distrib/README-TRANSFER.txt'), readme)
+        copy_file(
+            os.path.join(script_dir, 'distrib/README-TRANSFER.txt'), readme)
       open(readme, 'ab').write(cfg['source'] + "\n")
 
     # perform any required post-processing
@@ -231,7 +247,9 @@ def eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
         new_path = cfg['new_header_path']
       normalize_headers(dst, new_path)
 
-def transfer_files(cef_dir, script_dir, transfer_cfg_dir, mode, output_dir, quiet):
+
+def transfer_files(cef_dir, script_dir, transfer_cfg_dir, mode, output_dir,
+                   quiet):
   # Non-mode-specific transfers.
   transfer_cfg = os.path.join(transfer_cfg_dir, 'transfer.cfg')
   eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet)
@@ -239,9 +257,11 @@ def transfer_files(cef_dir, script_dir, transfer_cfg_dir, mode, output_dir, quie
   transfer_cfg = os.path.join(transfer_cfg_dir, 'transfer_%s.cfg' % mode)
   eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet)
 
+
 def combine_libs(build_dir, libs, dest_lib):
   """ Combine multiple static libraries into a single static library. """
-  cmdline = 'msvs_env.bat win%s python combine_libs.py -o "%s"' % (platform_arch, dest_lib)
+  cmdline = 'msvs_env.bat win%s python combine_libs.py -o "%s"' % (
+      platform_arch, dest_lib)
   for lib in libs:
     lib_path = os.path.join(build_dir, lib)
     for path in get_files(lib_path):  # Expand wildcards in |lib_path|.
@@ -250,13 +270,15 @@ def combine_libs(build_dir, libs, dest_lib):
       cmdline = cmdline + ' "%s"' % path
   run(cmdline, os.path.join(cef_dir, 'tools'))
 
+
 def run(command_line, working_dir):
   """ Run a command. """
   sys.stdout.write('-------- Running "'+command_line+'" in "'+\
                    working_dir+'"...'+"\n")
   args = shlex.split(command_line.replace('\\', '\\\\'))
-  return subprocess.check_call(args, cwd=working_dir, env=os.environ,
-                               shell=(sys.platform == 'win32'))
+  return subprocess.check_call(
+      args, cwd=working_dir, env=os.environ, shell=(sys.platform == 'win32'))
 
+
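# Illustrative example (not part of the commit): run() doubles backslashes
# before shlex.split() so that Windows paths survive POSIX-style tokenization:
import shlex
cmd = 'msvs_env.bat win64 python combine_libs.py -o "c:\\out\\lib.lib"'
args = shlex.split(cmd.replace('\\', '\\\\'))
assert args[-1] == 'c:\\out\\lib.lib'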
 # cannot be loaded as a module
 if __name__ != "__main__":
@@ -269,45 +291,81 @@ This utility builds the CEF Binary Distribution.
 """
 
 parser = OptionParser(description=disc)
-parser.add_option('--output-dir', dest='outputdir', metavar='DIR',
-                  help='output directory [required]')
+parser.add_option(
+    '--output-dir',
+    dest='outputdir',
+    metavar='DIR',
+    help='output directory [required]')
-parser.add_option('--distrib-subdir', dest='distribsubdir',
-                  help='name of the subdirectory for the distribution',
-                  default='')
+parser.add_option(
+    '--distrib-subdir',
+    dest='distribsubdir',
+    help='name of the subdirectory for the distribution',
+    default='')
-parser.add_option('--allow-partial',
-                  action='store_true', dest='allowpartial', default=False,
-                  help='allow creation of partial distributions')
+parser.add_option(
+    '--allow-partial',
+    action='store_true',
+    dest='allowpartial',
+    default=False,
+    help='allow creation of partial distributions')
-parser.add_option('--no-symbols',
-                  action='store_true', dest='nosymbols', default=False,
-                  help='don\'t create symbol files')
+parser.add_option(
+    '--no-symbols',
+    action='store_true',
+    dest='nosymbols',
+    default=False,
+    help='don\'t create symbol files')
-parser.add_option('--no-docs',
-                  action='store_true', dest='nodocs', default=False,
-                  help='don\'t create documentation')
+parser.add_option(
+    '--no-docs',
+    action='store_true',
+    dest='nodocs',
+    default=False,
+    help='don\'t create documentation')
-parser.add_option('--no-archive',
-                  action='store_true', dest='noarchive', default=False,
-                  help='don\'t create archives for output directories')
+parser.add_option(
+    '--no-archive',
+    action='store_true',
+    dest='noarchive',
+    default=False,
+    help='don\'t create archives for output directories')
-parser.add_option('--ninja-build',
-                  action='store_true', dest='ninjabuild', default=False,
-                  help='build was created using ninja')
+parser.add_option(
+    '--ninja-build',
+    action='store_true',
+    dest='ninjabuild',
+    default=False,
+    help='build was created using ninja')
-parser.add_option('--x64-build',
-                  action='store_true', dest='x64build', default=False,
-                  help='create a 64-bit binary distribution')
+parser.add_option(
+    '--x64-build',
+    action='store_true',
+    dest='x64build',
+    default=False,
+    help='create a 64-bit binary distribution')
-parser.add_option('--arm-build',
-                  action='store_true', dest='armbuild', default=False,
-                  help='create an ARM binary distribution')
+parser.add_option(
+    '--arm-build',
+    action='store_true',
+    dest='armbuild',
+    default=False,
+    help='create an ARM binary distribution')
-parser.add_option('--minimal',
-                  action='store_true', dest='minimal', default=False,
-                  help='include only release build binary files')
+parser.add_option(
+    '--minimal',
+    action='store_true',
+    dest='minimal',
+    default=False,
+    help='include only release build binary files')
-parser.add_option('--client',
-                  action='store_true', dest='client', default=False,
-                  help='include only the sample application')
+parser.add_option(
+    '--client',
+    action='store_true',
+    dest='client',
+    default=False,
+    help='include only the sample application')
-parser.add_option('-q', '--quiet',
-                  action='store_true', dest='quiet', default=False,
-                  help='do not output detailed status information')
+parser.add_option(
+    '-q',
+    '--quiet',
+    action='store_true',
+    dest='quiet',
+    default=False,
+    help='do not output detailed status information')
 (options, args) = parser.parse_args()
 
 # Test the operating system.
-platform = '';
+platform = ''
 if sys.platform == 'win32':
   platform = 'windows'
 elif sys.platform == 'darwin':
@ -369,7 +427,8 @@ args = {}
|
|||||||
read_version_file(os.path.join(cef_dir, 'VERSION'), args)
|
read_version_file(os.path.join(cef_dir, 'VERSION'), args)
|
||||||
read_version_file(os.path.join(cef_dir, '../chrome/VERSION'), args)
|
read_version_file(os.path.join(cef_dir, '../chrome/VERSION'), args)
|
||||||
|
|
||||||
cef_ver = '%s.%s.%s.g%s' % (args['CEF_MAJOR'], args['BUILD'], cef_commit_number, cef_rev[:7])
|
cef_ver = '%s.%s.%s.g%s' % (args['CEF_MAJOR'], args['BUILD'], cef_commit_number,
|
||||||
|
cef_rev[:7])
|
||||||
chromium_ver = args['MAJOR'] + '.' + args['MINOR'] + '.' + args['BUILD'] + '.' + args['PATCH']
|
chromium_ver = args['MAJOR'] + '.' + args['MINOR'] + '.' + args['BUILD'] + '.' + args['PATCH']
|
||||||
|
|
||||||
# list of output directories to be archived
|
# list of output directories to be archived
|
||||||
@ -599,47 +658,64 @@ if platform == 'windows':
|
|||||||
if mode == 'standard':
|
if mode == 'standard':
|
||||||
# transfer Debug files
|
# transfer Debug files
|
||||||
build_dir = build_dir_debug
|
build_dir = build_dir_debug
|
||||||
if not options.allowpartial or path_exists(os.path.join(build_dir, 'libcef.dll')):
|
if not options.allowpartial or path_exists(
|
||||||
|
os.path.join(build_dir, 'libcef.dll')):
|
||||||
valid_build_dir = build_dir
|
valid_build_dir = build_dir
|
||||||
dst_dir = os.path.join(output_dir, 'Debug')
|
dst_dir = os.path.join(output_dir, 'Debug')
|
||||||
make_dir(dst_dir, options.quiet)
|
make_dir(dst_dir, options.quiet)
|
||||||
copy_files(os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
|
copy_files(
|
||||||
|
os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
|
||||||
for binary in binaries:
|
for binary in binaries:
|
||||||
copy_file(os.path.join(build_dir, binary), os.path.join(dst_dir, os.path.basename(binary)), options.quiet)
|
copy_file(
|
||||||
|
os.path.join(build_dir, binary),
|
||||||
|
os.path.join(dst_dir, os.path.basename(binary)), options.quiet)
|
||||||
copy_file(os.path.join(build_dir, libcef_dll_file), os.path.join(dst_dir, 'libcef.lib'), \
|
copy_file(os.path.join(build_dir, libcef_dll_file), os.path.join(dst_dir, 'libcef.lib'), \
|
||||||
options.quiet)
|
options.quiet)
|
||||||
combine_libs(build_dir, sandbox_libs, os.path.join(dst_dir, 'cef_sandbox.lib'));
|
combine_libs(build_dir, sandbox_libs,
|
||||||
|
os.path.join(dst_dir, 'cef_sandbox.lib'))
|
||||||
|
|
||||||
if not options.nosymbols:
|
if not options.nosymbols:
|
||||||
# create the symbol output directory
|
# create the symbol output directory
|
||||||
symbol_output_dir = create_output_dir(output_dir_name + '_debug_symbols', options.outputdir)
|
symbol_output_dir = create_output_dir(
|
||||||
|
output_dir_name + '_debug_symbols', options.outputdir)
|
||||||
# transfer contents
|
# transfer contents
|
||||||
copy_file(os.path.join(build_dir, 'libcef.dll.pdb'), symbol_output_dir, options.quiet)
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'libcef.dll.pdb'), symbol_output_dir,
|
||||||
|
options.quiet)
|
||||||
else:
|
else:
|
||||||
sys.stderr.write("No Debug build files.\n")
|
sys.stderr.write("No Debug build files.\n")
|
||||||
|
|
||||||
# transfer Release files
|
# transfer Release files
|
||||||
build_dir = build_dir_release
|
build_dir = build_dir_release
|
||||||
if not options.allowpartial or path_exists(os.path.join(build_dir, 'libcef.dll')):
|
if not options.allowpartial or path_exists(
|
||||||
|
os.path.join(build_dir, 'libcef.dll')):
|
||||||
valid_build_dir = build_dir
|
valid_build_dir = build_dir
|
||||||
dst_dir = os.path.join(output_dir, 'Release')
|
dst_dir = os.path.join(output_dir, 'Release')
|
||||||
make_dir(dst_dir, options.quiet)
|
make_dir(dst_dir, options.quiet)
|
||||||
copy_files(os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
|
copy_files(
|
||||||
|
os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
|
||||||
for binary in binaries:
|
for binary in binaries:
|
||||||
copy_file(os.path.join(build_dir, binary), os.path.join(dst_dir, os.path.basename(binary)), options.quiet)
|
copy_file(
|
||||||
|
os.path.join(build_dir, binary),
|
||||||
|
os.path.join(dst_dir, os.path.basename(binary)), options.quiet)
|
||||||
|
|
||||||
if mode != 'client':
|
if mode != 'client':
|
||||||
copy_file(os.path.join(build_dir, libcef_dll_file), os.path.join(dst_dir, 'libcef.lib'), \
|
copy_file(os.path.join(build_dir, libcef_dll_file), os.path.join(dst_dir, 'libcef.lib'), \
|
||||||
options.quiet)
|
options.quiet)
|
||||||
combine_libs(build_dir, sandbox_libs, os.path.join(dst_dir, 'cef_sandbox.lib'));
|
combine_libs(build_dir, sandbox_libs,
|
||||||
|
os.path.join(dst_dir, 'cef_sandbox.lib'))
|
||||||
else:
|
else:
|
||||||
copy_file(os.path.join(build_dir, 'cefclient.exe'), dst_dir, options.quiet)
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'cefclient.exe'), dst_dir, options.quiet)
|
||||||
|
|
||||||
if not options.nosymbols:
|
if not options.nosymbols:
|
||||||
# create the symbol output directory
|
# create the symbol output directory
|
||||||
symbol_output_dir = create_output_dir(output_dir_name + '_release_symbols', options.outputdir)
|
symbol_output_dir = create_output_dir(
|
||||||
|
output_dir_name + '_release_symbols', options.outputdir)
|
||||||
# transfer contents
|
# transfer contents
|
||||||
copy_file(os.path.join(build_dir, 'libcef.dll.pdb'), symbol_output_dir, options.quiet)
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'libcef.dll.pdb'), symbol_output_dir,
|
||||||
|
options.quiet)
|
||||||
else:
|
else:
|
||||||
sys.stderr.write("No Release build files.\n")
|
sys.stderr.write("No Release build files.\n")
|
||||||
|
|
||||||
@ -652,12 +728,19 @@ if platform == 'windows':
|
|||||||
dst_dir = os.path.join(output_dir, 'Resources')
|
dst_dir = os.path.join(output_dir, 'Resources')
|
||||||
make_dir(dst_dir, options.quiet)
|
make_dir(dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet)
|
copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet)
|
copy_file(
|
||||||
copy_file(os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet)
|
os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'cef_extensions.pak'), dst_dir, options.quiet)
|
copy_file(
|
||||||
copy_file(os.path.join(build_dir, 'devtools_resources.pak'), dst_dir, options.quiet)
|
os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet)
|
||||||
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'cef_extensions.pak'), dst_dir, options.quiet)
|
||||||
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'devtools_resources.pak'), dst_dir,
|
||||||
|
options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet)
|
copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet)
|
||||||
copy_dir(os.path.join(build_dir, 'locales'), os.path.join(dst_dir, 'locales'), options.quiet)
|
copy_dir(
|
||||||
|
os.path.join(build_dir, 'locales'),
|
||||||
|
os.path.join(dst_dir, 'locales'), options.quiet)
|
||||||
|
|
||||||
if mode == 'standard' or mode == 'minimal':
|
if mode == 'standard' or mode == 'minimal':
|
||||||
# transfer include files
|
# transfer include files
|
||||||
@ -694,7 +777,8 @@ if platform == 'windows':
|
|||||||
src_dir = os.path.join(cef_dir, 'docs')
|
src_dir = os.path.join(cef_dir, 'docs')
|
||||||
if path_exists(src_dir):
|
if path_exists(src_dir):
|
||||||
# create the docs output directory
|
# create the docs output directory
|
||||||
docs_output_dir = create_output_dir(output_dir_base + '_docs', options.outputdir)
|
docs_output_dir = create_output_dir(output_dir_base + '_docs',
|
||||||
|
options.outputdir)
|
||||||
# transfer contents
|
# transfer contents
|
||||||
copy_dir(src_dir, docs_output_dir, options.quiet)
|
copy_dir(src_dir, docs_output_dir, options.quiet)
|
||||||
|
|
||||||
@ -705,46 +789,60 @@ elif platform == 'macosx':
|
|||||||
if mode == 'standard':
|
if mode == 'standard':
|
||||||
# transfer Debug files
|
# transfer Debug files
|
||||||
build_dir = build_dir_debug
|
build_dir = build_dir_debug
|
||||||
if not options.allowpartial or path_exists(os.path.join(build_dir, 'cefclient.app')):
|
if not options.allowpartial or path_exists(
|
||||||
|
os.path.join(build_dir, 'cefclient.app')):
|
||||||
valid_build_dir = build_dir
|
valid_build_dir = build_dir
|
||||||
dst_dir = os.path.join(output_dir, 'Debug')
|
dst_dir = os.path.join(output_dir, 'Debug')
|
||||||
make_dir(dst_dir, options.quiet)
|
make_dir(dst_dir, options.quiet)
|
||||||
copy_dir(os.path.join(build_dir, 'cefclient.app/Contents/Frameworks/%s.framework' % framework_name), \
|
copy_dir(os.path.join(build_dir, 'cefclient.app/Contents/Frameworks/%s.framework' % framework_name), \
|
||||||
os.path.join(dst_dir, '%s.framework' % framework_name), options.quiet)
|
os.path.join(dst_dir, '%s.framework' % framework_name), options.quiet)
|
||||||
copy_file(os.path.join(script_dir, 'distrib/mac/widevinecdmadapter.plugin'), dst_dir, options.quiet)
|
copy_file(
|
||||||
|
os.path.join(script_dir, 'distrib/mac/widevinecdmadapter.plugin'),
|
||||||
|
dst_dir, options.quiet)
|
||||||
|
|
||||||
if not options.nosymbols:
|
if not options.nosymbols:
|
||||||
# create the symbol output directory
|
# create the symbol output directory
|
||||||
symbol_output_dir = create_output_dir(output_dir_name + '_debug_symbols', options.outputdir)
|
symbol_output_dir = create_output_dir(
|
||||||
|
output_dir_name + '_debug_symbols', options.outputdir)
|
||||||
|
|
||||||
# The real dSYM already exists, just copy it to the output directory.
|
# The real dSYM already exists, just copy it to the output directory.
|
||||||
# dSYMs are only generated when is_official_build=true or enable_dsyms=true.
|
# dSYMs are only generated when is_official_build=true or enable_dsyms=true.
|
||||||
# See //build/config/mac/symbols.gni.
|
# See //build/config/mac/symbols.gni.
|
||||||
copy_dir(os.path.join(build_dir, '%s.dSYM' % framework_name),
|
copy_dir(
|
||||||
os.path.join(symbol_output_dir, '%s.dSYM' % framework_name), options.quiet)
|
os.path.join(build_dir, '%s.dSYM' % framework_name),
|
||||||
|
os.path.join(symbol_output_dir, '%s.dSYM' % framework_name),
|
||||||
|
options.quiet)
|
||||||
|
|
||||||
# transfer Release files
|
# transfer Release files
|
||||||
build_dir = build_dir_release
|
build_dir = build_dir_release
|
||||||
if not options.allowpartial or path_exists(os.path.join(build_dir, 'cefclient.app')):
|
if not options.allowpartial or path_exists(
|
||||||
|
os.path.join(build_dir, 'cefclient.app')):
|
||||||
valid_build_dir = build_dir
|
valid_build_dir = build_dir
|
||||||
dst_dir = os.path.join(output_dir, 'Release')
|
dst_dir = os.path.join(output_dir, 'Release')
|
||||||
make_dir(dst_dir, options.quiet)
|
make_dir(dst_dir, options.quiet)
|
||||||
if mode != 'client':
|
if mode != 'client':
|
||||||
copy_dir(os.path.join(build_dir, 'cefclient.app/Contents/Frameworks/%s.framework' % framework_name), \
|
copy_dir(os.path.join(build_dir, 'cefclient.app/Contents/Frameworks/%s.framework' % framework_name), \
|
||||||
os.path.join(dst_dir, '%s.framework' % framework_name), options.quiet)
|
os.path.join(dst_dir, '%s.framework' % framework_name), options.quiet)
|
||||||
copy_file(os.path.join(script_dir, 'distrib/mac/widevinecdmadapter.plugin'), dst_dir, options.quiet)
|
copy_file(
|
||||||
|
os.path.join(script_dir, 'distrib/mac/widevinecdmadapter.plugin'),
|
||||||
|
dst_dir, options.quiet)
|
||||||
else:
|
else:
|
||||||
copy_dir(os.path.join(build_dir, 'cefclient.app'), os.path.join(dst_dir, 'cefclient.app'), options.quiet)
|
copy_dir(
|
||||||
|
os.path.join(build_dir, 'cefclient.app'),
|
||||||
|
os.path.join(dst_dir, 'cefclient.app'), options.quiet)
|
||||||
|
|
||||||
if not options.nosymbols:
|
if not options.nosymbols:
|
||||||
# create the symbol output directory
|
# create the symbol output directory
|
||||||
symbol_output_dir = create_output_dir(output_dir_name + '_release_symbols', options.outputdir)
|
symbol_output_dir = create_output_dir(
|
||||||
|
output_dir_name + '_release_symbols', options.outputdir)
|
||||||
|
|
||||||
# The real dSYM already exists, just copy it to the output directory.
|
# The real dSYM already exists, just copy it to the output directory.
|
||||||
# dSYMs are only generated when is_official_build=true or enable_dsyms=true.
|
# dSYMs are only generated when is_official_build=true or enable_dsyms=true.
|
||||||
# See //build/config/mac/symbols.gni.
|
# See //build/config/mac/symbols.gni.
|
||||||
copy_dir(os.path.join(build_dir, '%s.dSYM' % framework_name),
|
copy_dir(
|
||||||
os.path.join(symbol_output_dir, '%s.dSYM' % framework_name), options.quiet)
|
os.path.join(build_dir, '%s.dSYM' % framework_name),
|
||||||
|
os.path.join(symbol_output_dir, '%s.dSYM' % framework_name),
|
||||||
|
options.quiet)
|
||||||
|
|
||||||
if mode == 'standard' or mode == 'minimal':
|
if mode == 'standard' or mode == 'minimal':
|
||||||
# transfer include files
|
# transfer include files
|
||||||
@ -804,11 +902,17 @@ elif platform == 'linux':
|
|||||||
valid_build_dir = build_dir
|
valid_build_dir = build_dir
|
||||||
dst_dir = os.path.join(output_dir, 'Debug')
|
dst_dir = os.path.join(output_dir, 'Debug')
|
||||||
make_dir(dst_dir, options.quiet)
|
make_dir(dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'chrome_sandbox'), os.path.join(dst_dir, 'chrome-sandbox'), options.quiet)
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'chrome_sandbox'),
|
||||||
|
os.path.join(dst_dir, 'chrome-sandbox'), options.quiet)
|
||||||
copy_file(libcef_path, dst_dir, options.quiet)
|
copy_file(libcef_path, dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'libwidevinecdmadapter.so'), dst_dir, options.quiet)
|
copy_file(
|
||||||
copy_file(os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet)
|
os.path.join(build_dir, 'libwidevinecdmadapter.so'), dst_dir,
|
||||||
copy_file(os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet)
|
options.quiet)
|
||||||
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet)
|
||||||
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet)
|
||||||
else:
|
else:
|
||||||
sys.stderr.write("No Debug build files.\n")
|
sys.stderr.write("No Debug build files.\n")
|
||||||
|
|
||||||
@ -823,10 +927,16 @@ elif platform == 'linux':
|
|||||||
if mode == 'client':
|
if mode == 'client':
|
||||||
copy_file(os.path.join(build_dir, 'cefsimple'), dst_dir, options.quiet)
|
copy_file(os.path.join(build_dir, 'cefsimple'), dst_dir, options.quiet)
|
||||||
copy_file(libcef_path, dst_dir, options.quiet)
|
copy_file(libcef_path, dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'chrome_sandbox'), os.path.join(dst_dir, 'chrome-sandbox'), options.quiet)
|
copy_file(
|
||||||
copy_file(os.path.join(build_dir, 'libwidevinecdmadapter.so'), dst_dir, options.quiet)
|
os.path.join(build_dir, 'chrome_sandbox'),
|
||||||
copy_file(os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet)
|
os.path.join(dst_dir, 'chrome-sandbox'), options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet)
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'libwidevinecdmadapter.so'), dst_dir,
|
||||||
|
options.quiet)
|
||||||
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet)
|
||||||
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet)
|
||||||
else:
|
else:
|
||||||
sys.stderr.write("No Release build files.\n")
|
sys.stderr.write("No Release build files.\n")
|
||||||
|
|
||||||
@ -839,12 +949,19 @@ elif platform == 'linux':
|
|||||||
dst_dir = os.path.join(output_dir, 'Resources')
|
dst_dir = os.path.join(output_dir, 'Resources')
|
||||||
make_dir(dst_dir, options.quiet)
|
make_dir(dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet)
|
copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet)
|
copy_file(
|
||||||
copy_file(os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet)
|
os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'cef_extensions.pak'), dst_dir, options.quiet)
|
copy_file(
|
||||||
copy_file(os.path.join(build_dir, 'devtools_resources.pak'), dst_dir, options.quiet)
|
os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet)
|
||||||
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'cef_extensions.pak'), dst_dir, options.quiet)
|
||||||
|
copy_file(
|
||||||
|
os.path.join(build_dir, 'devtools_resources.pak'), dst_dir,
|
||||||
|
options.quiet)
|
||||||
copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet)
|
copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet)
|
||||||
copy_dir(os.path.join(build_dir, 'locales'), os.path.join(dst_dir, 'locales'), options.quiet)
|
copy_dir(
|
||||||
|
os.path.join(build_dir, 'locales'),
|
||||||
|
os.path.join(dst_dir, 'locales'), options.quiet)
|
||||||
|
|
||||||
if mode == 'standard' or mode == 'minimal':
|
if mode == 'standard' or mode == 'minimal':
|
||||||
# transfer include files
|
# transfer include files
|
||||||
@ -885,7 +1002,8 @@ if not options.noarchive:
|
|||||||
|
|
||||||
for dir in archive_dirs:
|
for dir in archive_dirs:
|
||||||
if not options.quiet:
|
if not options.quiet:
|
||||||
sys.stdout.write("Creating %s archive for %s...\n" % (archive_format, os.path.basename(dir)))
|
sys.stdout.write("Creating %s archive for %s...\n" %
|
||||||
|
(archive_format, os.path.basename(dir)))
|
||||||
if archive_format == 'zip':
|
if archive_format == 'zip':
|
||||||
create_zip_archive(dir)
|
create_zip_archive(dir)
|
||||||
elif archive_format == 'tar.gz':
|
elif archive_format == 'tar.gz':
|
||||||
|
@ -4,6 +4,7 @@
|
|||||||
|
|
||||||
from cef_parser import *
|
from cef_parser import *
|
||||||
|
|
||||||
|
|
||||||
def make_gypi_file(header):
|
def make_gypi_file(header):
|
||||||
# header string
|
# header string
|
||||||
result = \
|
result = \
|
||||||
@ -83,6 +84,7 @@ def make_gypi_file(header):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def write_gypi_file(header, file):
|
def write_gypi_file(header, file):
|
||||||
newcontents = make_gypi_file(header)
|
newcontents = make_gypi_file(header)
|
||||||
return (file, newcontents)
|
return (file, newcontents)
|
||||||
|
@ -2,7 +2,6 @@
|
|||||||
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
|
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
|
||||||
# reserved. Use of this source code is governed by a BSD-style license
|
# reserved. Use of this source code is governed by a BSD-style license
|
||||||
# that can be found in the LICENSE file.
|
# that can be found in the LICENSE file.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
A simple utility function to merge pack resource files into a single resource file.
|
A simple utility function to merge pack resource files into a single resource file.
|
||||||
"""
|
"""
|
||||||
@ -109,6 +108,7 @@ def MakeFile(output, input):
|
|||||||
else:
|
else:
|
||||||
sys.stdout.write('File ' + output + ' is already up to date.\n')
|
sys.stdout.write('File ' + output + ' is already up to date.\n')
|
||||||
|
|
||||||
|
|
||||||
def main(argv):
|
def main(argv):
|
||||||
if len(argv) < 3:
|
if len(argv) < 3:
|
||||||
print("Usage:\n %s <output_filename> <input_file1> [input_file2] ... " %
|
print("Usage:\n %s <output_filename> <input_file1> [input_file2] ... " %
|
||||||
|
@ -14,23 +14,38 @@ if __name__ != "__main__":
|
|||||||
sys.stderr.write('This file cannot be loaded as a module!')
|
sys.stderr.write('This file cannot be loaded as a module!')
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
|
||||||
|
|
||||||
# parse command-line options
|
# parse command-line options
|
||||||
disc = """
|
disc = """
|
||||||
This utility creates the version header file.
|
This utility creates the version header file.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
parser = OptionParser(description=disc)
|
parser = OptionParser(description=disc)
|
||||||
parser.add_option('--header', dest='header', metavar='FILE',
|
parser.add_option(
|
||||||
|
'--header',
|
||||||
|
dest='header',
|
||||||
|
metavar='FILE',
|
||||||
help='output version header file [required]')
|
help='output version header file [required]')
|
||||||
parser.add_option('--cef_version', dest='cef_version', metavar='FILE',
|
parser.add_option(
|
||||||
|
'--cef_version',
|
||||||
|
dest='cef_version',
|
||||||
|
metavar='FILE',
|
||||||
help='input CEF version config file [required]')
|
help='input CEF version config file [required]')
|
||||||
parser.add_option('--chrome_version', dest='chrome_version', metavar='FILE',
|
parser.add_option(
|
||||||
|
'--chrome_version',
|
||||||
|
dest='chrome_version',
|
||||||
|
metavar='FILE',
|
||||||
help='input Chrome version config file [required]')
|
help='input Chrome version config file [required]')
|
||||||
parser.add_option('--cpp_header_dir', dest='cpp_header_dir', metavar='DIR',
|
parser.add_option(
|
||||||
|
'--cpp_header_dir',
|
||||||
|
dest='cpp_header_dir',
|
||||||
|
metavar='DIR',
|
||||||
help='input directory for C++ header files [required]')
|
help='input directory for C++ header files [required]')
|
||||||
parser.add_option('-q', '--quiet',
|
parser.add_option(
|
||||||
action='store_true', dest='quiet', default=False,
|
'-q',
|
||||||
|
'--quiet',
|
||||||
|
action='store_true',
|
||||||
|
dest='quiet',
|
||||||
|
default=False,
|
||||||
help='do not output detailed status information')
|
help='do not output detailed status information')
|
||||||
(options, args) = parser.parse_args()
|
(options, args) = parser.parse_args()
|
||||||
|
|
||||||
@ -39,12 +54,14 @@ if options.header is None or options.cef_version is None or options.chrome_versi
|
|||||||
parser.print_help(sys.stdout)
|
parser.print_help(sys.stdout)
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
|
||||||
|
|
||||||
def write_version_header(header, chrome_version, cef_version, cpp_header_dir):
|
def write_version_header(header, chrome_version, cef_version, cpp_header_dir):
|
||||||
""" Creates the header file for the current revision and Chrome version information
|
""" Creates the header file for the current revision and Chrome version information
|
||||||
if the information has changed or if the file doesn't already exist. """
|
if the information has changed or if the file doesn't already exist. """
|
||||||
|
|
||||||
if not path_exists(chrome_version):
|
if not path_exists(chrome_version):
|
||||||
raise Exception('Chrome version file '+chrome_version+' does not exist.')
|
raise Exception('Chrome version file ' + chrome_version +
|
||||||
|
' does not exist.')
|
||||||
if not path_exists(cef_version):
|
if not path_exists(cef_version):
|
||||||
raise Exception('CEF version file ' + cef_version + ' does not exist.')
|
raise Exception('CEF version file ' + cef_version + ' does not exist.')
|
||||||
|
|
||||||
@ -64,7 +81,8 @@ def write_version_header(header, chrome_version, cef_version, cpp_header_dir):
|
|||||||
|
|
||||||
commit_number = git.get_commit_number()
|
commit_number = git.get_commit_number()
|
||||||
commit_hash = git.get_hash()
|
commit_hash = git.get_hash()
|
||||||
version = '%s.%s.%s.g%s' % (args['CEF_MAJOR'], args['BUILD'], commit_number, commit_hash[:7])
|
version = '%s.%s.%s.g%s' % (args['CEF_MAJOR'], args['BUILD'], commit_number,
|
||||||
|
commit_hash[:7])
|
||||||
|
|
||||||
# calculate api hashes
|
# calculate api hashes
|
||||||
api_hash_calculator = cef_api_hash(cpp_header_dir, verbose=False)
|
api_hash_calculator = cef_api_hash(cpp_header_dir, verbose=False)
|
||||||
@ -165,7 +183,9 @@ def write_version_header(header, chrome_version, cef_version, cpp_header_dir):
|
|||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
written = write_version_header(options.header, options.chrome_version, options.cef_version, options.cpp_header_dir)
|
|
||||||
|
written = write_version_header(options.header, options.chrome_version,
|
||||||
|
options.cef_version, options.cpp_header_dir)
|
||||||
if not options.quiet:
|
if not options.quiet:
|
||||||
if written:
|
if written:
|
||||||
sys.stdout.write('File ' + options.header + ' updated.\n')
|
sys.stdout.write('File ' + options.header + ' updated.\n')
|
||||||
|
@ -5,6 +5,7 @@
|
|||||||
from cef_parser import *
|
from cef_parser import *
|
||||||
from make_ctocpp_impl import *
|
from make_ctocpp_impl import *
|
||||||
|
|
||||||
|
|
||||||
def make_views_function_stub_impl(clsname, func):
|
def make_views_function_stub_impl(clsname, func):
|
||||||
name = func.get_name()
|
name = func.get_name()
|
||||||
|
|
||||||
@ -32,6 +33,7 @@ def make_views_function_stub_impl(clsname, func):
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def make_views_class_stub_impl(header, cls):
|
def make_views_class_stub_impl(header, cls):
|
||||||
impl = ''
|
impl = ''
|
||||||
|
|
||||||
@ -42,6 +44,7 @@ def make_views_class_stub_impl(header, cls):
|
|||||||
|
|
||||||
return impl
|
return impl
|
||||||
|
|
||||||
|
|
||||||
def make_views_stub_impl(header):
|
def make_views_stub_impl(header):
|
||||||
includes = ''
|
includes = ''
|
||||||
impl = ''
|
impl = ''
|
||||||
@ -64,10 +67,12 @@ def make_views_stub_impl(header):
|
|||||||
result += impl
|
result += impl
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def write_views_stub_impl(header, file):
|
def write_views_stub_impl(header, file):
|
||||||
newcontents = make_views_stub_impl(header)
|
newcontents = make_views_stub_impl(header)
|
||||||
return (file, newcontents)
|
return (file, newcontents)
|
||||||
|
|
||||||
|
|
||||||
# Test the module.
|
# Test the module.
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
import sys
|
import sys
|
||||||
|
@ -4,6 +4,7 @@
|
|||||||
|
|
||||||
from cef_parser import *
|
from cef_parser import *
|
||||||
|
|
||||||
|
|
||||||
def make_wrapper_types_header(header):
|
def make_wrapper_types_header(header):
|
||||||
result = get_copyright()
|
result = get_copyright()
|
||||||
|
|
||||||
|
@ -9,16 +9,19 @@ import sys
|
|||||||
from exec_util import exec_cmd
|
from exec_util import exec_cmd
|
||||||
import git_util as git
|
import git_util as git
|
||||||
|
|
||||||
|
|
||||||
def msg(message):
|
def msg(message):
|
||||||
""" Output a message. """
|
""" Output a message. """
|
||||||
sys.stdout.write('--> ' + message + "\n")
|
sys.stdout.write('--> ' + message + "\n")
|
||||||
|
|
||||||
|
|
||||||
def warn(message):
|
def warn(message):
|
||||||
""" Output a warning. """
|
""" Output a warning. """
|
||||||
sys.stdout.write('-' * 80 + "\n")
|
sys.stdout.write('-' * 80 + "\n")
|
||||||
sys.stdout.write('!!!! WARNING: ' + message + "\n")
|
sys.stdout.write('!!!! WARNING: ' + message + "\n")
|
||||||
sys.stdout.write('-' * 80 + "\n")
|
sys.stdout.write('-' * 80 + "\n")
|
||||||
|
|
||||||
|
|
||||||
def extract_paths(file):
|
def extract_paths(file):
|
||||||
""" Extract the list of modified paths from the patch file. """
|
""" Extract the list of modified paths from the patch file. """
|
||||||
paths = []
|
paths = []
|
||||||
@ -32,6 +35,7 @@ def extract_paths(file):
|
|||||||
paths.append(match.group(1).strip())
|
paths.append(match.group(1).strip())
|
||||||
return paths
|
return paths
|
||||||
|
|
||||||
|
|
||||||
# Cannot be loaded as a module.
|
# Cannot be loaded as a module.
|
||||||
if __name__ != "__main__":
|
if __name__ != "__main__":
|
||||||
sys.stderr.write('This file cannot be loaded as a module!')
|
sys.stderr.write('This file cannot be loaded as a module!')
|
||||||
@ -42,6 +46,7 @@ disc = """
|
|||||||
This utility updates existing patch files.
|
This utility updates existing patch files.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
# Support options with multiple arguments.
|
# Support options with multiple arguments.
|
||||||
class MultipleOption(Option):
|
class MultipleOption(Option):
|
||||||
ACTIONS = Option.ACTIONS + ("extend",)
|
ACTIONS = Option.ACTIONS + ("extend",)
|
||||||
@ -55,16 +60,26 @@ class MultipleOption(Option):
|
|||||||
else:
|
else:
|
||||||
Option.take_action(self, action, dest, opt, value, values, parser)
|
Option.take_action(self, action, dest, opt, value, values, parser)
|
||||||
|
|
||||||
parser = OptionParser(option_class=MultipleOption,
|
|
||||||
description=disc)
|
parser = OptionParser(option_class=MultipleOption, description=disc)
|
||||||
parser.add_option('--resave',
|
parser.add_option(
|
||||||
action='store_true', dest='resave', default=False,
|
'--resave',
|
||||||
|
action='store_true',
|
||||||
|
dest='resave',
|
||||||
|
default=False,
|
||||||
help='re-save existing patch files to pick up manual changes')
|
help='re-save existing patch files to pick up manual changes')
|
||||||
parser.add_option('--revert',
|
parser.add_option(
|
||||||
action='store_true', dest='revert', default=False,
|
'--revert',
|
||||||
|
action='store_true',
|
||||||
|
dest='revert',
|
||||||
|
default=False,
|
||||||
help='revert all changes from existing patch files')
|
help='revert all changes from existing patch files')
|
||||||
parser.add_option('--patch',
|
parser.add_option(
|
||||||
action='extend', dest='patch', type='string', default=[],
|
'--patch',
|
||||||
|
action='extend',
|
||||||
|
dest='patch',
|
||||||
|
type='string',
|
||||||
|
default=[],
|
||||||
help='optional patch name to process (multiples allowed)')
|
help='optional patch name to process (multiples allowed)')
|
||||||
(options, args) = parser.parse_args()
|
(options, args) = parser.parse_args()
|
||||||
|
|
||||||
|
@ -19,12 +19,14 @@ cef_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
|
|||||||
cef_patch_dir = os.path.join(cef_dir, 'patch')
|
cef_patch_dir = os.path.join(cef_dir, 'patch')
|
||||||
src_dir = os.path.abspath(os.path.join(cef_dir, os.pardir))
|
src_dir = os.path.abspath(os.path.join(cef_dir, os.pardir))
|
||||||
|
|
||||||
|
|
||||||
def write_note(type, note):
|
def write_note(type, note):
|
||||||
separator = '-' * 79 + '\n'
|
separator = '-' * 79 + '\n'
|
||||||
sys.stdout.write(separator)
|
sys.stdout.write(separator)
|
||||||
sys.stdout.write('!!!! %s: %s\n' % (type, note))
|
sys.stdout.write('!!!! %s: %s\n' % (type, note))
|
||||||
sys.stdout.write(separator)
|
sys.stdout.write(separator)
|
||||||
|
|
||||||
|
|
||||||
def apply_patch_file(patch_file, patch_dir):
|
def apply_patch_file(patch_file, patch_dir):
|
||||||
''' Apply a specific patch file in optional patch directory. '''
|
''' Apply a specific patch file in optional patch directory. '''
|
||||||
patch_path = os.path.join(cef_patch_dir, 'patches', patch_file + '.patch')
|
patch_path = os.path.join(cef_patch_dir, 'patches', patch_file + '.patch')
|
||||||
@ -39,9 +41,11 @@ def apply_patch_file(patch_file, patch_dir):
|
|||||||
|
|
||||||
result = git_apply_patch_file(patch_path, patch_dir)
|
result = git_apply_patch_file(patch_path, patch_dir)
|
||||||
if result == 'fail':
|
if result == 'fail':
|
||||||
write_note('ERROR', 'This patch failed to apply. Your build will not be correct.')
|
write_note('ERROR',
|
||||||
|
'This patch failed to apply. Your build will not be correct.')
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def apply_patch_config():
|
def apply_patch_config():
|
||||||
''' Apply patch files based on a configuration file. '''
|
''' Apply patch files based on a configuration file. '''
|
||||||
config_file = os.path.join(cef_patch_dir, 'patch.cfg')
|
config_file = os.path.join(cef_patch_dir, 'patch.cfg')
|
||||||
@ -66,7 +70,8 @@ def apply_patch_config():
|
|||||||
dopatch = False
|
dopatch = False
|
||||||
|
|
||||||
if dopatch:
|
if dopatch:
|
||||||
result = apply_patch_file(patch_file, patch['path'] if 'path' in patch else None)
|
result = apply_patch_file(patch_file, patch['path']
|
||||||
|
if 'path' in patch else None)
|
||||||
results[result] += 1
|
results[result] += 1
|
||||||
|
|
||||||
if 'note' in patch:
|
if 'note' in patch:
|
||||||
@ -79,8 +84,13 @@ def apply_patch_config():
|
|||||||
|
|
||||||
if results['fail'] > 0:
|
if results['fail'] > 0:
|
||||||
sys.stdout.write('\n')
|
sys.stdout.write('\n')
|
||||||
write_note('ERROR', '%d patches failed to apply. Your build will not be correct.' % results['fail'])
|
write_note('ERROR',
|
||||||
raise Exception('%d patches failed to apply. Your build will not be correct.' % results['fail'])
|
'%d patches failed to apply. Your build will not be correct.' %
|
||||||
|
results['fail'])
|
||||||
|
raise Exception(
|
||||||
|
'%d patches failed to apply. Your build will not be correct.' %
|
||||||
|
results['fail'])
|
||||||
|
|
||||||
|
|
||||||
# Parse command-line options.
|
# Parse command-line options.
|
||||||
disc = """
|
disc = """
|
||||||
@ -88,9 +98,12 @@ This utility applies patch files.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
parser = OptionParser(description=disc)
|
parser = OptionParser(description=disc)
|
||||||
parser.add_option('--patch-file', dest='patchfile', metavar='FILE',
|
parser.add_option(
|
||||||
help='patch source file')
|
'--patch-file', dest='patchfile', metavar='FILE', help='patch source file')
|
||||||
parser.add_option('--patch-dir', dest='patchdir', metavar='DIR',
|
parser.add_option(
|
||||||
|
'--patch-dir',
|
||||||
|
dest='patchdir',
|
||||||
|
metavar='DIR',
|
||||||
help='patch target directory')
|
help='patch target directory')
|
||||||
(options, args) = parser.parse_args()
|
(options, args) = parser.parse_args()
|
||||||
|
|
||||||
|
@ -17,31 +17,46 @@ from make_views_stub_impl import *
|
|||||||
from make_wrapper_types_header import *
|
from make_wrapper_types_header import *
|
||||||
from optparse import OptionParser
|
from optparse import OptionParser
|
||||||
|
|
||||||
|
|
||||||
# cannot be loaded as a module
|
# cannot be loaded as a module
|
||||||
if __name__ != "__main__":
|
if __name__ != "__main__":
|
||||||
sys.stderr.write('This file cannot be loaded as a module!')
|
sys.stderr.write('This file cannot be loaded as a module!')
|
||||||
sys.exit()
|
sys.exit()
|
||||||
|
|
||||||
|
|
||||||
# parse command-line options
|
# parse command-line options
|
||||||
disc = """
|
disc = """
|
||||||
This utility generates files for the CEF C++ to C API translation layer.
|
This utility generates files for the CEF C++ to C API translation layer.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
parser = OptionParser(description=disc)
|
parser = OptionParser(description=disc)
|
||||||
parser.add_option('--root-dir', dest='rootdir', metavar='DIR',
|
parser.add_option(
|
||||||
|
'--root-dir',
|
||||||
|
dest='rootdir',
|
||||||
|
metavar='DIR',
|
||||||
help='CEF root directory [required]')
|
help='CEF root directory [required]')
|
||||||
parser.add_option('--backup',
|
parser.add_option(
|
||||||
action='store_true', dest='backup', default=False,
|
'--backup',
|
||||||
|
action='store_true',
|
||||||
|
dest='backup',
|
||||||
|
default=False,
|
||||||
help='create a backup of modified files')
|
help='create a backup of modified files')
|
||||||
parser.add_option('--force',
|
parser.add_option(
|
||||||
action='store_true', dest='force', default=False,
|
'--force',
|
||||||
|
action='store_true',
|
||||||
|
dest='force',
|
||||||
|
default=False,
|
||||||
help='force rewrite of the file')
|
help='force rewrite of the file')
|
||||||
parser.add_option('-c', '--classes', dest='classes', action='append',
|
parser.add_option(
|
||||||
|
'-c',
|
||||||
|
'--classes',
|
||||||
|
dest='classes',
|
||||||
|
action='append',
|
||||||
help='only translate the specified classes')
|
help='only translate the specified classes')
|
||||||
parser.add_option('-q', '--quiet',
|
parser.add_option(
|
||||||
action='store_true', dest='quiet', default=False,
|
'-q',
|
||||||
|
'--quiet',
|
||||||
|
action='store_true',
|
||||||
|
dest='quiet',
|
||||||
|
default=False,
|
||||||
help='do not output detailed status information')
|
help='do not output detailed status information')
|
||||||
(options, args) = parser.parse_args()
|
(options, args) = parser.parse_args()
|
||||||
|
|
||||||
@ -58,7 +73,8 @@ cpp_header_views_dir = os.path.join(cpp_header_dir, 'views')
|
|||||||
capi_header_dir = os.path.join(cpp_header_dir, 'capi')
|
capi_header_dir = os.path.join(cpp_header_dir, 'capi')
|
||||||
libcef_dll_dir = os.path.join(root_dir, 'libcef_dll')
|
libcef_dll_dir = os.path.join(root_dir, 'libcef_dll')
|
||||||
cpptoc_global_impl = os.path.join(libcef_dll_dir, 'libcef_dll.cc')
|
cpptoc_global_impl = os.path.join(libcef_dll_dir, 'libcef_dll.cc')
|
||||||
ctocpp_global_impl = os.path.join(libcef_dll_dir, 'wrapper', 'libcef_dll_wrapper.cc')
|
ctocpp_global_impl = os.path.join(libcef_dll_dir, 'wrapper',
|
||||||
|
'libcef_dll_wrapper.cc')
|
||||||
wrapper_types_header = os.path.join(libcef_dll_dir, 'wrapper_types.h')
|
wrapper_types_header = os.path.join(libcef_dll_dir, 'wrapper_types.h')
|
||||||
cpptoc_dir = os.path.join(libcef_dll_dir, 'cpptoc')
|
cpptoc_dir = os.path.join(libcef_dll_dir, 'cpptoc')
|
||||||
ctocpp_dir = os.path.join(libcef_dll_dir, 'ctocpp')
|
ctocpp_dir = os.path.join(libcef_dll_dir, 'ctocpp')
|
||||||
@ -82,10 +98,10 @@ header.add_directory(cpp_header_dir, excluded_files)
|
|||||||
header.add_directory(cpp_header_test_dir)
|
header.add_directory(cpp_header_test_dir)
|
||||||
header.add_directory(cpp_header_views_dir)
|
header.add_directory(cpp_header_views_dir)
|
||||||
|
|
||||||
|
|
||||||
# Track the number of files that were written.
|
# Track the number of files that were written.
|
||||||
writect = 0
|
writect = 0
|
||||||
|
|
||||||
|
|
||||||
def update_file(file, newcontents):
|
def update_file(file, newcontents):
|
||||||
""" Replaces the contents of |file| with |newcontents| if necessary. """
|
""" Replaces the contents of |file| with |newcontents| if necessary. """
|
||||||
oldcontents = ''
|
oldcontents = ''
|
||||||
@ -113,7 +129,7 @@ def update_file(file, newcontents):
|
|||||||
oldhash = oldcontents[start + len(hash_start):end]
|
oldhash = oldcontents[start + len(hash_start):end]
|
||||||
|
|
||||||
# Compute the new hash.
|
# Compute the new hash.
|
||||||
rev = hashlib.sha1(newcontents).digest();
|
rev = hashlib.sha1(newcontents).digest()
|
||||||
newhash = ''.join(format(ord(i), '0>2x') for i in rev)
|
newhash = ''.join(format(ord(i), '0>2x') for i in rev)
|
||||||
|
|
||||||
if oldhash == newhash:
|
if oldhash == newhash:
|
||||||
|