Apply yapf formatting to all Python files (issue #2171)

Marshall Greenblatt
2017-05-28 15:04:18 +02:00
parent 59606b88d2
commit 12150b43d2
34 changed files with 5101 additions and 4557 deletions
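For reference, a tree-wide pass like this one is normally driven through yapf's Python API rather than by hand. The sketch below is a minimal illustration only: the `tools` directory, the `chromium` style name, and the walk logic are assumptions, not details recorded in this commit.

# Minimal sketch of an in-place yapf pass over a source tree.
# Assumes yapf is installed (pip install yapf); the directory and
# style below are illustrative, not taken from this commit.
import os

from yapf.yapflib.yapf_api import FormatFile

for root, dirs, files in os.walk('tools'):
  for name in files:
    if not name.endswith('.py'):
      continue
    path = os.path.join(root, name)
    # FormatFile() returns (code, encoding, changed); with
    # in_place=True the file is rewritten directly.
    _, _, changed = FormatFile(path, in_place=True, style_config='chromium')
    if changed:
      print('--> reformatted %s' % path)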

View File

@@ -21,7 +21,6 @@ depot_tools_archive_url = 'https://storage.googleapis.com/chrome-infra/depot_too
 cef_git_url = 'https://bitbucket.org/chromiumembedded/cef.git'

-
 ##
 # Global system variables.
 ##

@@ -29,21 +28,22 @@ cef_git_url = 'https://bitbucket.org/chromiumembedded/cef.git'

 # Script directory.
 script_dir = os.path.dirname(__file__)

-
 ##
 # Helper functions.
 ##

+
 def msg(message):
   """ Output a message. """
   sys.stdout.write('--> ' + message + "\n")

+
 def run(command_line, working_dir, depot_tools_dir=None, output_file=None):
   """ Runs the specified command. """
   # add depot_tools to the path
   env = os.environ
   if not depot_tools_dir is None:
-    env['PATH'] = depot_tools_dir+os.pathsep+env['PATH']
+    env['PATH'] = depot_tools_dir + os.pathsep + env['PATH']
   sys.stdout.write('-------- Running "'+command_line+'" in "'+\
                    working_dir+'"...'+"\n")
@@ -51,27 +51,34 @@ def run(command_line, working_dir, depot_tools_dir=None, output_file=None):
   args = shlex.split(command_line.replace('\\', '\\\\'))
   if not output_file:
-    return subprocess.check_call(args, cwd=working_dir, env=env,
-                                 shell=(sys.platform == 'win32'))
+    return subprocess.check_call(
+        args, cwd=working_dir, env=env, shell=(sys.platform == 'win32'))
   with open(output_file, "w") as f:
-    return subprocess.check_call(args, cwd=working_dir, env=env,
-                                 shell=(sys.platform == 'win32'),
-                                 stderr=subprocess.STDOUT, stdout=f)
+    return subprocess.check_call(
+        args,
+        cwd=working_dir,
+        env=env,
+        shell=(sys.platform == 'win32'),
+        stderr=subprocess.STDOUT,
+        stdout=f)

+
 def create_directory(path):
   """ Creates a directory if it doesn't already exist. """
   if not os.path.exists(path):
-    msg("Creating directory %s" % (path));
+    msg("Creating directory %s" % (path))
     if not options.dryrun:
       os.makedirs(path)

+
 def delete_directory(path):
   """ Removes an existing directory. """
   if os.path.exists(path):
-    msg("Removing directory %s" % (path));
+    msg("Removing directory %s" % (path))
     if not options.dryrun:
       shutil.rmtree(path, onerror=onerror)

+
 def copy_directory(source, target, allow_overwrite=False):
   """ Copies a directory from source to target. """
   if not options.dryrun and os.path.exists(target):
@@ -79,10 +86,11 @@ def copy_directory(source, target, allow_overwrite=False):
       raise Exception("Directory %s already exists" % (target))
     remove_directory(target)
   if os.path.exists(source):
-    msg("Copying directory %s to %s" % (source, target));
+    msg("Copying directory %s to %s" % (source, target))
     if not options.dryrun:
       shutil.copytree(source, target)

+
 def move_directory(source, target, allow_overwrite=False):
   """ Copies a directory from source to target. """
   if not options.dryrun and os.path.exists(target):
@@ -90,14 +98,16 @@ def move_directory(source, target, allow_overwrite=False):
       raise Exception("Directory %s already exists" % (target))
     remove_directory(target)
   if os.path.exists(source):
-    msg("Moving directory %s to %s" % (source, target));
+    msg("Moving directory %s to %s" % (source, target))
     if not options.dryrun:
       shutil.move(source, target)

+
 def is_git_checkout(path):
   """ Returns true if the path represents a git checkout. """
   return os.path.exists(os.path.join(path, '.git'))

+
 def exec_cmd(cmd, path):
   """ Execute the specified command and return the result. """
   out = ''
@@ -105,10 +115,12 @@ def exec_cmd(cmd, path):
   sys.stdout.write("-------- Running \"%s\" in \"%s\"...\n" % (cmd, path))
   parts = cmd.split()
   try:
-    process = subprocess.Popen(parts, cwd=path,
-                               stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE,
-                               shell=(sys.platform == 'win32'))
+    process = subprocess.Popen(
+        parts,
+        cwd=path,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        shell=(sys.platform == 'win32'))
     out, err = process.communicate()
   except IOError, (errno, strerror):
     raise
@@ -116,6 +128,7 @@ def exec_cmd(cmd, path):
     raise
   return {'out': out, 'err': err}

+
 def get_git_hash(path, branch):
   """ Returns the git hash for the specified branch/tag/hash. """
   cmd = "%s rev-parse %s" % (git_exe, branch)
@@ -124,6 +137,7 @@ def get_git_hash(path, branch):
     return result['out'].strip()
   return 'Unknown'

+
 def get_git_url(path):
   """ Returns the origin url for the specified path. """
   cmd = "%s config --get remote.origin.url" % (git_exe)
@@ -132,6 +146,7 @@ def get_git_url(path):
     return result['out'].strip()
   return 'Unknown'

+
 def download_and_extract(src, target):
   """ Extracts the contents of src, which may be a URL or local file, to the
       target directory. """
@@ -143,7 +158,7 @@ def download_and_extract(src, target):
     response = opener.open(src)

     temporary = True
-    handle, archive_path = tempfile.mkstemp(suffix = '.zip')
+    handle, archive_path = tempfile.mkstemp(suffix='.zip')
     os.write(handle, response.read())
     os.close(handle)
   elif os.path.exists(src):
@@ -169,6 +184,7 @@ def download_and_extract(src, target):
   if temporary and os.path.exists(archive_path):
     os.remove(archive_path)

+
 def read_file(path):
   """ Read a file. """
   if os.path.exists(path):
@@ -179,11 +195,13 @@ def read_file(path):
   else:
     raise Exception("Path does not exist: %s" % (path))

+
 def read_config_file(path):
   """ Read a configuration file. """
   # Parse the contents.
   return eval(read_file(path), {'__builtins__': None}, None)

+
 def write_config_file(path, contents):
   """ Write a configuration file. """
   msg('Writing file: %s' % path)
@@ -195,6 +213,7 @@ def write_config_file(path, contents):
     fp.write("}\n")
   fp.close()

+
 def read_branch_config_file(path):
   """ Read the CEF branch from the specified path. """
   config_file = os.path.join(path, 'cef.branch')
@@ -204,12 +223,14 @@ def read_branch_config_file(path):
       return contents['branch']
   return ''

+
 def write_branch_config_file(path, branch):
   """ Write the CEF branch to the specified path. """
   config_file = os.path.join(path, 'cef.branch')
   if not os.path.isfile(config_file):
     write_config_file(config_file, {'branch': branch})

+
 def remove_deps_entry(path, entry):
   """ Remove an entry from the Chromium DEPS file at the specified path. """
   msg('Updating DEPS file: %s' % path)
@@ -233,6 +254,7 @@ def remove_deps_entry(path, entry):
       fp.write(line)
   fp.close()

+
 def apply_deps_patch():
   """ Patch the Chromium DEPS file if necessary. """
   # Starting with 43.0.2357.126 the DEPS file is now 100% Git and the .DEPS.git
@@ -251,8 +273,8 @@ def apply_deps_patch():
     # Attempt to apply the DEPS patch file that may exist with newer branches.
     patch_tool = os.path.join(cef_dir, 'tools', 'patcher.py')
     run('%s %s --patch-file "%s" --patch-dir "%s"' %
-        (python_exe, patch_tool, patch_file, chromium_src_dir),
-        chromium_src_dir, depot_tools_dir)
+        (python_exe, patch_tool, patch_file,
+         chromium_src_dir), chromium_src_dir, depot_tools_dir)
   elif cef_branch != 'trunk' and int(cef_branch) <= 1916:
     # Release branch DEPS files older than 37.0.2007.0 may include a 'src'
     # entry. This entry needs to be removed otherwise `gclient sync` will
@@ -261,6 +283,7 @@ def apply_deps_patch():
   else:
     raise Exception("Path does not exist: %s" % (deps_path))

+
 def onerror(func, path, exc_info):
   """
   Error handler for ``shutil.rmtree``.
@@ -299,10 +322,17 @@ distribution of CEF.
 parser = OptionParser(description=disc)

 # Setup options.
-parser.add_option('--download-dir', dest='downloaddir', metavar='DIR',
-                  help='Download directory with no spaces [required].')
-parser.add_option('--depot-tools-dir', dest='depottoolsdir', metavar='DIR',
-                  help='Download directory for depot_tools.', default='')
+parser.add_option(
+    '--download-dir',
+    dest='downloaddir',
+    metavar='DIR',
+    help='Download directory with no spaces [required].')
+parser.add_option(
+    '--depot-tools-dir',
+    dest='depottoolsdir',
+    metavar='DIR',
+    help='Download directory for depot_tools.',
+    default='')
 parser.add_option('--depot-tools-archive', dest='depottoolsarchive',
                   help='Zip archive file that contains a single top-level '+\
                        'depot_tools directory.', default='')
@@ -331,9 +361,12 @@ parser.add_option('--chromium-checkout', dest='chromiumcheckout',
                   default='')

 # Miscellaneous options.
-parser.add_option('--force-config',
-                  action='store_true', dest='forceconfig', default=False,
-                  help='Force creation of a new gclient config file.')
+parser.add_option(
+    '--force-config',
+    action='store_true',
+    dest='forceconfig',
+    default=False,
+    help='Force creation of a new gclient config file.')
 parser.add_option('--force-clean',
                   action='store_true', dest='forceclean', default=False,
                   help='Force a clean checkout of Chromium and CEF. This will'+\
@@ -342,9 +375,12 @@ parser.add_option('--force-clean-deps',
                   action='store_true', dest='forcecleandeps', default=False,
                   help='Force a clean checkout of Chromium dependencies. Used'+\
                        ' in combination with --force-clean.')
-parser.add_option('--dry-run',
-                  action='store_true', dest='dryrun', default=False,
-                  help="Output commands without executing them.")
+parser.add_option(
+    '--dry-run',
+    action='store_true',
+    dest='dryrun',
+    default=False,
+    help="Output commands without executing them.")
 parser.add_option('--dry-run-platform', dest='dryrunplatform', default=None,
                   help='Simulate a dry run on the specified platform '+\
                        '(windows, macosx, linux). Must be used in combination'+\
@@ -365,12 +401,18 @@ parser.add_option('--no-cef-update',
                   help='Do not update CEF. Pass --force-build or '+\
                        '--force-distrib if you desire a new build or '+\
                        'distribution.')
-parser.add_option('--no-chromium-update',
-                  action='store_true', dest='nochromiumupdate', default=False,
-                  help='Do not update Chromium.')
-parser.add_option('--no-depot-tools-update',
-                  action='store_true', dest='nodepottoolsupdate', default=False,
-                  help='Do not update depot_tools.')
+parser.add_option(
+    '--no-chromium-update',
+    action='store_true',
+    dest='nochromiumupdate',
+    default=False,
+    help='Do not update Chromium.')
+parser.add_option(
+    '--no-depot-tools-update',
+    action='store_true',
+    dest='nodepottoolsupdate',
+    default=False,
+    help='Do not update depot_tools.')

 # Build-related options.
 parser.add_option('--force-build',
@@ -378,63 +420,114 @@ parser.add_option('--force-build',
                   help='Force CEF debug and release builds. This builds '+\
                        '[build-target] on all platforms and chrome_sandbox '+\
                        'on Linux.')
-parser.add_option('--no-build',
-                  action='store_true', dest='nobuild', default=False,
-                  help='Do not build CEF.')
-parser.add_option('--build-target', dest='buildtarget', default='cefclient',
-                  help='Target name(s) to build (defaults to "cefclient").')
-parser.add_option('--build-tests',
-                  action='store_true', dest='buildtests', default=False,
-                  help='Also build the ceftests target.')
-parser.add_option('--no-debug-build',
-                  action='store_true', dest='nodebugbuild', default=False,
-                  help="Don't perform the CEF debug build.")
-parser.add_option('--no-release-build',
-                  action='store_true', dest='noreleasebuild', default=False,
-                  help="Don't perform the CEF release build.")
-parser.add_option('--verbose-build',
-                  action='store_true', dest='verbosebuild', default=False,
-                  help='Show all command lines while building.')
+parser.add_option(
+    '--no-build',
+    action='store_true',
+    dest='nobuild',
+    default=False,
+    help='Do not build CEF.')
+parser.add_option(
+    '--build-target',
+    dest='buildtarget',
+    default='cefclient',
+    help='Target name(s) to build (defaults to "cefclient").')
+parser.add_option(
+    '--build-tests',
+    action='store_true',
+    dest='buildtests',
+    default=False,
+    help='Also build the ceftests target.')
+parser.add_option(
+    '--no-debug-build',
+    action='store_true',
+    dest='nodebugbuild',
+    default=False,
+    help="Don't perform the CEF debug build.")
+parser.add_option(
+    '--no-release-build',
+    action='store_true',
+    dest='noreleasebuild',
+    default=False,
+    help="Don't perform the CEF release build.")
+parser.add_option(
+    '--verbose-build',
+    action='store_true',
+    dest='verbosebuild',
+    default=False,
+    help='Show all command lines while building.')
 parser.add_option('--build-log-file',
                   action='store_true', dest='buildlogfile', default=False,
                   help='Write build logs to file. The file will be named '+\
                        '"build-[branch]-[debug|release].log" in the download '+\
                        'directory.')
-parser.add_option('--x64-build',
-                  action='store_true', dest='x64build', default=False,
-                  help='Create a 64-bit build.')
-parser.add_option('--arm-build',
-                  action='store_true', dest='armbuild', default=False,
-                  help='Create an ARM build.')
+parser.add_option(
+    '--x64-build',
+    action='store_true',
+    dest='x64build',
+    default=False,
+    help='Create a 64-bit build.')
+parser.add_option(
+    '--arm-build',
+    action='store_true',
+    dest='armbuild',
+    default=False,
+    help='Create an ARM build.')

 # Distribution-related options.
-parser.add_option('--force-distrib',
-                  action='store_true', dest='forcedistrib', default=False,
-                  help='Force creation of a CEF binary distribution.')
-parser.add_option('--no-distrib',
-                  action='store_true', dest='nodistrib', default=False,
-                  help="Don't create a CEF binary distribution.")
-parser.add_option('--minimal-distrib',
-                  action='store_true', dest='minimaldistrib', default=False,
-                  help='Create a minimal CEF binary distribution.')
-parser.add_option('--minimal-distrib-only',
-                  action='store_true', dest='minimaldistribonly', default=False,
-                  help='Create a minimal CEF binary distribution only.')
-parser.add_option('--client-distrib',
-                  action='store_true', dest='clientdistrib', default=False,
-                  help='Create a client CEF binary distribution.')
-parser.add_option('--client-distrib-only',
-                  action='store_true', dest='clientdistribonly', default=False,
-                  help='Create a client CEF binary distribution only.')
-parser.add_option('--no-distrib-docs',
-                  action='store_true', dest='nodistribdocs', default=False,
-                  help="Don't create CEF documentation.")
-parser.add_option('--no-distrib-archive',
-                  action='store_true', dest='nodistribarchive', default=False,
-                  help="Don't create archives for output directories.")
-parser.add_option('--clean-artifacts',
-                  action='store_true', dest='cleanartifacts', default=False,
-                  help='Clean the artifacts output directory.')
+parser.add_option(
+    '--force-distrib',
+    action='store_true',
+    dest='forcedistrib',
+    default=False,
+    help='Force creation of a CEF binary distribution.')
+parser.add_option(
+    '--no-distrib',
+    action='store_true',
+    dest='nodistrib',
+    default=False,
+    help="Don't create a CEF binary distribution.")
+parser.add_option(
+    '--minimal-distrib',
+    action='store_true',
+    dest='minimaldistrib',
+    default=False,
+    help='Create a minimal CEF binary distribution.')
+parser.add_option(
+    '--minimal-distrib-only',
+    action='store_true',
+    dest='minimaldistribonly',
+    default=False,
+    help='Create a minimal CEF binary distribution only.')
+parser.add_option(
+    '--client-distrib',
+    action='store_true',
+    dest='clientdistrib',
+    default=False,
+    help='Create a client CEF binary distribution.')
+parser.add_option(
+    '--client-distrib-only',
+    action='store_true',
+    dest='clientdistribonly',
+    default=False,
+    help='Create a client CEF binary distribution only.')
+parser.add_option(
+    '--no-distrib-docs',
+    action='store_true',
+    dest='nodistribdocs',
+    default=False,
+    help="Don't create CEF documentation.")
+parser.add_option(
+    '--no-distrib-archive',
+    action='store_true',
+    dest='nodistribarchive',
+    default=False,
+    help="Don't create archives for output directories.")
+parser.add_option(
+    '--clean-artifacts',
+    action='store_true',
+    dest='cleanartifacts',
+    default=False,
+    help='Clean the artifacts output directory.')
 parser.add_option('--distrib-subdir', dest='distribsubdir',
                   help='CEF distrib dir name, child of '+\
                        'chromium/src/cef/binary_distrib',
@@ -584,7 +677,6 @@ if platform == 'windows':
   # Avoid errors when the "vs_toolchain.py update" Chromium hook runs.
   os.environ['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'

-
 ##
 # Manage the download directory.
 ##
@@ -595,7 +687,6 @@ create_directory(download_dir)

 msg("Download Directory: %s" % (download_dir))

-
 ##
 # Manage the depot_tools directory.
 ##
@@ -622,16 +713,16 @@ if not os.path.exists(depot_tools_dir):
     download_and_extract(options.depottoolsarchive, depot_tools_dir)
   else:
     # On Linux and OS X check out depot_tools using Git.
-    run('git clone '+depot_tools_url+' '+depot_tools_dir, download_dir)
+    run('git clone ' + depot_tools_url + ' ' + depot_tools_dir, download_dir)

 if not options.nodepottoolsupdate:
   # Update depot_tools.
   # On Windows this will download required python and git binaries.
   msg('Updating depot_tools')
   if platform == 'windows':
-    run('update_depot_tools.bat', depot_tools_dir, depot_tools_dir);
+    run('update_depot_tools.bat', depot_tools_dir, depot_tools_dir)
   else:
-    run('update_depot_tools', depot_tools_dir, depot_tools_dir);
+    run('update_depot_tools', depot_tools_dir, depot_tools_dir)

 # Determine the executables to use.
 if platform == 'windows':
@@ -648,7 +739,6 @@ else:
   git_exe = 'git'
   python_exe = 'python'

-
 ##
 # Manage the cef directory.
 ##
@@ -722,7 +812,6 @@ if not options.nocefupdate and os.path.exists(cef_dir):
 else:
   cef_checkout_changed = False

-
 ##
 # Manage the out directory.
 ##
@@ -735,7 +824,6 @@ if options.forceclean and os.path.exists(out_dir):

 msg("CEF Output Directory: %s" % (out_dir))

-
 ##
 # Manage the chromium directory.
 ##
@@ -749,7 +837,7 @@ cef_src_dir = os.path.join(chromium_src_dir, 'cef')
 out_src_dir = os.path.join(chromium_src_dir, 'out')

 if options.chromiumurl != '':
-  chromium_url = options.chromiumurl;
+  chromium_url = options.chromiumurl
 else:
   chromium_url = 'https://chromium.googlesource.com/chromium/src.git'
@@ -830,7 +918,7 @@ else:
 # Delete the existing src/cef directory. It will be re-copied from the download
 # directory later.
 if cef_checkout_changed and os.path.exists(cef_src_dir):
-    delete_directory(cef_src_dir)
+  delete_directory(cef_src_dir)

 # Delete the existing src/out directory if requested.
 if options.forceclean and os.path.exists(out_src_dir):
@@ -899,7 +987,6 @@ elif not out_src_dir_exists:
 # Write the config file for identifying the branch.
 write_branch_config_file(out_src_dir, cef_branch)

-
 ##
 # Build CEF.
 ##
@@ -938,7 +1025,7 @@ if not options.nobuild and (chromium_checkout_changed or \
       msg('%s=%s' % (key, os.environ[key]))

   # Run the cef_create_projects script to generate project files.
-  path = os.path.join(cef_src_dir, 'cef_create_projects'+script_ext)
+  path = os.path.join(cef_src_dir, 'cef_create_projects' + script_ext)
   run(path, cef_src_dir, depot_tools_dir)

   # Build using Ninja.
@@ -995,7 +1082,6 @@ elif not options.nobuild:
   msg('Not building. The source hashes have not changed and ' +
       'the output folder "%s" already exists' % (out_src_dir))

-
 ##
 # Create the CEF binary distribution.
 ##
@@ -1025,7 +1111,7 @@ if not options.nodistrib and (chromium_checkout_changed or \
   # Create the requested distribution types.
   first_type = True
   for type in distrib_types:
-    path = os.path.join(cef_tools_dir, 'make_distrib'+script_ext)
+    path = os.path.join(cef_tools_dir, 'make_distrib' + script_ext)
     if options.nodebugbuild or options.noreleasebuild or type != 'standard':
       path = path + ' --allow-partial'
     path = path + ' --ninja-build'

View File

@@ -15,231 +15,253 @@ import hashlib

 class cef_api_hash:
   """ CEF API hash calculator """

-  def __init__(self, headerdir, debugdir = None, verbose = False):
+  def __init__(self, headerdir, debugdir=None, verbose=False):
     if headerdir is None or len(headerdir) == 0:
       raise AssertionError("headerdir is not specified")

-    self.__headerdir = headerdir;
-    self.__debugdir = debugdir;
-    self.__verbose = verbose;
-    self.__debug_enabled = not (self.__debugdir is None) and len(self.__debugdir) > 0;
+    self.__headerdir = headerdir
+    self.__debugdir = debugdir
+    self.__verbose = verbose
+    self.__debug_enabled = not (self.__debugdir is
+                                None) and len(self.__debugdir) > 0

-    self.platforms = [ "windows", "macosx", "linux" ];
+    self.platforms = ["windows", "macosx", "linux"]

     self.platform_files = {
         "windows": [
             "internal/cef_types_win.h",
         ],
         "macosx": [
             "internal/cef_types_mac.h",
         ],
         "linux": [
             "internal/cef_types_linux.h",
         ]
-    };
+    }

-    self.included_files = [
-    ];
+    self.included_files = []

     self.excluded_files = [
         "cef_version.h",
         "internal/cef_tuple.h",
         "internal/cef_types_wrappers.h",
         "internal/cef_string_wrappers.h",
         "internal/cef_win.h",
         "internal/cef_mac.h",
         "internal/cef_linux.h",
-    ];
+    ]

   def calculate(self):
-    filenames = [filename for filename in self.__get_filenames() if not filename in self.excluded_files]
+    filenames = [
+        filename for filename in self.__get_filenames()
+        if not filename in self.excluded_files
+    ]

     objects = []
     for filename in filenames:
       if self.__verbose:
         print "Processing " + filename + "..."
       content = read_file(os.path.join(self.__headerdir, filename), True)
-      platforms = list([p for p in self.platforms if self.__is_platform_filename(filename, p)])
+      platforms = list([
+          p for p in self.platforms if self.__is_platform_filename(filename, p)
+      ])

       # Parse cef_string.h happens in special case: grab only defined CEF_STRING_TYPE_xxx declaration
       content_objects = None
       if filename == "internal/cef_string.h":
         content_objects = self.__parse_string_type(content)
       else:
         content_objects = self.__parse_objects(content)

       for o in content_objects:
         o["text"] = self.__prepare_text(o["text"])
         o["platforms"] = platforms
         o["filename"] = filename
         objects.append(o)

     # objects will be sorted including filename, to make stable universal hashes
-    objects = sorted(objects, key = lambda o: o["name"] + "@" + o["filename"])
+    objects = sorted(objects, key=lambda o: o["name"] + "@" + o["filename"])

     if self.__debug_enabled:
       namelen = max([len(o["name"]) for o in objects])
       filenamelen = max([len(o["filename"]) for o in objects])
-      dumpsig = [];
+      dumpsig = []
       for o in objects:
-        dumpsig.append(format(o["name"], str(namelen) + "s") + "|" + format(o["filename"], "" + str(filenamelen) + "s") + "|" + o["text"]);
+        dumpsig.append(
+            format(o["name"], str(namelen) + "s") + "|" + format(
+                o["filename"], "" + str(filenamelen) + "s") + "|" + o["text"])
       self.__write_debug_file("objects.txt", dumpsig)

-    revisions = { };
+    revisions = {}

     for platform in itertools.chain(["universal"], self.platforms):
       sig = self.__get_final_sig(objects, platform)
       if self.__debug_enabled:
         self.__write_debug_file(platform + ".sig", sig)
-      rev = hashlib.sha1(sig).digest();
-      revstr = ''.join(format(ord(i),'0>2x') for i in rev)
+      rev = hashlib.sha1(sig).digest()
+      revstr = ''.join(format(ord(i), '0>2x') for i in rev)
       revisions[platform] = revstr

     return revisions

   def __parse_objects(self, content):
     """ Returns array of objects in content file. """
     objects = []
     content = re.sub("//.*\n", "", content)

     # function declarations
-    for m in re.finditer("\nCEF_EXPORT\s+?.*?\s+?(\w+)\s*?\(.*?\)\s*?;", content, flags = re.DOTALL):
-      object = {
-        "name": m.group(1),
-        "text": m.group(0).strip()
-      }
+    for m in re.finditer(
+        "\nCEF_EXPORT\s+?.*?\s+?(\w+)\s*?\(.*?\)\s*?;",
+        content,
+        flags=re.DOTALL):
+      object = {"name": m.group(1), "text": m.group(0).strip()}
       objects.append(object)

     # structs
-    for m in re.finditer("\ntypedef\s+?struct\s+?(\w+)\s+?\{.*?\}\s+?(\w+)\s*?;", content, flags = re.DOTALL):
-      object = {
-        "name": m.group(2),
-        "text": m.group(0).strip()
-      }
+    for m in re.finditer(
+        "\ntypedef\s+?struct\s+?(\w+)\s+?\{.*?\}\s+?(\w+)\s*?;",
+        content,
+        flags=re.DOTALL):
+      object = {"name": m.group(2), "text": m.group(0).strip()}
       objects.append(object)

     # enums
-    for m in re.finditer("\nenum\s+?(\w+)\s+?\{.*?\}\s*?;", content, flags = re.DOTALL):
-      object = {
-        "name": m.group(1),
-        "text": m.group(0).strip()
-      }
+    for m in re.finditer(
+        "\nenum\s+?(\w+)\s+?\{.*?\}\s*?;", content, flags=re.DOTALL):
+      object = {"name": m.group(1), "text": m.group(0).strip()}
       objects.append(object)

     # typedefs
-    for m in re.finditer("\ntypedef\s+?.*?\s+(\w+);", content, flags = 0):
-      object = {
-        "name": m.group(1),
-        "text": m.group(0).strip()
-      }
+    for m in re.finditer("\ntypedef\s+?.*?\s+(\w+);", content, flags=0):
+      object = {"name": m.group(1), "text": m.group(0).strip()}
       objects.append(object)

     return objects

   def __parse_string_type(self, content):
     """ Grab defined CEF_STRING_TYPE_xxx """
     objects = []
-    for m in re.finditer("\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n", content, flags = 0):
-      object = {
-        "name": m.group(1),
-        "text": m.group(0),
-      }
-      objects.append(object)
-    return objects
+    for m in re.finditer(
+        "\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n", content,
+        flags=0):
+      object = {
+          "name": m.group(1),
+          "text": m.group(0),
+      }
+      objects.append(object)
+    return objects

   def __prepare_text(self, text):
     text = text.strip()
-    text = re.sub("\s+", " ", text);
-    text = re.sub("\(\s+", "(", text);
+    text = re.sub("\s+", " ", text)
+    text = re.sub("\(\s+", "(", text)
     return text

   def __get_final_sig(self, objects, platform):
     sig = []
     for o in objects:
       if platform == "universal" or platform in o["platforms"]:
         sig.append(o["text"])
     return "\n".join(sig)

   def __get_filenames(self):
     """ Returns file names to be processed, relative to headerdir """
-    headers = [os.path.join(self.__headerdir, filename) for filename in self.included_files];
-    headers = itertools.chain(headers, get_files(os.path.join(self.__headerdir, "capi", "*.h")))
-    headers = itertools.chain(headers, get_files(os.path.join(self.__headerdir, "internal", "*.h")))
+    headers = [
+        os.path.join(self.__headerdir, filename)
+        for filename in self.included_files
+    ]
+    headers = itertools.chain(
+        headers, get_files(os.path.join(self.__headerdir, "capi", "*.h")))
+    headers = itertools.chain(
+        headers, get_files(os.path.join(self.__headerdir, "internal", "*.h")))
     for v in self.platform_files.values():
-      headers = itertools.chain(headers, [os.path.join(self.__headerdir, f) for f in v])
+      headers = itertools.chain(headers,
+                                [os.path.join(self.__headerdir, f) for f in v])

-    normalized = [os.path.relpath(filename, self.__headerdir) for filename in headers];
-    normalized = [f.replace('\\', '/').lower() for f in normalized];
+    normalized = [
+        os.path.relpath(filename, self.__headerdir) for filename in headers
+    ]
+    normalized = [f.replace('\\', '/').lower() for f in normalized]

-    return list(set(normalized));
+    return list(set(normalized))

   def __is_platform_filename(self, filename, platform):
     if platform == "universal":
       return True
     if not platform in self.platform_files:
       return False
     listed = False
     for p in self.platforms:
       if filename in self.platform_files[p]:
         if p == platform:
           return True
         else:
           listed = True
     return not listed

   def __write_debug_file(self, filename, content):
-    make_dir(self.__debugdir);
-    outfile = os.path.join(self.__debugdir, filename);
-    dir = os.path.dirname(outfile);
-    make_dir(dir);
+    make_dir(self.__debugdir)
+    outfile = os.path.join(self.__debugdir, filename)
+    dir = os.path.dirname(outfile)
+    make_dir(dir)
     if not isinstance(content, basestring):
       content = "\n".join(content)
     write_file(outfile, content)


 if __name__ == "__main__":
   from optparse import OptionParser
   import time

   disc = """
 This utility calculates CEF API hash.
 """

   parser = OptionParser(description=disc)
-  parser.add_option('--cpp-header-dir', dest='cppheaderdir', metavar='DIR',
-                    help='input directory for C++ header files [required]')
-  parser.add_option('--debug-dir', dest='debugdir', metavar='DIR',
-                    help='intermediate directory for easy debugging')
-  parser.add_option('-v', '--verbose',
-                    action='store_true', dest='verbose', default=False,
-                    help='output detailed status information')
+  parser.add_option(
+      '--cpp-header-dir',
+      dest='cppheaderdir',
+      metavar='DIR',
+      help='input directory for C++ header files [required]')
+  parser.add_option(
+      '--debug-dir',
+      dest='debugdir',
+      metavar='DIR',
+      help='intermediate directory for easy debugging')
+  parser.add_option(
+      '-v',
+      '--verbose',
+      action='store_true',
+      dest='verbose',
+      default=False,
+      help='output detailed status information')
   (options, args) = parser.parse_args()

   # the cppheader option is required
   if options.cppheaderdir is None:
     parser.print_help(sys.stdout)
     sys.exit()

   # calculate
   c_start_time = time.time()

-  calc = cef_api_hash(options.cppheaderdir, options.debugdir, options.verbose);
-  revisions = calc.calculate();
+  calc = cef_api_hash(options.cppheaderdir, options.debugdir, options.verbose)
+  revisions = calc.calculate()

   c_completed_in = time.time() - c_start_time

   print "{"
   for k in sorted(revisions.keys()):
     print format("\"" + k + "\"", ">12s") + ": \"" + revisions[k] + "\""
   print "}"

   # print
   # print 'Completed in: ' + str(c_completed_in)
   # print
   # print "Press any key to continue...";
   # sys.stdin.readline();

File diff suppressed because it is too large.

View File

@@ -36,6 +36,7 @@ import sys
 # - Some global variables like "$year$" will be replaced in the whole template
 #   before further parsing occurs.

+
 class cef_html_builder:
   """ Class used to build the cefbuilds HTML file. """
@@ -47,7 +48,7 @@ class cef_html_builder:
   def clear(self):
     """ Clear the contents of this object. """
     self._parts = {}
-    return;
+    return

   @staticmethod
   def _token(key):
@@ -87,7 +88,9 @@ class cef_html_builder:
     top = str[:start_pos]
     middle = str[start_pos + len(start_tag):end_pos]
     bottom = str[end_pos + len(end_tag):]
-    return (top + cef_html_builder._token(cef_html_builder._section_key(section)) + bottom, middle)
+    return (
+        top + cef_html_builder._token(cef_html_builder._section_key(section)) +
+        bottom, middle)

   def load(self, html_template):
     """ Load the specified |html_template| string. """
@@ -107,32 +110,32 @@ class cef_html_builder:
     (version, file) = self._extract(version, 'file')

     self._parts = {
-      'root': root,
-      'platform_link': platform_link,
-      'platform': platform,
-      'version': version,
-      'file': file
+        'root': root,
+        'platform_link': platform_link,
+        'platform': platform,
+        'version': version,
+        'file': file
     }

   @staticmethod
   def _get_platform_name(platform):
     return {
-      'linux32': 'Linux 32-bit',
-      'linux64': 'Linux 64-bit',
-      'linuxarm': 'Linux ARM',
-      'macosx64': 'Mac OS X 64-bit',
-      'windows32': 'Windows 32-bit',
-      'windows64': 'Windows 64-bit'
+        'linux32': 'Linux 32-bit',
+        'linux64': 'Linux 64-bit',
+        'linuxarm': 'Linux ARM',
+        'macosx64': 'Mac OS X 64-bit',
+        'windows32': 'Windows 32-bit',
+        'windows64': 'Windows 64-bit'
     }[platform]

   @staticmethod
   def _get_type_name(type):
     return {
-      'standard': 'Standard Distribution',
-      'minimal': 'Minimal Distribution',
-      'client': 'Sample Application',
-      'debug_symbols': 'Debug Symbols',
-      'release_symbols': 'Release Symbols'
+        'standard': 'Standard Distribution',
+        'minimal': 'Minimal Distribution',
+        'client': 'Sample Application',
+        'debug_symbols': 'Debug Symbols',
+        'release_symbols': 'Release Symbols'
     }[type]

   @staticmethod
@@ -146,7 +149,7 @@ class cef_html_builder:
     size_name = ('B', 'KB', 'MB', 'GB')
     i = int(math.floor(math.log(size, 1024)))
     p = math.pow(1024, i)
-    s = round(size/p, 2)
+    s = round(size / p, 2)
     return '%.2f %s' % (s, size_name[i])

   @staticmethod
@@ -175,11 +178,17 @@ class cef_html_builder:
     else:
       sample_app = 'cefclient'
     return {
-      'standard': 'Standard binary distribution. Includes header files, libcef_dll_wrapper source code, binary files, CMake configuration files and source code for the cefclient and cefsimple sample applications. See the included README.txt file for usage and build requirements.',
-      'minimal': 'Minimal binary distribution. Includes header files, libcef_dll_wrapper source code, Release build binary files and CMake configuration files. Does not include Debug build binary files or sample application source code. See the included README.txt file for usage and build requirements.',
-      'client': 'Release build of the ' + sample_app + ' sample application. See the included README.txt file for usage requirements.',
-      'debug_symbols': 'Debug build symbols. Must be extracted and placed next to the CEF Debug binary file with the same name and version.',
-      'release_symbols': 'Release build symbols. Must be extracted and placed next to the CEF Release binary file with the same name and version.'
+        'standard':
+            'Standard binary distribution. Includes header files, libcef_dll_wrapper source code, binary files, CMake configuration files and source code for the cefclient and cefsimple sample applications. See the included README.txt file for usage and build requirements.',
+        'minimal':
+            'Minimal binary distribution. Includes header files, libcef_dll_wrapper source code, Release build binary files and CMake configuration files. Does not include Debug build binary files or sample application source code. See the included README.txt file for usage and build requirements.',
+        'client':
+            'Release build of the ' + sample_app +
+            ' sample application. See the included README.txt file for usage requirements.',
+        'debug_symbols':
+            'Debug build symbols. Must be extracted and placed next to the CEF Debug binary file with the same name and version.',
+        'release_symbols':
+            'Release build symbols. Must be extracted and placed next to the CEF Release binary file with the same name and version.'
     }[file['type']]

   def generate(self, json_builder):
@@ -189,8 +198,8 @@ class cef_html_builder:

     # Substitution values are augmented at each nesting level.
     subs = {
-      'year': '2016',
-      'branding': self._branding,
+        'year': '2016',
+        'branding': self._branding,
     }

     # Substitute variables.
@@ -210,9 +219,12 @@ class cef_html_builder:
       for version in json_builder.get_versions(platform):
         subs['cef_version'] = version['cef_version']
         subs['chromium_version'] = version['chromium_version']
-        subs['last_modified'] = self._get_date(version['files'][0]['last_modified'])
-        subs['cef_source_url'] = self._get_cef_source_url(version['cef_version'])
-        subs['chromium_source_url'] = self._get_chromium_source_url(version['chromium_version'])
+        subs['last_modified'] = self._get_date(
+            version['files'][0]['last_modified'])
+        subs['cef_source_url'] = self._get_cef_source_url(
+            version['cef_version'])
+        subs['chromium_source_url'] = self._get_chromium_source_url(
+            version['chromium_version'])

         # Substitute variables.
         version_str = self._replace_all(self._parts['version'], subs)
@@ -225,9 +237,12 @@ class cef_html_builder:
           subs['size'] = self._get_file_size(file['size'])
           subs['type'] = file['type']
           subs['type_name'] = self._get_type_name(file['type'])
-          subs['file_url'] = self._get_file_url(platform, version['cef_version'], file)
-          subs['sha1_url'] = self._get_sha1_url(platform, version['cef_version'], file)
-          subs['tooltip_text'] = self._get_tooltip_text(platform, version['cef_version'], file)
+          subs['file_url'] = self._get_file_url(platform,
+                                                version['cef_version'], file)
+          subs['sha1_url'] = self._get_sha1_url(platform,
+                                                version['cef_version'], file)
+          subs['tooltip_text'] = self._get_tooltip_text(
+              platform, version['cef_version'], file)

           # Substitute variables.
           file_str = self._replace_all(self._parts['file'], subs)
@@ -236,25 +251,35 @@ class cef_html_builder:
         if len(file_strs) > 0:
           # Always output file types in the same order.
           file_out = ''
-          type_order = ['standard', 'minimal', 'client', 'debug_symbols', 'release_symbols']
+          type_order = [
+              'standard', 'minimal', 'client', 'debug_symbols',
+              'release_symbols'
+          ]
           for type in type_order:
             if type in file_strs:
               file_out = file_out + file_strs[type]

           # Insert files.
-          version_str = self._replace(version_str, self._section_key('file'), file_out)
+          version_str = self._replace(version_str,
+                                      self._section_key('file'), file_out)
           version_strs.append(version_str)

       if len(version_strs) > 0:
         # Insert versions.
-        platform_str = self._replace(platform_str, self._section_key('version'), "".join(version_strs))
+        platform_str = self._replace(platform_str,
+                                     self._section_key('version'),
+                                     "".join(version_strs))
         platform_strs.append(platform_str)
         platform_link_strs.append(platform_link_str)

     if len(platform_strs) > 0:
       # Insert platforms.
-      root_str = self._replace(root_str, self._section_key('platform_link'), "".join(platform_link_strs))
-      root_str = self._replace(root_str, self._section_key('platform'), "".join(platform_strs))
+      root_str = self._replace(root_str,
+                               self._section_key('platform_link'),
+                               "".join(platform_link_strs))
+      root_str = self._replace(root_str,
+                               self._section_key('platform'),
+                               "".join(platform_strs))

     return root_str
@@ -263,7 +288,8 @@ class cef_html_builder:
 if __name__ == '__main__':
   # Verify command-line arguments.
   if len(sys.argv) < 4:
-    sys.stderr.write('Usage: %s <json_file_in> <html_file_in> <html_file_out>' % sys.argv[0])
+    sys.stderr.write(
+        'Usage: %s <json_file_in> <html_file_in> <html_file_out>' % sys.argv[0])
     sys.exit()

   json_file_in = sys.argv[1]

View File

@@ -43,19 +43,24 @@ import urllib
 # directory listings.
 _CEF_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"

+
 def parse_date(date):
   return datetime.datetime.strptime(date, _CEF_DATE_FORMAT)

+
 def format_date(date):
   return date.strftime(_CEF_DATE_FORMAT)

+
 # Helpers to format datetime values on JSON read/write.
 def cef_from_json(json_object):
   if 'last_modified' in json_object:
     json_object['last_modified'] = parse_date(json_object['last_modified'])
   return json_object

+
 class cef_json_encoder(json.JSONEncoder):
+
   def default(self, o):
     if isinstance(o, datetime.datetime):
       return format_date(o)
@@ -75,7 +80,8 @@ class cef_json_builder:
   @staticmethod
   def get_platforms():
     """ Returns the list of supported platforms. """
-    return ('linux32', 'linux64', 'linuxarm', 'macosx64', 'windows32', 'windows64')
+    return ('linux32', 'linux64', 'linuxarm', 'macosx64', 'windows32',
+            'windows64')

   @staticmethod
   def get_distrib_types():
@staticmethod @staticmethod
def get_distrib_types(): def get_distrib_types():
@ -85,7 +91,8 @@ class cef_json_builder:
@staticmethod @staticmethod
def is_valid_version(version): def is_valid_version(version):
""" Returns true if the specified CEF version is fully qualified and valid. """ """ Returns true if the specified CEF version is fully qualified and valid. """
return bool(re.compile('^3.[0-9]{4,5}.[0-9]{4,5}.g[0-9a-f]{7}$').match(version)) return bool(
re.compile('^3.[0-9]{4,5}.[0-9]{4,5}.g[0-9a-f]{7}$').match(version))
@staticmethod @staticmethod
def is_valid_chromium_version(version): def is_valid_chromium_version(version):
@ -117,8 +124,12 @@ class cef_json_builder:
# Return a string representation of this object. # Return a string representation of this object.
self._sort_versions() self._sort_versions()
if self._prettyprint: if self._prettyprint:
return json.dumps(self._data, cls=cef_json_encoder, sort_keys=True, return json.dumps(
indent=2, separators=(',', ': ')) self._data,
cls=cef_json_encoder,
sort_keys=True,
indent=2,
separators=(',', ': '))
else: else:
return json.dumps(self._data, cls=cef_json_encoder, sort_keys=True) return json.dumps(self._data, cls=cef_json_encoder, sort_keys=True)
@@ -197,7 +208,7 @@ class cef_json_builder:
     self._fatalerrors = fatalerrors

-    new_data = json.JSONDecoder(object_hook = cef_from_json).decode(json_string)
+    new_data = json.JSONDecoder(object_hook=cef_from_json).decode(json_string)

     # Validate the new data's structure.
     for platform in self._data.keys():
@@ -224,34 +235,41 @@ class cef_json_builder:
            not 'size' in file or \
            not 'last_modified' in file or \
            not 'sha1' in file:
-          self._print('load: Missing file key(s) for %s %s' % (platform, version['cef_version']))
+          self._print('load: Missing file key(s) for %s %s' %
+                      (platform, version['cef_version']))
           continue
-        (expected_platform, expected_version, expected_type) = self._parse_name(file['name'])
+        (expected_platform, expected_version,
+         expected_type) = self._parse_name(file['name'])
         if expected_platform != platform or \
            expected_version != version['cef_version'] or \
            expected_type != file['type']:
           self._print('load: File name/attribute mismatch for %s %s %s' %
                       (platform, version['cef_version'], file['name']))
           continue
         self._validate_args(platform, version['cef_version'], file['type'],
                             file['size'], file['last_modified'], file['sha1'])
         if file['type'] in found_types:
-          self._print('load: Duplicate %s type for %s %s' % (file['type'], platform, version['cef_version']))
+          self._print('load: Duplicate %s type for %s %s' %
+                      (file['type'], platform, version['cef_version']))
           continue
         found_types.append(file['type'])
         valid_files.append({
-          'type': file['type'],
-          'name': file['name'],
-          'size': file['size'],
-          'last_modified': file['last_modified'],
-          'sha1': file['sha1'],
+            'type': file['type'],
+            'name': file['name'],
+            'size': file['size'],
+            'last_modified': file['last_modified'],
+            'sha1': file['sha1'],
         })

       if len(valid_files) > 0:
         valid_versions.append({
-          'cef_version': version['cef_version'],
-          'chromium_version': self.set_chromium_version(version['cef_version'], version['chromium_version']),
-          'files': self._sort_files(valid_files)
+            'cef_version':
+                version['cef_version'],
+            'chromium_version':
+                self.set_chromium_version(version['cef_version'],
+                                          version['chromium_version']),
+            'files':
+                self._sort_files(valid_files)
         })

     if len(valid_versions) > 0:
@@ -280,7 +298,8 @@ class cef_json_builder:
     if name_no_ext[-4:] == '.tar':
       name_no_ext = name_no_ext[:-4]
     name_parts = name_no_ext.split('_')
-    if len(name_parts) < 4 or name_parts[0] != 'cef' or name_parts[1] != 'binary':
+    if len(
+        name_parts) < 4 or name_parts[0] != 'cef' or name_parts[1] != 'binary':
       raise Exception('Invalid filename: %s' % name)

     # Remove 'cef' and 'binary'.
@@ -356,7 +375,7 @@ class cef_json_builder:
     self._validate_args(platform, version, type, size, last_modified, sha1)

     # Find the existing version record.
-    version_idx = -1;
+    version_idx = -1
    for i in range(0, len(self._data[platform]['versions'])):
       if self._data[platform]['versions'][i]['cef_version'] == version:
         # Check the version record.
@@ -368,17 +387,20 @@ class cef_json_builder:
       # Add a new version record.
       self._print('add_file: Add %s %s' % (platform, version))
       self._data[platform]['versions'].append({
-        'cef_version': version,
-        'chromium_version': self.get_chromium_version(version),
-        'files': []
+          'cef_version': version,
+          'chromium_version': self.get_chromium_version(version),
+          'files': []
       })
       version_idx = len(self._data[platform]['versions']) - 1

     # Find the existing file record with matching type.
     file_changed = True
-    for i in range(0, len(self._data[platform]['versions'][version_idx]['files'])):
-      if self._data[platform]['versions'][version_idx]['files'][i]['type'] == type:
-        existing_sha1 = self._data[platform]['versions'][version_idx]['files'][i]['sha1']
+    for i in range(0,
+                   len(self._data[platform]['versions'][version_idx]['files'])):
+      if self._data[platform]['versions'][version_idx]['files'][i][
+          'type'] == type:
+        existing_sha1 = self._data[platform]['versions'][version_idx]['files'][
+            i]['sha1']
         if existing_sha1 != sha1:
           # Remove the existing file record.
           self._print(' Remove %s %s' % (name, existing_sha1))
@ -391,11 +413,11 @@ class cef_json_builder:
# Add a new file record. # Add a new file record.
self._print(' Add %s %s' % (name, sha1)) self._print(' Add %s %s' % (name, sha1))
self._data[platform]['versions'][version_idx]['files'].append({ self._data[platform]['versions'][version_idx]['files'].append({
'type': type, 'type': type,
'name': name, 'name': name,
'size': size, 'size': size,
'last_modified': last_modified, 'last_modified': last_modified,
'sha1': sha1 'sha1': sha1
}) })
# Sort file records by last_modified. # Sort file records by last_modified.
@ -420,7 +442,7 @@ class cef_json_builder:
if version is None or version_obj['cef_version'].find(version) == 0: if version is None or version_obj['cef_version'].find(version) == 0:
for file_obj in version_obj['files']: for file_obj in version_obj['files']:
if type is None or type == file_obj['type']: if type is None or type == file_obj['type']:
result_obj = file_obj; result_obj = file_obj
# Add additional metadata. # Add additional metadata.
result_obj['platform'] = platform result_obj['platform'] = platform
result_obj['cef_version'] = version_obj['cef_version'] result_obj['cef_version'] = version_obj['cef_version']
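The prefix match in this last hunk (cef_version.find(version) == 0) is what lets a short version filter select every matching build. A minimal usage sketch of the query API touched here, assuming an index JSON string has already been loaded into the builder (the file name and filter values are hypothetical):

builder = cef_json_builder()
builder.load(open('index.json', 'r').read())  # hypothetical input file
for f in builder.get_files(platform='linux64', version='3.2704'):
  print f['name'], f['sha1']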

View File

@ -23,23 +23,32 @@ import random
import string
import sys


# Create a fake sha1 checksum value.
def make_fake_sha1():
  return ''.join(random.SystemRandom().choice('abcdef' + string.digits)
                 for _ in range(40))


# Create a fake file size value.
def make_fake_size():
  return random.randint(30000000, 60000000)


# Create fake file info based on |platform| and |version|.
def make_fake_file_info(platform, version, type):
  return {
      'name':
          cef_json_builder.get_file_name(version, platform, type) + '.tar.gz',
      'size':
          make_fake_size(),
      'lastModified':
          datetime.datetime.now(),
      'sha1':
          make_fake_sha1()
  }


# Returns a list of fake files based on |platform| and |version|.
def create_fake_files(platform, version):
  files = []
@ -61,7 +70,8 @@ def create_fake_files(platform, version):
if __name__ == '__main__':
  # Verify command-line arguments.
  if len(sys.argv) < 5 or sys.argv[1] != 'add':
    sys.stderr.write('Usage: %s add <platform> <cef_version> <chromium_version>'
                     % sys.argv[0])
    sys.exit()

  # Requested platform.
@ -124,7 +134,8 @@ if __name__ == '__main__':
  # Add new files to the builder.
  changed_files = []
  for file in new_files:
    if builder.add_file(file['name'], file['size'], file['lastModified'],
                        file['sha1']):
      changed_files.append(file)

  if len(changed_files) > 0:
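Putting the usage string above to work, a hypothetical invocation of this example script would look like (script path and version values illustrative):

python cef_json_builder_example.py add linux64 3.2704.1414.g185cd6c 51.0.2704.47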

View File

@ -6,6 +6,7 @@ from cef_json_builder import cef_json_builder
import datetime
import unittest


class TestCefJSONBuilder(unittest.TestCase):

  # Write builder contents to string and then read in.
@ -16,33 +17,35 @@ class TestCefJSONBuilder(unittest.TestCase):
    self.assertEqual(output, str(builder2))

  # Add a file record for testing purposes.
  def _add_test_file(self,
                     builder,
                     platform='linux32',
                     version='3.2704.1414.g185cd6c',
                     type='standard',
                     attrib_idx=0,
                     shouldfail=False):
    name = cef_json_builder.get_file_name(version, platform, type) + '.tar.gz'

    # Some random attribute information. sha1 must be different to trigger replacement.
    attribs = [{
        'date_str': '2016-05-18T22:42:15.487Z',
        'date_val': datetime.datetime(2016, 5, 18, 22, 42, 15, 487000),
        'sha1': '2d48ee05ea6385c8fe80879c98c5dd505ad4b100',
        'size': 48395610
    }, {
        'date_str': '2016-05-14T22:42:15.487Z',
        'date_val': datetime.datetime(2016, 5, 14, 22, 42, 15, 487000),
        'sha1': '2d48ee05ea6385c8fe80879c98c5dd505ad4b200',
        'size': 48395620
    }]

    # Populate the Chromium version to avoid queries.
    chromium_version = '49.0.2705.50'
    self.assertEqual(chromium_version,
                     builder.set_chromium_version(version, chromium_version))
    self.assertEqual(0, builder.get_query_count())

    result = builder.add_file(name, attribs[attrib_idx]['size'],
                              attribs[attrib_idx]['date_str'],
                              attribs[attrib_idx]['sha1'])
    # Failure should be expected when adding the same file multiple times with the same sha1.
@ -50,14 +53,14 @@ class TestCefJSONBuilder(unittest.TestCase):
    # Return the result expected from get_files().
    return {
        'chromium_version': chromium_version,
        'sha1': attribs[attrib_idx]['sha1'],
        'name': name,
        'platform': platform,
        'last_modified': attribs[attrib_idx]['date_val'],
        'cef_version': version,
        'type': type,
        'size': attribs[attrib_idx]['size']
    }

  # Test with no file contents.
@ -105,8 +108,11 @@ class TestCefJSONBuilder(unittest.TestCase):
    builder = cef_json_builder()

    # Specify all values just in case the defaults change.
    expected = self._add_test_file(
        builder,
        platform='linux32',
        version='3.2704.1414.g185cd6c',
        type='standard')

    # No filter.
    files = builder.get_files()
@ -140,14 +146,18 @@ class TestCefJSONBuilder(unittest.TestCase):
    self.assertEqual(len(files), 0)

    # All filters.
    files = builder.get_files(
        platform='linux32', version='3.2704', type='standard')
    self.assertEqual(len(files), 1)
    self.assertEqual(expected, files[0])

    files = builder.get_files(
        platform='linux32', version='3.2704', type='minimal')
    self.assertEqual(len(files), 0)

    files = builder.get_files(
        platform='linux32', version='3.2623', type='standard')
    self.assertEqual(len(files), 0)

    files = builder.get_files(
        platform='linux64', version='3.2704', type='standard')
    self.assertEqual(len(files), 0)

  # Test add/get of multiple files.
@ -162,7 +172,9 @@ class TestCefJSONBuilder(unittest.TestCase):
    for platform in platforms:
      for version in versions:
        for type in types:
          expected.append(
              self._add_test_file(
                  builder, platform=platform, type=type, version=version))

    self._verify_write_read(builder)
@ -187,7 +199,8 @@ class TestCefJSONBuilder(unittest.TestCase):
    for platform in platforms:
      for version in versions:
        for type in types:
          files = builder.get_files(
              platform=platform, type=type, version=version)
          self.assertEqual(len(files), 1)
          self.assertEqual(expected[idx], files[0])
          idx = idx + 1
@ -203,7 +216,8 @@ class TestCefJSONBuilder(unittest.TestCase):
    # Initial file versions.
    for platform in platforms:
      for type in types:
        self._add_test_file(
            builder, platform=platform, type=type, version=version)

    # No filter.
    files = builder.get_files()
@ -214,8 +228,13 @@ class TestCefJSONBuilder(unittest.TestCase):
    # Replace all file versions (due to new sha1).
    for platform in platforms:
      for type in types:
        expected.append(
            self._add_test_file(
                builder,
                platform=platform,
                type=type,
                version=version,
                attrib_idx=1))

    # No filter.
    files = builder.get_files()
@ -241,7 +260,8 @@ class TestCefJSONBuilder(unittest.TestCase):
    # Initial file versions.
    for platform in platforms:
      for type in types:
        self._add_test_file(
            builder, platform=platform, type=type, version=version)

    # No filter.
    files = builder.get_files()
@ -252,8 +272,13 @@ class TestCefJSONBuilder(unittest.TestCase):
    # Replace no file versions (due to same sha1).
    for platform in platforms:
      for type in types:
        expected.append(
            self._add_test_file(
                builder,
                platform=platform,
                type=type,
                version=version,
                shouldfail=True))

    # No filter.
    files = builder.get_files()
@ -283,12 +308,10 @@ class TestCefJSONBuilder(unittest.TestCase):
    self.assertFalse(builder.is_valid_chromium_version('foobar'))

    # The Git hashes must exist but the rest of the CEF version can be fake.
    versions = (('3.2704.1414.g185cd6c',
                 '51.0.2704.47'), ('3.2623.9999.gb90a3be', '49.0.2623.110'),
                ('3.2623.9999.g2a6491b',
                 '49.0.2623.87'), ('3.9999.9999.gab2636b', 'master'),)

    # Test with no query.
    for (cef, chromium) in versions:

View File

@ -16,6 +16,7 @@ if sys.platform == 'win32':
else:
  clang_format_exe = 'clang-format'


def clang_format(file_name, file_contents):
  # -assume-filename is necessary to find the .clang-format file and determine
  # the language when specifying contents via stdin.
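For context, the flag described in this comment drives an invocation along these lines; a minimal sketch, with the assumed file name and input purely illustrative:

import subprocess
p = subprocess.Popen(
    [clang_format_exe, '-assume-filename=cef_browser.h'],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE)
formatted, _ = p.communicate('void  f( ) {}\n')  # formatted source on stdout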

View File

@ -4,7 +4,6 @@
# found in the LICENSE file.

# TODO(slightlyoff): move to using shared version of this script.
'''This script makes it easy to combine libs and object files to a new lib,
optionally removing some of the object files in the input libs by regular
expression matching.
@ -19,10 +18,8 @@ import sys


def Shell(*args):
  '''Runs the program and args in args, returns the output from the program.'''
  process = subprocess.Popen(
      args, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  output = process.stdout.readlines()
  process.wait()
  retcode = process.returncode
@ -69,13 +66,17 @@ any object file (in the input libraries) that matches a given regular
expression.
'''


def GetOptionParser():
  parser = optparse.OptionParser(USAGE)
  parser.add_option(
      '-o', '--output', dest='output', help='write to this output library')
  parser.add_option(
      '-r',
      '--remove',
      dest='remove',
      help='object files matching this regexp will be removed '
      'from the output library')
  return parser

View File

@ -8,8 +8,8 @@ import sys
# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

if len(sys.argv) < 2:
  raise Exception('Path expected on command-line')

View File

@ -2,7 +2,6 @@
# Copyright 2017 The Chromium Embedded Framework Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be found
# in the LICENSE file.
"""
This script implements a simple HTTP server for receiving crash report uploads
from a Breakpad/Crashpad client (any CEF-based application). This script is
@ -120,16 +119,20 @@ import sys
import uuid
import zlib


def print_msg(msg):
  """ Write |msg| to stdout and flush. """
  timestr = datetime.datetime.now().strftime("%m/%d/%Y %H:%M:%S")
  sys.stdout.write("%s: %s\n" % (timestr, msg))
  sys.stdout.flush()


# Key identifying the minidump file.
minidump_key = 'upload_file_minidump'


class CrashHTTPRequestHandler(BaseHTTPRequestHandler):

  def __init__(self, dump_directory, *args):
    self._dump_directory = dump_directory
    BaseHTTPRequestHandler.__init__(self, *args)
@ -146,12 +149,12 @@ class CrashHTTPRequestHandler(BaseHTTPRequestHandler):
    if self.command != 'POST':
      return None
    return cgi.FieldStorage(
        fp=cStringIO.StringIO(data),
        headers=self.headers,
        environ={
            'REQUEST_METHOD': 'POST',
            'CONTENT_TYPE': self.headers['Content-Type'],
        })

  def _get_chunk_size(self):
    # Read to the next "\r\n".
@ -173,7 +176,7 @@ class CrashHTTPRequestHandler(BaseHTTPRequestHandler):
  def _unchunk_request(self, compressed):
    """ Read a chunked request body. Optionally decompress the result. """
    if compressed:
      d = zlib.decompressobj(16 + zlib.MAX_WBITS)

    # Chunked format is: <size>\r\n<bytes>\r\n<size>\r\n<bytes>\r\n0\r\n
    unchunked = b""
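A worked example of that chunk framing, independent of the handler (the data is made up; chunk sizes are hex):

body = b"4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n"
data, pos = b"", 0
while True:
  size_end = body.index(b"\r\n", pos)
  size = int(body[pos:size_end], 16)  # chunk sizes are hex
  if size == 0:
    break  # terminating zero-length chunk
  data += body[size_end + 2:size_end + 2 + size]
  pos = size_end + 2 + size + 2  # skip the chunk bytes and trailing \r\n
assert data == b"Wikipedia"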
@ -251,13 +254,14 @@ class CrashHTTPRequestHandler(BaseHTTPRequestHandler):
    if chunked:
      request_body = self._unchunk_request(compressed)
    else:
      content_length = int(self.headers[
          'Content-Length']) if 'Content-Length' in self.headers else 0
      if content_length > 0:
        request_body = self.rfile.read(content_length)
      else:
        request_body = self.rfile.read()
      if compressed:
        request_body = zlib.decompress(request_body, 16 + zlib.MAX_WBITS)

    # Parse the multi-part request.
    form_data = self._parse_post_data(request_body)
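The 16 + zlib.MAX_WBITS argument used in both paths tells zlib to expect a gzip wrapper rather than a raw deflate stream; a self-contained round trip, for illustration only:

import zlib
c = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)  # gzip container
blob = c.compress('hello') + c.flush()
assert zlib.decompress(blob, 16 + zlib.MAX_WBITS) == 'hello'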
@ -284,15 +288,18 @@ class CrashHTTPRequestHandler(BaseHTTPRequestHandler):
    with open(meta_file, 'w') as fp:
      json.dump(metadata, fp)


def HandleRequestsUsing(dump_store):
  return lambda *args: CrashHTTPRequestHandler(dump_directory, *args)


def RunCrashServer(port, dump_directory):
  """ Run the crash handler HTTP server. """
  httpd = HTTPServer(('', port), HandleRequestsUsing(dump_directory))
  print_msg('Starting httpd on port %d' % port)
  httpd.serve_forever()


# Program entry point.
if __name__ == "__main__":
  if len(sys.argv) != 3:
@ -307,4 +314,3 @@ if __name__ == "__main__":
    raise Exception('Directory does not exist: %s' % dump_directory)

  RunCrashServer(int(sys.argv[1]), dump_directory)

View File

@ -4,10 +4,12 @@
import datetime


def get_year():
  """ Returns the current year. """
  return str(datetime.datetime.now().year)


def get_date():
  """ Returns the current date. """
  return datetime.datetime.now().strftime('%B %d, %Y')

View File

@ -5,6 +5,7 @@
from subprocess import Popen, PIPE
import sys


def exec_cmd(cmd, path, input_string=None):
  """ Execute the specified command and return the result. """
  out = ''
@ -12,12 +13,21 @@ def exec_cmd(cmd, path, input_string=None):
  parts = cmd.split()
  try:
    if input_string is None:
      process = Popen(
          parts,
          cwd=path,
          stdout=PIPE,
          stderr=PIPE,
          shell=(sys.platform == 'win32'))
      out, err = process.communicate()
    else:
      process = Popen(
          parts,
          cwd=path,
          stdin=PIPE,
          stdout=PIPE,
          stderr=PIPE,
          shell=(sys.platform == 'win32'))
      out, err = process.communicate(input=input_string)
  except IOError, (errno, strerror):
    raise
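A usage sketch for exec_cmd as reformatted above, assuming it returns a dict with 'out' and 'err' keys (which is how git_util consumes it later in this commit); the checkout path is illustrative:

result = exec_cmd('git rev-parse HEAD', '/path/to/checkout')
print result['out'].strip()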

View File

@ -8,134 +8,152 @@ import shutil
import sys
import time


def read_file(name, normalize=True):
  """ Read a file. """
  try:
    f = open(name, 'r')
    # read the data
    data = f.read()
    if normalize:
      # normalize line endings
      data = data.replace("\r\n", "\n")
    return data
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to read file ' + name + ': ' + strerror)
    raise
  else:
    f.close()


def write_file(name, data):
  """ Write a file. """
  try:
    f = open(name, 'w')
    # write the data
    f.write(data)
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to write file ' + name + ': ' + strerror)
    raise
  else:
    f.close()


def path_exists(name):
  """ Returns true if the path currently exists. """
  return os.path.exists(name)


def backup_file(name):
  """ Rename the file to a name that includes the current time stamp. """
  move_file(name, name + '.' + time.strftime('%Y-%m-%d-%H-%M-%S'))


def copy_file(src, dst, quiet=True):
  """ Copy a file. """
  try:
    shutil.copy(src, dst)
    if not quiet:
      sys.stdout.write('Transferring ' + src + ' file.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to copy file from ' + src + ' to ' + dst + ': ' +
                     strerror)
    raise


def move_file(src, dst, quiet=True):
  """ Move a file. """
  try:
    shutil.move(src, dst)
    if not quiet:
      sys.stdout.write('Moving ' + src + ' file.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to move file from ' + src + ' to ' + dst + ': ' +
                     strerror)
    raise


def copy_files(src_glob, dst_folder, quiet=True):
  """ Copy multiple files. """
  for fname in iglob(src_glob):
    dst = os.path.join(dst_folder, os.path.basename(fname))
    if os.path.isdir(fname):
      copy_dir(fname, dst, quiet)
    else:
      copy_file(fname, dst, quiet)


def remove_file(name, quiet=True):
  """ Remove the specified file. """
  try:
    if path_exists(name):
      os.remove(name)
      if not quiet:
        sys.stdout.write('Removing ' + name + ' file.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to remove file ' + name + ': ' + strerror)
    raise


def copy_dir(src, dst, quiet=True):
  """ Copy a directory tree. """
  try:
    remove_dir(dst, quiet)
    shutil.copytree(src, dst)
    if not quiet:
      sys.stdout.write('Transferring ' + src + ' directory.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to copy directory from ' + src + ' to ' + dst +
                     ': ' + strerror)
    raise


def remove_dir(name, quiet=True):
  """ Remove the specified directory. """
  try:
    if path_exists(name):
      shutil.rmtree(name)
      if not quiet:
        sys.stdout.write('Removing ' + name + ' directory.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to remove directory ' + name + ': ' + strerror)
    raise


def make_dir(name, quiet=True):
  """ Create the specified directory. """
  try:
    if not path_exists(name):
      if not quiet:
        sys.stdout.write('Creating ' + name + ' directory.\n')
      os.makedirs(name)
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to create directory ' + name + ': ' + strerror)
    raise


def get_files(search_glob):
  """ Returns all files matching the search glob. """
  # Sort the result for consistency across platforms.
  return sorted(iglob(search_glob))


def read_version_file(file, args):
  """ Read and parse a version file (key=value pairs, one per line). """
  lines = read_file(file).split("\n")
  for line in lines:
    parts = line.split('=', 1)
    if len(parts) == 2:
      args[parts[0]] = parts[1]


def eval_file(src):
  """ Loads and evaluates the contents of the specified file. """
  return eval(read_file(src), {'__builtins__': None}, None)


def normalize_path(path):
  """ Normalizes the path separator to match the Unix standard. """
  if sys.platform == 'win32':
    return path.replace('\\', '/')
  return path
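read_version_file above expects plain key=value lines; a small illustration with hypothetical file contents and keys:

# If a VERSION file contains:
#   CEF_MAJOR=3
#   BUILD=2704
args = {}
read_version_file('VERSION', args)
# args -> {'CEF_MAJOR': '3', 'BUILD': '2704'}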

View File

@ -25,17 +25,16 @@ else:
  print 'Unknown operating system platform'
  sys.exit()

print "\nGenerating CEF version header file..."
cmd = [
    'python', 'tools/make_version_header.py', '--header',
    'include/cef_version.h', '--cef_version', 'VERSION', '--chrome_version',
    '../chrome/VERSION', '--cpp_header_dir', 'include'
]
RunAction(cef_dir, cmd)

print "\nPatching build configuration and source files for CEF..."
cmd = ['python', 'tools/patcher.py']
RunAction(cef_dir, cmd)

print "\nGenerating CEF project files..."
@ -99,14 +98,14 @@ if platform == 'windows':
#
if bool(int(os.environ.get('WIN_CUSTOM_TOOLCHAIN', '0'))):
  required_vars = [
      'CEF_VCVARS',
      'GYP_MSVS_OVERRIDE_PATH',
      'GYP_MSVS_VERSION',
      'VS_CRT_ROOT',
      'SDK_ROOT',
      'INCLUDE',
      'LIB',
      'PATH',
  ]
  for var in required_vars:
    if not var in os.environ.keys():
@ -128,7 +127,7 @@ for dir, config in configs.items():
  write_file(args_gn_path, args_gn_contents)

  # Generate the Ninja config.
  cmd = ['gn', 'gen', os.path.join('out', dir)]
  if 'GN_ARGUMENTS' in os.environ.keys():
    cmd.extend(os.environ['GN_ARGUMENTS'].split(' '))
  RunAction(src_dir, cmd)

View File

@ -10,7 +10,7 @@ try:
  import gclient_utils
except ImportError, e:
  # Search the PATH environment variable to find the depot_tools folder.
  depot_tools = None
  paths = os.environ.get('PATH').split(os.pathsep)
  for path in paths:
    if os.path.exists(os.path.join(path, 'gclient_utils.py')):
@ -25,6 +25,7 @@ except ImportError, e:
  sys.path.append(depot_tools)
  import gclient_utils


# Copied from gclient.py python code.
def RunAction(dir, command):
  """Runs the action."""
@ -35,8 +36,7 @@ def RunAction(dir, command):
    command[0] = sys.executable

  try:
    gclient_utils.CheckCallAndFilterAndHeader(command, cwd=dir, always=True)
  except gclient_utils.Error, e:
    # Use a discrete exit status code of 2 to indicate that a hook action
    # failed. Users of this script may wish to treat hook action failures

View File

@ -12,11 +12,13 @@ if sys.platform == 'win32':
else:
  git_exe = 'git'


def is_checkout(path):
  """ Returns true if the path represents a git checkout. """
  return os.path.isdir(os.path.join(path, '.git'))


def get_hash(path='.', branch='HEAD'):
  """ Returns the git hash for the specified branch/tag/hash. """
  cmd = "%s rev-parse %s" % (git_exe, branch)
  result = exec_cmd(cmd, path)
@ -24,7 +26,8 @@ def get_hash(path = '.', branch = 'HEAD'):
    return result['out'].strip()
  return 'Unknown'


def get_url(path='.'):
  """ Returns the origin url for the specified path. """
  cmd = "%s config --get remote.origin.url" % git_exe
  result = exec_cmd(cmd, path)
@ -32,7 +35,8 @@ def get_url(path = '.'):
    return result['out'].strip()
  return 'Unknown'


def get_commit_number(path='.', branch='HEAD'):
  """ Returns the number of commits in the specified branch/tag/hash. """
  cmd = "%s rev-list --count %s" % (git_exe, branch)
  result = exec_cmd(cmd, path)
@ -40,6 +44,7 @@ def get_commit_number(path = '.', branch = 'HEAD'):
    return result['out'].strip()
  return '0'


def get_changed_files(path, hash):
  """ Retrieves the list of changed files. """
  if hash == 'unstaged':
@ -57,6 +62,7 @@ def get_changed_files(path, hash):
    return files.strip().split("\n")
  return []


def write_indented_output(output):
  """ Apply a fixed amount of indent to lines before printing. """
  if output == '':
@ -67,6 +73,7 @@ def write_indented_output(output):
      continue
    sys.stdout.write('\t%s\n' % line)


def git_apply_patch_file(patch_path, patch_dir):
  """ Apply |patch_path| to files in |patch_dir|. """
  patch_name = os.path.basename(patch_path)
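A brief usage sketch of the helpers above (run from inside a git checkout; output illustrative):

if is_checkout('.'):
  print 'HEAD is %s (%s commits) from %s' % (get_hash('.'),
                                             get_commit_number('.'),
                                             get_url('.'))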

View File

@ -80,16 +80,18 @@ else:
  print 'Unknown operating system platform'
  sys.exit()


def msg(msg):
  print 'NOTE: ' + msg


def NameValueListToDict(name_value_list):
  """
  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
  of the pairs. If a string is simply NAME, then the value in the dictionary
  is set to True. If VALUE can be converted to a boolean or integer, it is.
  """
  result = {}
  for item in name_value_list:
    tokens = item.split('=', 1)
    if len(tokens) == 2:
@ -111,6 +113,7 @@ def NameValueListToDict(name_value_list):
      result[tokens[0]] = True
  return result
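As the docstring says, bare names become True and convertible values are coerced; an illustration with made-up GN arg names:

print NameValueListToDict(['is_debug=false', 'use_allocator=none', 'dcheck'])
# -> {'is_debug': False, 'use_allocator': 'none', 'dcheck': True}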

def ShlexEnv(env_name):
  """
  Split an environment variable using shell-like syntax.
@ -120,6 +123,7 @@ def ShlexEnv(env_name):
    flags = shlex.split(flags)
  return flags


def MergeDicts(*dict_args):
  """
  Given any number of dicts, shallow copy and merge into a new dict.
@ -130,6 +134,7 @@ def MergeDicts(*dict_args):
    result.update(dictionary)
  return result


def GetValueString(val):
  """
  Return the string representation of |val| expected by GN.
@ -143,6 +148,7 @@ def GetValueString(val):
    return 'false'
  return val


def GetChromiumDefaultArgs():
  """
  Return default GN args. These must match the Chromium defaults.
@ -152,11 +158,11 @@ def GetChromiumDefaultArgs():
  # the defaults.
  defaults = {
      'dcheck_always_on': False,
      'is_asan': False,
      'is_debug': True,
      'is_official_build': False,
      'target_cpu': 'x64',
  }

  if platform == 'linux':
@ -171,6 +177,7 @@ def GetChromiumDefaultArgs():
  return defaults


def GetArgValue(args, key):
  """
  Return an existing GN arg value or the Chromium default.
@ -179,6 +186,7 @@ def GetArgValue(args, key):
  assert key in defaults, "No default Chromium value specified for %s" % key
  return args.get(key, defaults[key])


def GetRecommendedDefaultArgs():
  """
  Return recommended default GN args that differ from Chromium defaults.
@ -187,8 +195,8 @@ def GetRecommendedDefaultArgs():
  # the defaults.
  result = {
      # Enable NaCL. Default is true. False is recommended for faster builds.
      'enable_nacl': False,
  }

  if platform == 'linux':
@ -209,27 +217,29 @@ def GetRecommendedDefaultArgs():
  return result


def GetGNEnvArgs():
  """
  Return GN args specified via the GN_DEFINES env variable.
  """
  return NameValueListToDict(ShlexEnv('GN_DEFINES'))


def GetRequiredArgs():
  """
  Return required GN args. Also enforced by assert() in //cef/BUILD.gn.
  """
  result = {
      # Set ENABLE_PRINTING=1 ENABLE_BASIC_PRINTING=1.
      'enable_basic_printing': True,
      'enable_print_preview': False,

      # Enable support for Widevine CDM.
      'enable_widevine': True,

      # CEF does not currently support component builds. See
      # https://bitbucket.org/chromiumembedded/cef/issues/1617
      'is_component_build': False,
  }

  if platform == 'linux' or platform == 'macosx':
@ -243,6 +253,7 @@ def GetRequiredArgs():
  return result


def GetMergedArgs(build_args):
  """
  Return merged GN args.
@ -258,6 +269,7 @@ def GetMergedArgs(build_args):
  return MergeDicts(dict, required)


def ValidateArgs(args):
  """
  Validate GN arg combinations that we know about. Also provide suggestions
@ -288,7 +300,8 @@ def ValidateArgs(args):
  elif platform == 'windows':
    assert target_cpu in ('x86', 'x64'), 'target_cpu must be "x86" or "x64"'
  elif platform == 'linux':
    assert target_cpu in ('x86', 'x64',
                          'arm'), 'target_cpu must be "x86", "x64" or "arm"'

  if platform == 'linux':
    if target_cpu == 'x86':
@ -313,7 +326,8 @@ def ValidateArgs(args):
    # Non-official debug builds should use /DEBUG:FASTLINK.
    if not is_official_build and is_debug and not is_win_fastlink:
      msg('is_official_build=false + is_debug=true recommends is_win_fastlink=true'
         )

  # Windows custom toolchain requirements.
  #
@ -374,6 +388,7 @@ def ValidateArgs(args):
    if (os.path.exists(vcvars_path)):
      msg('INCLUDE/LIB/PATH values will be derived from %s' % vcvars_path)


def GetConfigArgs(args, is_debug, cpu):
  """
  Return merged GN args for the configuration and validate.
@ -390,8 +405,8 @@ def GetConfigArgs(args, is_debug, cpu):
    add_args['dcheck_always_on'] = True

  result = MergeDicts(args, add_args, {
      'is_debug': is_debug,
      'target_cpu': cpu,
  })

  if platform == 'linux' and cpu != 'arm':
@ -403,6 +418,7 @@ def GetConfigArgs(args, is_debug, cpu):
  ValidateArgs(result)
  return result


def LinuxSysrootExists(cpu):
  """
  Returns true if the sysroot for the specified |cpu| architecture exists.
@ -421,6 +437,7 @@ def LinuxSysrootExists(cpu):
  return os.path.isdir(os.path.join(sysroot_root, sysroot_name))


def GetAllPlatformConfigs(build_args):
  """
  Return a map of directory name to GN args for the current platform.
@ -447,7 +464,8 @@ def GetAllPlatformConfigs(build_args):
      if LinuxSysrootExists(cpu):
        supported_cpus.append(cpu)
      else:
        msg('Not generating %s configuration due to missing sysroot directory'
            % cpu)
    else:
      supported_cpus = ['x64']
  elif platform == 'windows':
@ -464,6 +482,7 @@ def GetAllPlatformConfigs(build_args):
  return result


def GetConfigFileContents(args):
  """
  Generate config file contents for the arguments.
@ -473,6 +492,7 @@ def GetConfigFileContents(args):
    pairs.append("%s=%s" % (k, GetValueString(args[k])))
  return "\n".join(pairs)

# Program entry point.
if __name__ == '__main__':
  import sys

View File

@ -14,14 +14,36 @@ import sys
import os

module_order = [
    "_sse",
    "-sse",
    "_ssse",
    "-ssse",
    "_sse2",
    "-sse2",
    "_ssse2",
    "-ssse2",
    "_sse3",
    "-sse3",
    "_ssse3",
    "-ssse3",
    "_sse4",
    "-sse4",
    "_ssse4",
    "-ssse4",
    "_avx",
    "-avx",
    "_savx",
    "-savx",
    "_avx1",
    "-avx1",
    "_savx1",
    "-savx1",
    "_avx2",
    "-avx2",
    "_savx2",
    "-savx2",
]


def get_obj_class(item):
  item = item.lower()
@ -31,12 +53,16 @@ def get_obj_class(item):
      return 1 + i
  return 0


def obj_compare(x, y):
  xc = get_obj_class(x)
  yc = get_obj_class(y)
  if xc < yc:
    return -1
  elif xc > yc:
    return 1
  else:
    return 0


def process_line(line):
@ -50,7 +76,7 @@ def process_line(line):
    objects = part2[:stampsIndex]
    objects_list = objects.split()
    objects_list = sorted(objects_list, cmp=obj_compare)
    return part1 + " " + " ".join(objects_list) + " " + stamps
  return line
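obj_compare orders only by SIMD class, and Python 2's sorted() is stable, so plain object files float ahead of the SSE/AVX-specialized ones while everything else keeps its relative order. With made-up object names:

objs = ['a_sse2.obj', 'plain.obj', 'b_avx2.obj']
print sorted(objs, cmp=obj_compare)
# -> ['plain.obj', 'a_sse2.obj', 'b_avx2.obj']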
@ -70,5 +96,6 @@ def process_file(path):
    f.write("\n".join(result))
    f.write("\n")


def apply(confpath):
  process_file(os.path.join(confpath, "obj", "cef", "libcef.ninja"))

View File

@ -5,45 +5,48 @@
from cef_parser import *
from date_util import *


def make_capi_global_funcs(funcs, defined_names, translate_map, indent):
  result = ''
  first = True
  for func in funcs:
    comment = func.get_comment()
    if first or len(comment) > 0:
      result += '\n' + format_comment(comment, indent, translate_map)
    if func.get_retval().get_type().is_result_string():
      result += indent + '// The resulting string must be freed by calling cef_string_userfree_free().\n'
    result += indent + 'CEF_EXPORT ' + func.get_capi_proto(defined_names) + ';\n'
    if first:
      first = False
  return result


def make_capi_member_funcs(funcs, defined_names, translate_map, indent):
  result = ''
  first = True
  for func in funcs:
    comment = func.get_comment()
    if first or len(comment) > 0:
      result += '\n' + format_comment(comment, indent, translate_map)
    if func.get_retval().get_type().is_result_string():
      result += indent + '// The resulting string must be freed by calling cef_string_userfree_free().\n'
    parts = func.get_capi_parts()
    result += indent+parts['retval']+' (CEF_CALLBACK *'+parts['name']+ \
              ')('+string.join(parts['args'], ', ')+');\n'
    if first:
      first = False
  return result


def make_capi_header(header, filename):
  # structure names that have already been defined
  defined_names = header.get_defined_structs()

  # map of strings that will be changed in C++ comments
  translate_map = header.get_capi_translations()

  # header string
  result = \
"""// Copyright (c) $YEAR$ Marshall A. Greenblatt. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
@ -88,9 +91,9 @@ def make_capi_header(header, filename):
"""

  # Protect against incorrect use of test headers.
  if filename.startswith('test/'):
    result += \
"""#if !defined(BUILDING_CEF_SHARED) && !defined(WRAPPING_CEF_SHARED) && \\
    !defined(UNIT_TEST)
#error This file can be included for unit tests only
@ -98,46 +101,47 @@ def make_capi_header(header, filename):
"""

  classes = header.get_classes(filename)

  # identify all includes and forward declarations
  translated_includes = set([])
  internal_includes = set([])
  all_declares = set([])
  for cls in classes:
    includes = cls.get_includes()
    for include in includes:
      if include.startswith('base/'):
        # base/ headers are C++. They should not be included by
        # translated CEF API headers.
        raise Exception('Disallowed include of %s.h from %s' % (include,
                                                                filename))
      elif include.startswith('internal/'):
        # internal/ headers may be C or C++. Include them as-is.
        internal_includes.add(include)
      else:
        translated_includes.add(include)
    declares = cls.get_forward_declares()
    for declare in declares:
      declare_cls = header.get_class(declare)
      if declare_cls is None:
        raise Exception('Unknown class: %s' % declare)
      all_declares.add(declare_cls.get_capi_name())

  # output translated includes
  if len(translated_includes) > 0:
    sorted_includes = sorted(translated_includes)
    for include in sorted_includes:
      result += '#include "include/capi/' + include + '_capi.h"\n'
  else:
    result += '#include "include/capi/cef_base_capi.h"\n'

  # output internal includes
  if len(internal_includes) > 0:
    sorted_includes = sorted(internal_includes)
    for include in sorted_includes:
      result += '#include "include/' + include + '.h"\n'

  result += \
"""
#ifdef __cplusplus
extern "C" {
@ -145,42 +149,41 @@ extern "C" {
"""

  # output forward declarations
  if len(all_declares) > 0:
    sorted_declares = sorted(all_declares)
    for declare in sorted_declares:
      result += 'struct _' + declare + ';\n'

  # output classes
  for cls in classes:
    # virtual functions are inside the structure
    classname = cls.get_capi_name()
    result += '\n' + format_comment(cls.get_comment(), '', translate_map)
    result += 'typedef struct _'+classname+' {\n'+\
              ' ///\n'+\
              ' // Base structure.\n'+\
              ' ///\n'+\
              ' '+cls.get_parent_capi_name()+' base;\n'
    funcs = cls.get_virtual_funcs()
    result += make_capi_member_funcs(funcs, defined_names, translate_map, ' ')
    result += '} ' + classname + ';\n\n'
    defined_names.append(cls.get_capi_name())

    # static functions become global
    funcs = cls.get_static_funcs()
    if len(funcs) > 0:
      result += make_capi_global_funcs(funcs, defined_names, translate_map,
                                       '') + '\n'

  # output global functions
  funcs = header.get_funcs(filename)
  if len(funcs) > 0:
    result += make_capi_global_funcs(funcs, defined_names, translate_map, '')

  # footer string
  result += \
"""
#ifdef __cplusplus
}
@ -189,34 +192,35 @@ extern "C" {
#endif // $GUARD$
"""

  # add the copyright year
  result = result.replace('$YEAR$', get_year())
  # add the guard string
  guard = 'CEF_INCLUDE_CAPI_' + string.upper(
      filename.replace('/', '_').replace('.', '_capi_')) + '_'
  result = result.replace('$GUARD$', guard)
  return result


def write_capi_header(header, header_dir, filename):
  file = get_capi_file_name(os.path.join(header_dir, filename))
  newcontents = make_capi_header(header, filename)
  return (file, newcontents)


# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 2:
    sys.stderr.write('Usage: ' + sys.argv[0] + ' <infile>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # dump the result to stdout
  filename = os.path.split(sys.argv[1])[1]
  sys.stdout.write(make_capi_header(header, filename))

@@ -12,234 +12,242 @@ script_dir = os.path.dirname(__file__)
# CEF root directory.
cef_dir = os.path.abspath(os.path.join(script_dir, os.pardir))


def get_files_for_variable(cmake_path, variables, variable):
  """ Returns the path values associated with |variable| and relative to the
      |cmake_path| directory. """
  if not variable in variables:
    raise Exception('Variable %s does not exist' % variable)

  # Cmake file directory.
  cmake_dirname = os.path.dirname(cmake_path) + '/'

  # Return path values relative to the cmake file directory.
  # Example 1:
  #   cmake file   = "/path/to/libcef_dll/CMakeLists.txt"
  #   include path = "/path/to/libcef_dll/wrapper/cef_browser_info_map.h"
  #   return path  = "wrapper/cef_browser_info_map.h"
  # Example 2:
  #   cmake file   = "/path/to/libcef_dll/CMakeLists.txt"
  #   include path = "/path/to/include/internal/cef_export.h"
  #   return path  = "../include/internal/cef_export.h"
  new_paths = []
  paths = variables[variable]
  for path in paths:
    abspath = os.path.join(cef_dir, path)
    newpath = normalize_path(os.path.relpath(abspath, cmake_dirname))
    new_paths.append(newpath)
  return new_paths
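
# Worked example (not part of the commit): the conversion is a plain
# os.path.relpath, assuming normalize_path() only swaps backslashes for
# forward slashes:
#
#   import os
#   cmake_dirname = '/path/to/libcef_dll/'
#   os.path.relpath('/path/to/libcef_dll/wrapper/cef_browser_info_map.h',
#                   cmake_dirname)  # wrapper/cef_browser_info_map.h
#   os.path.relpath('/path/to/include/internal/cef_export.h',
#                   cmake_dirname)  # ../include/internal/cef_export.h
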
def format_cmake_set(name, values):
  result = 'set(%s\n' % name
  for value in values:
    result += '  %s\n' % value
  return result + '  )\n'
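
# Worked example (not part of the commit; variable name illustrative):
#
#   format_cmake_set('CEFSIMPLE_SRCS', ['cefsimple_win.cc', 'simple_app.cc'])
#
# returns:
#
#   set(CEFSIMPLE_SRCS
#     cefsimple_win.cc
#     simple_app.cc
#     )
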
def format_cmake_group(cmake_path, name, files, platform_sep, append_macro):
  platforms = {}
  common = []

  # Folder will be the cmake parent directory name combined with the path to
  # first file in the files list.
  # Example 1:
  #   cmake file   = "/path/to/libcef_dll/CMakeLists.txt"
  #   include path = "wrapper/cef_browser_info_map.h"
  #   folder       = "libcef_dll\\\\wrapper"
  # Example 2:
  #   cmake file   = "/path/to/libcef_dll/CMakeLists.txt"
  #   include path = "../include/internal/cef_export.h"
  #   folder       = "include\\\\internal"
  folder = os.path.basename(os.path.dirname(cmake_path))
  folder = os.path.dirname(os.path.normpath(os.path.join(folder, files[0])))
  folder = normalize_path(folder).replace('/', '\\\\\\\\')

  # Group the files by platform.
  for file in files:
    parts = file.split(platform_sep)
    file = parts[0]
    if len(parts) > 1:
      # Add the file under the platform.
      platform = parts[1]
      if not platform in platforms:
        platforms[platform] = []
      platforms[platform].append(file)
    else:
      common.append(file)

  result = ''
  if len(common) > 0:
    result += format_cmake_set(name, common)

  if len(platforms) > 0:
    keys = sorted(platforms.keys())
    for key in keys:
      result += format_cmake_set(name + '_' + key, platforms[key])
    result += '%s(%s)\n' % (append_macro, name)

  result += 'source_group(%s FILES ${%s})\n\n' % (folder, name)
  return result
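
# Worked example (not part of the commit; file names illustrative): one
# platform-tagged entry yields a common set, a per-platform set, the append
# macro, and the source_group:
#
#   format_cmake_group('/path/to/libcef_dll/CMakeLists.txt',
#                      'LIBCEF_WRAPPER_SRCS',
#                      ['wrapper/util.cc', 'wrapper/util_win.cc:WINDOWS'],
#                      ':', 'APPEND_PLATFORM_SOURCES')
#
# returns roughly:
#
#   set(LIBCEF_WRAPPER_SRCS
#     wrapper/util.cc
#     )
#   set(LIBCEF_WRAPPER_SRCS_WINDOWS
#     wrapper/util_win.cc
#     )
#   APPEND_PLATFORM_SOURCES(LIBCEF_WRAPPER_SRCS)
#   source_group(libcef_dll\\\\wrapper FILES ${LIBCEF_WRAPPER_SRCS})
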
def format_cmake_library(name, group_names):
  result = 'add_library(%s\n' % name
  for group in group_names:
    result += '  ${%s}\n' % group
  return result + '  )\n\n'
def process_cmake_template_segment(segment, segment_ct, cmake_path, variables):
  prefix = None
  library = None
  set = None
  includes = []
  suffix = '_SRCS'  # Appended to each group name before the platform name.
  platform_sep = ':'  # Used to separate value from platform name.
  append_macro = 'APPEND_PLATFORM_SOURCES'  # CMake macro name.

  # Extract values from |segment|. Example |segment| contents:
  #   'prefix': 'cefsimple',
  #   'includes': [
  #     'cefsimple_sources_common',
  #     'cefsimple_sources_win:WINDOWS',
  #     'cefsimple_sources_mac:MACOSX',
  #     'cefsimple_sources_linux:LINUX',
  #   ],
  values = eval('{' + segment + '}', {'__builtins__': None}, None)

  if 'prefix' in values:
    prefix = values['prefix']
  else:
    raise Exception('Missing prefix value in segment %d' % segment_ct)

  if 'library' in values:
    library = values['library']

  if 'set' in values:
    set = values['set']

  if 'append_macro' in values:
    append_macro = values['append_macro']

  if 'includes' in values and len(values['includes']) > 0:
    for include in values['includes']:
      parts = include.strip().split(platform_sep)
      files = get_files_for_variable(cmake_path, variables, parts[0])
      if len(parts) == 2:
        # Append the platform to each file path.
        files = [file + platform_sep + parts[1] for file in files]
      includes.extend(files)
  else:
    raise Exception('Missing includes value in segment %d' % segment_ct)

  # Sort the file paths alphabetically.
  includes.sort()

  # Group files by path.
  # For example, '../include/base/foo.h' and '../include/base/bar.h' will be
  # grouped as 'PREFIX_INCLUDE_BASE'.
  groups = {}
  for include in includes:
    paths = include.split('/')
    label = prefix
    for path in paths[0:-1]:
      if path == '..':
        continue
      label += '_' + path
    label = label.replace('.', '_').upper()
    if not label in groups:
      groups[label] = []
    groups[label].append(include)

  # Create the output results.
  result = ''

  keys = sorted(groups.keys())
  for key in keys:
    # Add a group of files that share the same path.
    result += format_cmake_group(cmake_path, key + suffix, groups[key], \
                                 platform_sep, append_macro)

  if not library is None:
    # Add the library declaration if requested.
    result += format_cmake_library(library, [key + suffix for key in keys])

  if not set is None:
    # Add the set declaration if requested.
    result += format_cmake_set(set, \
                               ['${' + key + suffix + '}' for key in keys])

  return result.strip()
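
# Worked example (not part of the commit): with prefix 'libcef' the include
# '../include/base/foo.h' lands in group LIBCEF_INCLUDE_BASE:
#
#   label = 'libcef'
#   for path in '../include/base/foo.h'.split('/')[0:-1]:
#     if path == '..':
#       continue                     # '..' segments are skipped
#     label += '_' + path
#   label.replace('.', '_').upper()  # LIBCEF_INCLUDE_BASE
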
def process_cmake_template(input, output, variables, quiet=False):
  """ Reads the |input| template, parses variable substitution sections and
      writes |output|. """
  if not quiet:
    sys.stdout.write('Processing "%s" to "%s"...\n' % (input, output))

  if not os.path.exists(input):
    raise Exception('File %s does not exist' % input)

  cmake_path = normalize_path(os.path.abspath(input))
  template = read_file(cmake_path)

  delim_start = '{{'
  delim_end = '}}'

  # Process the template file, replacing segments delimited by |delim_start|
  # and |delim_end|.
  result = ''
  end = 0
  segment_ct = 0
  while True:
    start = template.find(delim_start, end)
    if start == -1:
      break
    result += template[end:start]
    end = template.find(delim_end, start + len(delim_start))
    if end == -1:
      break
    segment = template[start + len(delim_start):end]
    segment_ct = segment_ct + 1
    result += process_cmake_template_segment(segment, segment_ct, \
                                             cmake_path, variables)
    end += len(delim_end)

  result += template[end:]

  # Only write the output file if the contents have changed.
  changed = True
  if os.path.exists(output):
    existing = read_file(output)
    changed = result != existing
  if changed:
    write_file(output, result)
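
# For reference (not part of the commit), a template consumed by this
# function embeds Python-literal segments between {{ and }}; a hypothetical
# CMakeLists.txt.in fragment:
#
#   {{
#     'prefix': 'cefsimple',
#     'library': 'cefsimple',
#     'includes': [
#       'cefsimple_sources_common',
#       'cefsimple_sources_win:WINDOWS',
#     ],
#   }}
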
def read_gypi_variables(source):
  """ Read the |source| gypi file and extract the variables section. """
  path = os.path.join(cef_dir, source + '.gypi')
  if not os.path.exists(path):
    raise Exception('File %s does not exist' % path)
  contents = eval_file(path)
  if not 'variables' in contents:
    raise Exception('File %s does not have a variables section' % path)
  return contents['variables']
# File entry point.
if __name__ == "__main__":
  # Verify that the correct number of command-line arguments are provided.
  if len(sys.argv) != 3:
    sys.stderr.write('Usage: ' + sys.argv[0] + ' <infile> <outfile>')
    sys.exit()

  # Read the gypi files and combine into a single dictionary.
  variables1 = read_gypi_variables('cef_paths')
  variables2 = read_gypi_variables('cef_paths2')
  variables = dict(variables1.items() + variables2.items())

  # Process the cmake template.
  process_cmake_template(sys.argv[1], sys.argv[2], variables)
@@ -4,103 +4,104 @@
from cef_parser import *


def make_cpptoc_header(header, clsname):
  cls = header.get_class(clsname)
  if cls is None:
    raise Exception('Class does not exist: ' + clsname)

  dllside = cls.is_library_side()

  directory = cls.get_file_directory()
  defname = ''
  if not directory is None:
    defname += directory + '_'
  defname += get_capi_name(clsname[3:], False)
  defname = defname.upper()

  capiname = cls.get_capi_name()

  result = get_copyright()

  result += '#ifndef CEF_LIBCEF_DLL_CPPTOC_'+defname+'_CPPTOC_H_\n'+ \
            '#define CEF_LIBCEF_DLL_CPPTOC_'+defname+'_CPPTOC_H_\n' + \
            '#pragma once\n'

  if dllside:
    result += """
#if !defined(BUILDING_CEF_SHARED)
#error This file can be included DLL-side only
#endif
"""
  else:
    result += """
#if !defined(WRAPPING_CEF_SHARED)
#error This file can be included wrapper-side only
#endif
"""

  # include the headers for this class
  result += '\n#include "include/'+cls.get_file_name()+'"\n' \
            '#include "include/capi/'+cls.get_capi_file_name()+'"\n'

  # include headers for any forward declared classes that are not in the same file
  declares = cls.get_forward_declares()
  for declare in declares:
    dcls = header.get_class(declare)
    if dcls.get_file_name() != cls.get_file_name():
      result += '#include "include/'+dcls.get_file_name()+'"\n' \
                '#include "include/capi/'+dcls.get_capi_file_name()+'"\n'

  base_class_name = header.get_base_class_name(clsname)
  base_scoped = True if base_class_name == 'CefBaseScoped' else False
  if base_scoped:
    template_file = 'cpptoc_scoped.h'
    template_class = 'CefCppToCScoped'
  else:
    template_file = 'cpptoc_ref_counted.h'
    template_class = 'CefCppToCRefCounted'

  result += '#include "libcef_dll/cpptoc/' + template_file + '"'
  result += '\n\n// Wrap a C++ class with a C structure.\n'
  if dllside:
    result += '// This class may be instantiated and accessed DLL-side only.\n'
  else:
    result += '// This class may be instantiated and accessed wrapper-side only.\n'

  result += 'class '+clsname+'CppToC\n'+ \
            '    : public ' + template_class + '<'+clsname+'CppToC, '+clsname+', '+capiname+'> {\n'+ \
            ' public:\n'+ \
            '  '+clsname+'CppToC();\n'+ \
            '};\n\n'

  result += '#endif  // CEF_LIBCEF_DLL_CPPTOC_' + defname + '_CPPTOC_H_'
  return result
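
# For a concrete library-side, ref-counted class such as CefBrowser
# (capiname 'cef_browser_t'), the declaration emitted above comes out
# roughly as (not part of the commit):
#
#   class CefBrowserCppToC
#       : public CefCppToCRefCounted<CefBrowserCppToC, CefBrowser,
#                                    cef_browser_t> {
#    public:
#     CefBrowserCppToC();
#   };
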
def write_cpptoc_header(header, clsname, dir):
  # give the output file the same directory offset as the input file
  cls = header.get_class(clsname)
  dir = os.path.dirname(os.path.join(dir, cls.get_file_name()))
  file = os.path.join(dir, get_capi_name(clsname[3:], False) + '_cpptoc.h')
  newcontents = make_cpptoc_header(header, clsname)
  return (file, newcontents)


# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: ' + sys.argv[0] + ' <infile> <classname>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # dump the result to stdout
  sys.stdout.write(make_cpptoc_header(header, sys.argv[2]))
File diff suppressed because it is too large.
@@ -4,146 +4,149 @@
from cef_parser import *


def make_function_body_block(cls):
  impl = '  // ' + cls.get_name() + ' methods.\n'
  funcs = cls.get_virtual_funcs()
  for func in funcs:
    impl += '  ' + func.get_cpp_proto()
    if cls.is_client_side():
      impl += ' override;\n'
    else:
      impl += ' OVERRIDE;\n'
  return impl


def make_function_body(header, cls):
  impl = make_function_body_block(cls)

  cur_cls = cls
  while True:
    parent_name = cur_cls.get_parent_name()
    if is_base_class(parent_name):
      break
    else:
      parent_cls = header.get_class(parent_name)
      if parent_cls is None:
        raise Exception('Class does not exist: ' + parent_name)
      if len(impl) > 0:
        impl += '\n'
      impl += make_function_body_block(parent_cls)
    cur_cls = header.get_class(parent_name)

  return impl
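
# The emitted block is one comment line plus one prototype per virtual
# method; for a client-side handler class it looks roughly like this
# (signature illustrative, not part of the commit):
#
#   // CefLoadHandler methods.
#   void OnLoadingStateChange(CefRefPtr<CefBrowser> browser, bool isLoading,
#                             bool canGoBack, bool canGoForward) override;
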
def make_ctocpp_header(header, clsname):
  cls = header.get_class(clsname)
  if cls is None:
    raise Exception('Class does not exist: ' + clsname)

  clientside = cls.is_client_side()

  directory = cls.get_file_directory()
  defname = ''
  if not directory is None:
    defname += directory + '_'
  defname += get_capi_name(clsname[3:], False)
  defname = defname.upper()

  capiname = cls.get_capi_name()

  result = get_copyright()

  result += '#ifndef CEF_LIBCEF_DLL_CTOCPP_'+defname+'_CTOCPP_H_\n'+ \
            '#define CEF_LIBCEF_DLL_CTOCPP_'+defname+'_CTOCPP_H_\n' + \
            '#pragma once\n'

  if clientside:
    result += """
#if !defined(BUILDING_CEF_SHARED)
#error This file can be included DLL-side only
#endif
"""
  else:
    result += """
#if !defined(WRAPPING_CEF_SHARED)
#error This file can be included wrapper-side only
#endif
"""

  # build the function body
  func_body = make_function_body(header, cls)

  # include standard headers
  if func_body.find('std::map') > 0 or func_body.find('std::multimap') > 0:
    result += '\n#include <map>'
  if func_body.find('std::vector') > 0:
    result += '\n#include <vector>'

  # include the headers for this class
  result += '\n#include "include/'+cls.get_file_name()+'"'+ \
            '\n#include "include/capi/'+cls.get_capi_file_name()+'"\n'

  # include headers for any forward declared classes that are not in the same file
  declares = cls.get_forward_declares()
  for declare in declares:
    dcls = header.get_class(declare)
    if dcls.get_file_name() != cls.get_file_name():
      result += '#include "include/'+dcls.get_file_name()+'"\n' \
                '#include "include/capi/'+dcls.get_capi_file_name()+'"\n'

  base_class_name = header.get_base_class_name(clsname)
  base_scoped = True if base_class_name == 'CefBaseScoped' else False
  if base_scoped:
    template_file = 'ctocpp_scoped.h'
    template_class = 'CefCToCppScoped'
  else:
    template_file = 'ctocpp_ref_counted.h'
    template_class = 'CefCToCppRefCounted'

  result += '#include "libcef_dll/ctocpp/' + template_file + '"'
  result += '\n\n// Wrap a C structure with a C++ class.\n'
  if clientside:
    result += '// This class may be instantiated and accessed DLL-side only.\n'
  else:
    result += '// This class may be instantiated and accessed wrapper-side only.\n'

  result += 'class '+clsname+'CToCpp\n'+ \
            '    : public ' + template_class + '<'+clsname+'CToCpp, '+clsname+', '+capiname+'> {\n'+ \
            ' public:\n'+ \
            '  '+clsname+'CToCpp();\n\n'
  result += func_body
  result += '};\n\n'

  result += '#endif  // CEF_LIBCEF_DLL_CTOCPP_' + defname + '_CTOCPP_H_'
  return result
def write_ctocpp_header(header, clsname, dir):
  # give the output file the same directory offset as the input file
  cls = header.get_class(clsname)
  dir = os.path.dirname(os.path.join(dir, cls.get_file_name()))
  file = os.path.join(dir, get_capi_name(clsname[3:], False) + '_ctocpp.h')
  newcontents = make_ctocpp_header(header, clsname)
  return (file, newcontents)


# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: ' + sys.argv[0] + ' <infile> <classname>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # dump the result to stdout
  sys.stdout.write(make_ctocpp_header(header, sys.argv[2]))
File diff suppressed because it is too large.
@@ -15,10 +15,12 @@ import sys
import tarfile
import zipfile


def create_zip_archive(input_dir):
  """ Creates a zip archive of the specified input directory. """
  zip_file = input_dir + '.zip'
  zf = zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED, True)

  def addDir(dir):
    for f in os.listdir(dir):
      full_path = os.path.join(dir, f)
@@ -27,9 +29,11 @@ def create_zip_archive(input_dir):
      else:
        zf.write(full_path, os.path.relpath(full_path, \
                 os.path.join(input_dir, os.pardir)))

  addDir(input_dir)
  zf.close()
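
# Usage sketch (not part of the commit; directory name hypothetical):
# entries are stored relative to the parent of |input_dir|, so the folder
# name survives inside the archive.
#
#   import os, tempfile, zipfile
#   work = tempfile.mkdtemp()
#   dist = os.path.join(work, 'cef_binary_demo')
#   os.makedirs(dist)
#   open(os.path.join(dist, 'README.txt'), 'w').write('demo\n')
#   create_zip_archive(dist)
#   zipfile.ZipFile(dist + '.zip').namelist()
#   # -> ['cef_binary_demo/README.txt']
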
def create_tar_archive(input_dir, format):
  """ Creates a tar archive of the specified input directory. """
  # Supported formats include "gz" and "bz2".
@@ -38,6 +42,7 @@ def create_tar_archive(input_dir, format):
  tf.add(input_dir, arcname=os.path.basename(input_dir))
  tf.close()


def create_7z_archive(input_dir, format):
  """ Creates a 7z archive of the specified input directory. """
  # CEF_COMMAND_7ZIP might be "c:\Program Files (x86)\7Zip\7z.exe" or /usr/bin/7za
@@ -58,11 +63,13 @@ def create_7z_archive(input_dir, format):
    zip_input = input_dir

  # Create the compressed archive.
  run('"%s" a -t%s -y %s %s' % (command, format, zip_file, zip_input),
      working_dir)

  if not tar_file is None:
    remove_file(tar_file)
def create_output_dir(name, parent_dir):
  """ Creates an output directory and adds the path to the archive list. """
  output_dir = os.path.abspath(os.path.join(parent_dir, name))
@@ -71,6 +78,7 @@ def create_output_dir(name, parent_dir):
  archive_dirs.append(output_dir)
  return output_dir


def get_readme_component(name):
  """ Loads a README file component. """
  paths = []
@@ -88,12 +96,13 @@ def get_readme_component(name):

  # load the file if it exists
  for path in paths:
    file = os.path.join(path, 'README.' + name + '.txt')
    if path_exists(file):
      return read_file(file)

  raise Exception('Readme component not found: ' + name)
def create_readme():
  """ Creates the README.TXT file. """
  # gather the components
@@ -148,6 +157,7 @@ def create_readme():
  if not options.quiet:
    sys.stdout.write('Creating README.TXT file.\n')


def create_fuzed_gtest(tests_dir):
  """ Generate a fuzed version of gtest and build the expected directory structure. """
  src_gtest_dir = os.path.join(src_dir, 'testing', 'gtest')
@@ -177,11 +187,14 @@ def create_fuzed_gtest(tests_dir):
    move_file(gtest_cpp, target_gtest_cpp_dir, options.quiet)

  # gtest LICENSE file at tests/gtest/LICENSE
  copy_file(
      os.path.join(src_gtest_dir, 'LICENSE'), target_gtest_dir, options.quiet)

  # CEF README file at tests/gtest/README.cef
  copy_file(
      os.path.join(cef_dir, 'tests', 'gtest', 'README.cef.in'),
      os.path.join(target_gtest_dir, 'README.cef'), options.quiet)
def transfer_gypi_files(src_dir, gypi_paths, gypi_path_prefix, dst_dir, quiet):
  """ Transfer files from one location to another. """
@@ -192,7 +205,8 @@ def transfer_gypi_files(src_dir, gypi_paths, gypi_path_prefix, dst_dir, quiet):
      make_dir(dst_path, quiet)
    copy_file(src, dst, quiet)


def normalize_headers(file, new_path=''):
  """ Normalize headers post-processing. Remove the path component from any
      project include directives. """
  data = read_file(file)
@@ -200,6 +214,7 @@ def normalize_headers(file, new_path=''):
                "// Include path modified for CEF Binary Distribution.\n#include \""+new_path+"\\1\"", data)
  write_file(file, data)
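
# Illustration only (the actual pattern lives in the hunk elided above):
# "remove the path component" means rewriting a project include and tagging
# the edit, e.g.:
#
#   import re
#   re.sub(r'#include "[^"]*/([^/"]+)"',
#          '// Include path modified for CEF Binary Distribution.\n'
#          '#include "\\1"',
#          '#include "libcef_dll/wrapper/foo.h"')
#   # -> // Include path modified for CEF Binary Distribution.
#   #    #include "foo.h"
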
def eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
  """ Transfer files based on the specified configuration. """
  if not path_exists(transfer_cfg):
@@ -219,8 +234,9 @@ def eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
    # place a readme file in the destination directory
    readme = os.path.join(dst_path, 'README-TRANSFER.txt')
    if not path_exists(readme):
      copy_file(
          os.path.join(script_dir, 'distrib/README-TRANSFER.txt'), readme)
    open(readme, 'ab').write(cfg['source'] + "\n")

    # perform any required post-processing
    if 'post-process' in cfg:
@@ -231,7 +247,9 @@ def eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
        new_path = cfg['new_header_path']
        normalize_headers(dst, new_path)
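
# A transfer.cfg file evaluates to a list of Python-literal dicts.  A
# hypothetical entry using the keys visible above ('source', 'post-process',
# 'new_header_path'; the 'target' key is an assumption):
#
#   [
#     {
#       'source': '../build/Release/some_generated.h',
#       'target': 'include/some_generated.h',
#       'post-process': 'normalize_headers',
#       'new_header_path': '',
#     },
#   ]
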
def transfer_files(cef_dir, script_dir, transfer_cfg_dir, mode, output_dir,
                   quiet):
  # Non-mode-specific transfers.
  transfer_cfg = os.path.join(transfer_cfg_dir, 'transfer.cfg')
  eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet)
@@ -239,9 +257,11 @@ def transfer_files(cef_dir, script_dir, transfer_cfg_dir, mode, output_dir, quie
  transfer_cfg = os.path.join(transfer_cfg_dir, 'transfer_%s.cfg' % mode)
  eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet)


def combine_libs(build_dir, libs, dest_lib):
  """ Combine multiple static libraries into a single static library. """
  cmdline = 'msvs_env.bat win%s python combine_libs.py -o "%s"' % (
      platform_arch, dest_lib)
  for lib in libs:
    lib_path = os.path.join(build_dir, lib)
    for path in get_files(lib_path):  # Expand wildcards in |lib_path|.
@@ -250,13 +270,15 @@ def combine_libs(build_dir, libs, dest_lib):
      cmdline = cmdline + ' "%s"' % path
  run(cmdline, os.path.join(cef_dir, 'tools'))
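
# The assembled command line then looks something like (paths hypothetical):
#
#   msvs_env.bat win32 python combine_libs.py -o "out\Release\cef_sandbox.lib"
#       "obj\base\base.lib" "obj\cef\cef_sandbox.lib" ...
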
def run(command_line, working_dir):
  """ Run a command. """
  sys.stdout.write('-------- Running "'+command_line+'" in "'+\
                   working_dir+'"...'+"\n")
  args = shlex.split(command_line.replace('\\', '\\\\'))
  return subprocess.check_call(
      args, cwd=working_dir, env=os.environ, shell=(sys.platform == 'win32'))


# cannot be loaded as a module
if __name__ != "__main__":
@@ -269,45 +291,81 @@ This utility builds the CEF Binary Distribution.
"""

parser = OptionParser(description=disc)
parser.add_option(
    '--output-dir',
    dest='outputdir',
    metavar='DIR',
    help='output directory [required]')
parser.add_option(
    '--distrib-subdir',
    dest='distribsubdir',
    help='name of the subdirectory for the distribution',
    default='')
parser.add_option(
    '--allow-partial',
    action='store_true',
    dest='allowpartial',
    default=False,
    help='allow creation of partial distributions')
parser.add_option(
    '--no-symbols',
    action='store_true',
    dest='nosymbols',
    default=False,
    help='don\'t create symbol files')
parser.add_option(
    '--no-docs',
    action='store_true',
    dest='nodocs',
    default=False,
    help='don\'t create documentation')
parser.add_option(
    '--no-archive',
    action='store_true',
    dest='noarchive',
    default=False,
    help='don\'t create archives for output directories')
parser.add_option(
    '--ninja-build',
    action='store_true',
    dest='ninjabuild',
    default=False,
    help='build was created using ninja')
parser.add_option(
    '--x64-build',
    action='store_true',
    dest='x64build',
    default=False,
    help='create a 64-bit binary distribution')
parser.add_option(
    '--arm-build',
    action='store_true',
    dest='armbuild',
    default=False,
    help='create an ARM binary distribution')
parser.add_option(
    '--minimal',
    action='store_true',
    dest='minimal',
    default=False,
    help='include only release build binary files')
parser.add_option(
    '--client',
    action='store_true',
    dest='client',
    default=False,
    help='include only the sample application')
parser.add_option(
    '-q',
    '--quiet',
    action='store_true',
    dest='quiet',
    default=False,
    help='do not output detailed status information')
(options, args) = parser.parse_args()
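
# Typical invocation (output path hypothetical):
#
#   python make_distrib.py --output-dir=/path/to/binary_distrib \
#       --ninja-build --x64-build
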
# Test the operating system.
platform = ''
if sys.platform == 'win32':
  platform = 'windows'
elif sys.platform == 'darwin':
@@ -369,8 +427,9 @@ args = {}
read_version_file(os.path.join(cef_dir, 'VERSION'), args)
read_version_file(os.path.join(cef_dir, '../chrome/VERSION'), args)

cef_ver = '%s.%s.%s.g%s' % (args['CEF_MAJOR'], args['BUILD'], cef_commit_number,
                            cef_rev[:7])
chromium_ver = args['MAJOR'] + '.' + args['MINOR'] + '.' + args['BUILD'] + '.' + args['PATCH']

# list of output directories to be archived
archive_dirs = []
@@ -456,8 +515,8 @@ if mode == 'standard' or mode == 'minimal':
  # Transfer generated include files.
  generated_includes = [
      'cef_pack_resources.h',
      'cef_pack_strings.h',
  ]
  for include in generated_includes:
    # Debug and Release build should be the same so grab whichever exists.
@@ -586,12 +645,12 @@ if platform == 'windows':
  libcef_dll_file = 'libcef.dll.lib'
  sandbox_libs = [
      'obj\\base\\allocator\\unified_allocator_shim\\*.obj',
      'obj\\base\\base.lib',
      'obj\\base\\base_static.lib',
      'obj\\base\\third_party\\dynamic_annotations\\dynamic_annotations.lib',
      'obj\\cef\\cef_sandbox.lib',
      'obj\\sandbox\\win\\sandbox.lib',
  ]

  valid_build_dir = None
@@ -599,47 +658,64 @@ if platform == 'windows':
  if mode == 'standard':
    # transfer Debug files
    build_dir = build_dir_debug
    if not options.allowpartial or path_exists(
        os.path.join(build_dir, 'libcef.dll')):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Debug')
      make_dir(dst_dir, options.quiet)
      copy_files(
          os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
      for binary in binaries:
        copy_file(
            os.path.join(build_dir, binary),
            os.path.join(dst_dir, os.path.basename(binary)), options.quiet)
      copy_file(os.path.join(build_dir, libcef_dll_file), os.path.join(dst_dir, 'libcef.lib'), \
                options.quiet)
      combine_libs(build_dir, sandbox_libs,
                   os.path.join(dst_dir, 'cef_sandbox.lib'))

      if not options.nosymbols:
        # create the symbol output directory
        symbol_output_dir = create_output_dir(
            output_dir_name + '_debug_symbols', options.outputdir)
        # transfer contents
        copy_file(
            os.path.join(build_dir, 'libcef.dll.pdb'), symbol_output_dir,
            options.quiet)
    else:
      sys.stderr.write("No Debug build files.\n")

  # transfer Release files
  build_dir = build_dir_release
  if not options.allowpartial or path_exists(
      os.path.join(build_dir, 'libcef.dll')):
    valid_build_dir = build_dir
    dst_dir = os.path.join(output_dir, 'Release')
    make_dir(dst_dir, options.quiet)
    copy_files(
        os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
    for binary in binaries:
      copy_file(
          os.path.join(build_dir, binary),
          os.path.join(dst_dir, os.path.basename(binary)), options.quiet)

    if mode != 'client':
      copy_file(os.path.join(build_dir, libcef_dll_file), os.path.join(dst_dir, 'libcef.lib'), \
                options.quiet)
      combine_libs(build_dir, sandbox_libs,
                   os.path.join(dst_dir, 'cef_sandbox.lib'))
    else:
      copy_file(
          os.path.join(build_dir, 'cefclient.exe'), dst_dir, options.quiet)

    if not options.nosymbols:
      # create the symbol output directory
      symbol_output_dir = create_output_dir(
          output_dir_name + '_release_symbols', options.outputdir)
      # transfer contents
      copy_file(
          os.path.join(build_dir, 'libcef.dll.pdb'), symbol_output_dir,
          options.quiet)
  else:
    sys.stderr.write("No Release build files.\n")

@@ -652,12 +728,19 @@ if platform == 'windows':
    dst_dir = os.path.join(output_dir, 'Resources')
    make_dir(dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet)
    copy_file(
        os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet)
    copy_file(
        os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet)
    copy_file(
        os.path.join(build_dir, 'cef_extensions.pak'), dst_dir, options.quiet)
    copy_file(
        os.path.join(build_dir, 'devtools_resources.pak'), dst_dir,
        options.quiet)
    copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet)
    copy_dir(
        os.path.join(build_dir, 'locales'),
        os.path.join(dst_dir, 'locales'), options.quiet)

  if mode == 'standard' or mode == 'minimal':
    # transfer include files
@@ -689,12 +772,13 @@ if platform == 'windows':

  if not options.nodocs:
    # generate doc files
    os.popen('make_cppdocs.bat ' + cef_rev)

    src_dir = os.path.join(cef_dir, 'docs')
    if path_exists(src_dir):
      # create the docs output directory
      docs_output_dir = create_output_dir(output_dir_base + '_docs',
                                          options.outputdir)
      # transfer contents
      copy_dir(src_dir, docs_output_dir, options.quiet)
@@ -705,46 +789,60 @@ elif platform == 'macosx':
  if mode == 'standard':
    # transfer Debug files
    build_dir = build_dir_debug
    if not options.allowpartial or path_exists(
        os.path.join(build_dir, 'cefclient.app')):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Debug')
      make_dir(dst_dir, options.quiet)
      copy_dir(os.path.join(build_dir, 'cefclient.app/Contents/Frameworks/%s.framework' % framework_name), \
               os.path.join(dst_dir, '%s.framework' % framework_name), options.quiet)
      copy_file(
          os.path.join(script_dir, 'distrib/mac/widevinecdmadapter.plugin'),
          dst_dir, options.quiet)

      if not options.nosymbols:
        # create the symbol output directory
        symbol_output_dir = create_output_dir(
            output_dir_name + '_debug_symbols', options.outputdir)
        # The real dSYM already exists, just copy it to the output directory.
        # dSYMs are only generated when is_official_build=true or enable_dsyms=true.
        # See //build/config/mac/symbols.gni.
        copy_dir(
            os.path.join(build_dir, '%s.dSYM' % framework_name),
            os.path.join(symbol_output_dir, '%s.dSYM' % framework_name),
            options.quiet)

  # transfer Release files
  build_dir = build_dir_release
  if not options.allowpartial or path_exists(
      os.path.join(build_dir, 'cefclient.app')):
    valid_build_dir = build_dir
    dst_dir = os.path.join(output_dir, 'Release')
    make_dir(dst_dir, options.quiet)
    if mode != 'client':
      copy_dir(os.path.join(build_dir, 'cefclient.app/Contents/Frameworks/%s.framework' % framework_name), \
               os.path.join(dst_dir, '%s.framework' % framework_name), options.quiet)
      copy_file(
          os.path.join(script_dir, 'distrib/mac/widevinecdmadapter.plugin'),
          dst_dir, options.quiet)
    else:
      copy_dir(
          os.path.join(build_dir, 'cefclient.app'),
          os.path.join(dst_dir, 'cefclient.app'), options.quiet)

    if not options.nosymbols:
      # create the symbol output directory
      symbol_output_dir = create_output_dir(
          output_dir_name + '_release_symbols', options.outputdir)
      # The real dSYM already exists, just copy it to the output directory.
      # dSYMs are only generated when is_official_build=true or enable_dsyms=true.
      # See //build/config/mac/symbols.gni.
      copy_dir(
          os.path.join(build_dir, '%s.dSYM' % framework_name),
          os.path.join(symbol_output_dir, '%s.dSYM' % framework_name),
          options.quiet)
@ -804,11 +902,17 @@ elif platform == 'linux':
valid_build_dir = build_dir valid_build_dir = build_dir
dst_dir = os.path.join(output_dir, 'Debug') dst_dir = os.path.join(output_dir, 'Debug')
make_dir(dst_dir, options.quiet) make_dir(dst_dir, options.quiet)
copy_file(os.path.join(build_dir, 'chrome_sandbox'), os.path.join(dst_dir, 'chrome-sandbox'), options.quiet) copy_file(
os.path.join(build_dir, 'chrome_sandbox'),
os.path.join(dst_dir, 'chrome-sandbox'), options.quiet)
copy_file(libcef_path, dst_dir, options.quiet) copy_file(libcef_path, dst_dir, options.quiet)
copy_file(os.path.join(build_dir, 'libwidevinecdmadapter.so'), dst_dir, options.quiet) copy_file(
copy_file(os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet) os.path.join(build_dir, 'libwidevinecdmadapter.so'), dst_dir,
copy_file(os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet) options.quiet)
copy_file(
os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet)
copy_file(
os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet)
else: else:
sys.stderr.write("No Debug build files.\n") sys.stderr.write("No Debug build files.\n")
@ -823,10 +927,16 @@ elif platform == 'linux':
if mode == 'client': if mode == 'client':
copy_file(os.path.join(build_dir, 'cefsimple'), dst_dir, options.quiet) copy_file(os.path.join(build_dir, 'cefsimple'), dst_dir, options.quiet)
copy_file(libcef_path, dst_dir, options.quiet) copy_file(libcef_path, dst_dir, options.quiet)
copy_file(os.path.join(build_dir, 'chrome_sandbox'), os.path.join(dst_dir, 'chrome-sandbox'), options.quiet) copy_file(
copy_file(os.path.join(build_dir, 'libwidevinecdmadapter.so'), dst_dir, options.quiet) os.path.join(build_dir, 'chrome_sandbox'),
copy_file(os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet) os.path.join(dst_dir, 'chrome-sandbox'), options.quiet)
copy_file(os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet) copy_file(
os.path.join(build_dir, 'libwidevinecdmadapter.so'), dst_dir,
options.quiet)
copy_file(
os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet)
copy_file(
os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet)
else: else:
sys.stderr.write("No Release build files.\n") sys.stderr.write("No Release build files.\n")
@ -839,12 +949,19 @@ elif platform == 'linux':
dst_dir = os.path.join(output_dir, 'Resources') dst_dir = os.path.join(output_dir, 'Resources')
make_dir(dst_dir, options.quiet) make_dir(dst_dir, options.quiet)
copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet) copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet)
copy_file(os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet) copy_file(
copy_file(os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet) os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet)
copy_file(os.path.join(build_dir, 'cef_extensions.pak'), dst_dir, options.quiet) copy_file(
copy_file(os.path.join(build_dir, 'devtools_resources.pak'), dst_dir, options.quiet) os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet)
copy_file(
os.path.join(build_dir, 'cef_extensions.pak'), dst_dir, options.quiet)
copy_file(
os.path.join(build_dir, 'devtools_resources.pak'), dst_dir,
options.quiet)
copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet) copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet)
copy_dir(os.path.join(build_dir, 'locales'), os.path.join(dst_dir, 'locales'), options.quiet) copy_dir(
os.path.join(build_dir, 'locales'),
os.path.join(dst_dir, 'locales'), options.quiet)
if mode == 'standard' or mode == 'minimal': if mode == 'standard' or mode == 'minimal':
# transfer include files # transfer include files
@ -885,7 +1002,8 @@ if not options.noarchive:
for dir in archive_dirs: for dir in archive_dirs:
if not options.quiet: if not options.quiet:
sys.stdout.write("Creating %s archive for %s...\n" % (archive_format, os.path.basename(dir))) sys.stdout.write("Creating %s archive for %s...\n" %
(archive_format, os.path.basename(dir)))
if archive_format == 'zip': if archive_format == 'zip':
create_zip_archive(dir) create_zip_archive(dir)
elif archive_format == 'tar.gz': elif archive_format == 'tar.gz':
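The archive step above just dispatches on archive_format. For readers unfamiliar with the helpers involved, here is a minimal standard-library sketch of what zip and tar.gz archive creation for a directory looks like; the function names and layout are illustrative, not the distrib script's actual implementation:

import os
import tarfile
import zipfile


def create_zip_archive_sketch(input_dir):
  """ Illustrative: archive |input_dir| as <input_dir>.zip. """
  parent = os.path.dirname(input_dir)
  with zipfile.ZipFile(input_dir + '.zip', 'w', zipfile.ZIP_DEFLATED) as zf:
    for root, dirs, files in os.walk(input_dir):
      for name in files:
        path = os.path.join(root, name)
        # Store paths relative to the parent so the archive root is the
        # distribution directory itself.
        zf.write(path, os.path.relpath(path, parent))


def create_tar_archive_sketch(input_dir):
  """ Illustrative: archive |input_dir| as <input_dir>.tar.gz. """
  with tarfile.open(input_dir + '.tar.gz', 'w:gz') as tf:
    tf.add(input_dir, arcname=os.path.basename(input_dir))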
@ -4,9 +4,10 @@
from cef_parser import * from cef_parser import *
def make_gypi_file(header): def make_gypi_file(header):
# header string # header string
result = \ result = \
"""# Copyright (c) $YEAR$ The Chromium Embedded Framework Authors. All rights """# Copyright (c) $YEAR$ The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that # reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file. # can be found in the LICENSE file.
@ -23,83 +24,84 @@ def make_gypi_file(header):
{ {
'variables': { 'variables': {
""" """
filenames = sorted(header.get_file_names()) filenames = sorted(header.get_file_names())
# cpp includes # cpp includes
result += " 'autogen_cpp_includes': [\n" result += " 'autogen_cpp_includes': [\n"
for filename in filenames: for filename in filenames:
result += " 'include/"+filename+"',\n" result += " 'include/" + filename + "',\n"
result += " ],\n" result += " ],\n"
# capi includes # capi includes
result += " 'autogen_capi_includes': [\n" result += " 'autogen_capi_includes': [\n"
for filename in filenames: for filename in filenames:
result += " 'include/capi/"+get_capi_file_name(filename)+"',\n" result += " 'include/capi/" + get_capi_file_name(filename) + "',\n"
result += " ],\n" result += " ],\n"
classes = sorted(header.get_class_names()) classes = sorted(header.get_class_names())
# library side includes # library side includes
result += " 'autogen_library_side': [\n" result += " 'autogen_library_side': [\n"
for clsname in classes: for clsname in classes:
cls = header.get_class(clsname) cls = header.get_class(clsname)
filename = get_capi_name(clsname[3:], False) filename = get_capi_name(clsname[3:], False)
dir = cls.get_file_directory() dir = cls.get_file_directory()
if not dir is None: if not dir is None:
filename = dir+'/'+filename filename = dir + '/' + filename
if cls.is_library_side(): if cls.is_library_side():
result += " 'libcef_dll/cpptoc/"+filename+"_cpptoc.cc',\n" \ result += " 'libcef_dll/cpptoc/"+filename+"_cpptoc.cc',\n" \
" 'libcef_dll/cpptoc/"+filename+"_cpptoc.h',\n" " 'libcef_dll/cpptoc/"+filename+"_cpptoc.h',\n"
else: else:
result += " 'libcef_dll/ctocpp/"+filename+"_ctocpp.cc',\n" \ result += " 'libcef_dll/ctocpp/"+filename+"_ctocpp.cc',\n" \
" 'libcef_dll/ctocpp/"+filename+"_ctocpp.h',\n" " 'libcef_dll/ctocpp/"+filename+"_ctocpp.h',\n"
result += " ],\n" result += " ],\n"
# client side includes # client side includes
result += " 'autogen_client_side': [\n" result += " 'autogen_client_side': [\n"
for clsname in classes: for clsname in classes:
cls = header.get_class(clsname) cls = header.get_class(clsname)
filename = get_capi_name(clsname[3:], False) filename = get_capi_name(clsname[3:], False)
dir = cls.get_file_directory() dir = cls.get_file_directory()
if not dir is None: if not dir is None:
filename = dir+'/'+filename filename = dir + '/' + filename
if cls.is_library_side(): if cls.is_library_side():
result += " 'libcef_dll/ctocpp/"+filename+"_ctocpp.cc',\n" \ result += " 'libcef_dll/ctocpp/"+filename+"_ctocpp.cc',\n" \
" 'libcef_dll/ctocpp/"+filename+"_ctocpp.h',\n" " 'libcef_dll/ctocpp/"+filename+"_ctocpp.h',\n"
else: else:
result += " 'libcef_dll/cpptoc/"+filename+"_cpptoc.cc',\n" \ result += " 'libcef_dll/cpptoc/"+filename+"_cpptoc.cc',\n" \
" 'libcef_dll/cpptoc/"+filename+"_cpptoc.h',\n" " 'libcef_dll/cpptoc/"+filename+"_cpptoc.h',\n"
result += " ],\n" result += " ],\n"
# footer string # footer string
result += \ result += \
""" }, """ },
} }
""" """
# add the copyright year
result = result.replace('$YEAR$', get_year())
return result # add the copyright year
result = result.replace('$YEAR$', get_year())
return result
def write_gypi_file(header, file): def write_gypi_file(header, file):
newcontents = make_gypi_file(header) newcontents = make_gypi_file(header)
return (file, newcontents) return (file, newcontents)
# test the module # test the module
if __name__ == "__main__": if __name__ == "__main__":
import sys import sys
# verify that the correct number of command-line arguments are provided # verify that the correct number of command-line arguments are provided
if len(sys.argv) < 2: if len(sys.argv) < 2:
sys.stderr.write('Usage: '+sys.argv[0]+' <infile>') sys.stderr.write('Usage: ' + sys.argv[0] + ' <infile>')
sys.exit() sys.exit()
# create the header object # create the header object
header = obj_header() header = obj_header()
header.add_file(sys.argv[1]) header.add_file(sys.argv[1])
# dump the result to stdout # dump the result to stdout
sys.stdout.write(make_gypi_file(header)) sys.stdout.write(make_gypi_file(header))
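To make the generator's output concrete, the emitted .gypi file is a Python-literal dictionary along the following lines. The file names shown are illustrative examples of the pattern (one include/ header, its capi counterpart, and the cpptoc/ctocpp pair per class), not a dump of the real generated file:

# Copyright (c) 2017 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.
{
  'variables': {
    'autogen_cpp_includes': [
      'include/cef_browser.h',
    ],
    'autogen_capi_includes': [
      'include/capi/cef_browser_capi.h',
    ],
    'autogen_library_side': [
      'libcef_dll/cpptoc/browser_cpptoc.cc',
      'libcef_dll/cpptoc/browser_cpptoc.h',
    ],
    'autogen_client_side': [
      'libcef_dll/ctocpp/browser_ctocpp.cc',
      'libcef_dll/ctocpp/browser_ctocpp.h',
    ],
  },
}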
@ -2,7 +2,6 @@
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights # Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license # reserved. Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file. # that can be found in the LICENSE file.
""" """
A simple utility function to merge pack resource files into a single resource file. A simple utility function to merge pack resource files into a single resource file.
""" """
@ -95,7 +94,7 @@ def MakeFile(output, input):
result = result.replace('$YEAR$', get_year()) result = result.replace('$YEAR$', get_year())
# add the guard string # add the guard string
filename = os.path.split(output)[1] filename = os.path.split(output)[1]
guard = 'CEF_INCLUDE_'+string.upper(filename.replace('.', '_'))+'_' guard = 'CEF_INCLUDE_' + string.upper(filename.replace('.', '_')) + '_'
result = result.replace('$GUARD$', guard) result = result.replace('$GUARD$', guard)
if path_exists(output): if path_exists(output):
@ -105,14 +104,15 @@ def MakeFile(output, input):
if (result != old_contents): if (result != old_contents):
write_file(output, result) write_file(output, result)
sys.stdout.write('File '+output+' updated.\n') sys.stdout.write('File ' + output + ' updated.\n')
else: else:
sys.stdout.write('File '+output+' is already up to date.\n') sys.stdout.write('File ' + output + ' is already up to date.\n')
def main(argv): def main(argv):
if len(argv) < 3: if len(argv) < 3:
print ("Usage:\n %s <output_filename> <input_file1> [input_file2] ... " % print("Usage:\n %s <output_filename> <input_file1> [input_file2] ... " %
argv[0]) argv[0])
sys.exit(-1) sys.exit(-1)
MakeFile(argv[1], argv[2:]) MakeFile(argv[1], argv[2:])
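Two patterns in the hunk above recur throughout these tools: deriving a C header guard from the output filename, and rewriting the output only when the generated contents actually differ, so timestamps and downstream rebuilds are not churned needlessly. A self-contained sketch of both, with a hypothetical helper name:

import os


def write_if_changed(output, result):
  """ Sketch: fill in the $GUARD$ token and write |output| only when the
      contents differ from what is already on disk. """
  base = os.path.basename(output).replace('.', '_').upper()
  guard = 'CEF_INCLUDE_' + base + '_'
  result = result.replace('$GUARD$', guard)
  old_contents = ''
  if os.path.exists(output):
    with open(output, 'r') as f:
      old_contents = f.read()
  if result == old_contents:
    return False  # already up to date
  with open(output, 'w') as f:
    f.write(result)
  return True  # updated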
@ -11,9 +11,8 @@ import sys
# cannot be loaded as a module # cannot be loaded as a module
if __name__ != "__main__": if __name__ != "__main__":
sys.stderr.write('This file cannot be loaded as a module!') sys.stderr.write('This file cannot be loaded as a module!')
sys.exit() sys.exit()
# parse command-line options # parse command-line options
disc = """ disc = """
@ -21,153 +20,174 @@ This utility creates the version header file.
""" """
parser = OptionParser(description=disc) parser = OptionParser(description=disc)
parser.add_option('--header', dest='header', metavar='FILE', parser.add_option(
help='output version header file [required]') '--header',
parser.add_option('--cef_version', dest='cef_version', metavar='FILE', dest='header',
help='input CEF version config file [required]') metavar='FILE',
parser.add_option('--chrome_version', dest='chrome_version', metavar='FILE', help='output version header file [required]')
help='input Chrome version config file [required]') parser.add_option(
parser.add_option('--cpp_header_dir', dest='cpp_header_dir', metavar='DIR', '--cef_version',
help='input directory for C++ header files [required]') dest='cef_version',
parser.add_option('-q', '--quiet', metavar='FILE',
action='store_true', dest='quiet', default=False, help='input CEF version config file [required]')
help='do not output detailed status information') parser.add_option(
'--chrome_version',
dest='chrome_version',
metavar='FILE',
help='input Chrome version config file [required]')
parser.add_option(
'--cpp_header_dir',
dest='cpp_header_dir',
metavar='DIR',
help='input directory for C++ header files [required]')
parser.add_option(
'-q',
'--quiet',
action='store_true',
dest='quiet',
default=False,
help='do not output detailed status information')
(options, args) = parser.parse_args() (options, args) = parser.parse_args()
# the header option is required # the header option is required
if options.header is None or options.cef_version is None or options.chrome_version is None or options.cpp_header_dir is None: if options.header is None or options.cef_version is None or options.chrome_version is None or options.cpp_header_dir is None:
parser.print_help(sys.stdout) parser.print_help(sys.stdout)
sys.exit() sys.exit()
def write_version_header(header, chrome_version, cef_version, cpp_header_dir): def write_version_header(header, chrome_version, cef_version, cpp_header_dir):
""" Creates the header file for the current revision and Chrome version information """ Creates the header file for the current revision and Chrome version information
if the information has changed or if the file doesn't already exist. """ if the information has changed or if the file doesn't already exist. """
if not path_exists(chrome_version): if not path_exists(chrome_version):
raise Exception('Chrome version file '+chrome_version+' does not exist.') raise Exception('Chrome version file ' + chrome_version +
if not path_exists(cef_version): ' does not exist.')
raise Exception('CEF version file '+cef_version+' does not exist.') if not path_exists(cef_version):
raise Exception('CEF version file ' + cef_version + ' does not exist.')
args = {} args = {}
read_version_file(chrome_version, args) read_version_file(chrome_version, args)
read_version_file(cef_version, args) read_version_file(cef_version, args)
if path_exists(header): if path_exists(header):
oldcontents = read_file(header) oldcontents = read_file(header)
else: else:
oldcontents = '' oldcontents = ''
year = get_year() year = get_year()
if not git.is_checkout('.'): if not git.is_checkout('.'):
raise Exception('Not a valid checkout') raise Exception('Not a valid checkout')
commit_number = git.get_commit_number() commit_number = git.get_commit_number()
commit_hash = git.get_hash() commit_hash = git.get_hash()
version = '%s.%s.%s.g%s' % (args['CEF_MAJOR'], args['BUILD'], commit_number, commit_hash[:7]) version = '%s.%s.%s.g%s' % (args['CEF_MAJOR'], args['BUILD'], commit_number,
commit_hash[:7])
# calculate api hashes # calculate api hashes
api_hash_calculator = cef_api_hash(cpp_header_dir, verbose = False) api_hash_calculator = cef_api_hash(cpp_header_dir, verbose=False)
api_hashes = api_hash_calculator.calculate() api_hashes = api_hash_calculator.calculate()
newcontents = '// Copyright (c) '+year+' Marshall A. Greenblatt. All rights reserved.\n'+\ newcontents = '// Copyright (c) '+year+' Marshall A. Greenblatt. All rights reserved.\n'+\
'//\n'+\ '//\n'+\
'// Redistribution and use in source and binary forms, with or without\n'+\ '// Redistribution and use in source and binary forms, with or without\n'+\
'// modification, are permitted provided that the following conditions are\n'+\ '// modification, are permitted provided that the following conditions are\n'+\
'// met:\n'+\ '// met:\n'+\
'//\n'+\ '//\n'+\
'// * Redistributions of source code must retain the above copyright\n'+\ '// * Redistributions of source code must retain the above copyright\n'+\
'// notice, this list of conditions and the following disclaimer.\n'+\ '// notice, this list of conditions and the following disclaimer.\n'+\
'// * Redistributions in binary form must reproduce the above\n'+\ '// * Redistributions in binary form must reproduce the above\n'+\
'// copyright notice, this list of conditions and the following disclaimer\n'+\ '// copyright notice, this list of conditions and the following disclaimer\n'+\
'// in the documentation and/or other materials provided with the\n'+\ '// in the documentation and/or other materials provided with the\n'+\
'// distribution.\n'+\ '// distribution.\n'+\
'// * Neither the name of Google Inc. nor the name Chromium Embedded\n'+\ '// * Neither the name of Google Inc. nor the name Chromium Embedded\n'+\
'// Framework nor the names of its contributors may be used to endorse\n'+\ '// Framework nor the names of its contributors may be used to endorse\n'+\
'// or promote products derived from this software without specific prior\n'+\ '// or promote products derived from this software without specific prior\n'+\
'// written permission.\n'+\ '// written permission.\n'+\
'//\n'+\ '//\n'+\
'// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n'+\ '// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n'+\
'// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n'+\ '// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n'+\
'// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n'+\ '// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n'+\
'// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n'+\ '// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n'+\
'// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n'+\ '// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n'+\
'// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n'+\ '// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n'+\
'// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n'+\ '// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n'+\
'// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n'+\ '// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n'+\
'// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n'+\ '// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n'+\
'// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n'+\ '// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n'+\
'// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n'+\ '// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n'+\
'//\n'+\ '//\n'+\
'// ---------------------------------------------------------------------------\n'+\ '// ---------------------------------------------------------------------------\n'+\
'//\n'+\ '//\n'+\
'// This file is generated by the make_version_header.py tool.\n'+\ '// This file is generated by the make_version_header.py tool.\n'+\
'//\n\n'+\ '//\n\n'+\
'#ifndef CEF_INCLUDE_CEF_VERSION_H_\n'+\ '#ifndef CEF_INCLUDE_CEF_VERSION_H_\n'+\
'#define CEF_INCLUDE_CEF_VERSION_H_\n\n'+\ '#define CEF_INCLUDE_CEF_VERSION_H_\n\n'+\
'#define CEF_VERSION "' + version + '"\n'+\ '#define CEF_VERSION "' + version + '"\n'+\
'#define CEF_VERSION_MAJOR ' + args['CEF_MAJOR'] + '\n'+\ '#define CEF_VERSION_MAJOR ' + args['CEF_MAJOR'] + '\n'+\
'#define CEF_COMMIT_NUMBER ' + commit_number + '\n'+\ '#define CEF_COMMIT_NUMBER ' + commit_number + '\n'+\
'#define CEF_COMMIT_HASH "' + commit_hash + '"\n'+\ '#define CEF_COMMIT_HASH "' + commit_hash + '"\n'+\
'#define COPYRIGHT_YEAR ' + year + '\n\n'+\ '#define COPYRIGHT_YEAR ' + year + '\n\n'+\
'#define CHROME_VERSION_MAJOR ' + args['MAJOR'] + '\n'+\ '#define CHROME_VERSION_MAJOR ' + args['MAJOR'] + '\n'+\
'#define CHROME_VERSION_MINOR ' + args['MINOR'] + '\n'+\ '#define CHROME_VERSION_MINOR ' + args['MINOR'] + '\n'+\
'#define CHROME_VERSION_BUILD ' + args['BUILD'] + '\n'+\ '#define CHROME_VERSION_BUILD ' + args['BUILD'] + '\n'+\
'#define CHROME_VERSION_PATCH ' + args['PATCH'] + '\n\n'+\ '#define CHROME_VERSION_PATCH ' + args['PATCH'] + '\n\n'+\
'#define DO_MAKE_STRING(p) #p\n'+\ '#define DO_MAKE_STRING(p) #p\n'+\
'#define MAKE_STRING(p) DO_MAKE_STRING(p)\n\n'+\ '#define MAKE_STRING(p) DO_MAKE_STRING(p)\n\n'+\
'#ifndef APSTUDIO_HIDDEN_SYMBOLS\n\n'\ '#ifndef APSTUDIO_HIDDEN_SYMBOLS\n\n'\
'#include "include/internal/cef_export.h"\n\n'+\ '#include "include/internal/cef_export.h"\n\n'+\
'#ifdef __cplusplus\n'+\ '#ifdef __cplusplus\n'+\
'extern "C" {\n'+\ 'extern "C" {\n'+\
'#endif\n\n'+\ '#endif\n\n'+\
'// The API hash is created by analyzing CEF header files for C API type\n'+\ '// The API hash is created by analyzing CEF header files for C API type\n'+\
'// definitions. The hash value will change when header files are modified\n'+\ '// definitions. The hash value will change when header files are modified\n'+\
'// in a way that may cause binary incompatibility with other builds. The\n'+\ '// in a way that may cause binary incompatibility with other builds. The\n'+\
'// universal hash value will change if any platform is affected whereas the\n'+\ '// universal hash value will change if any platform is affected whereas the\n'+\
'// platform hash values will change only if that particular platform is\n'+\ '// platform hash values will change only if that particular platform is\n'+\
'// affected.\n'+\ '// affected.\n'+\
'#define CEF_API_HASH_UNIVERSAL "' + api_hashes['universal'] + '"\n'+\ '#define CEF_API_HASH_UNIVERSAL "' + api_hashes['universal'] + '"\n'+\
'#if defined(OS_WIN)\n'+\ '#if defined(OS_WIN)\n'+\
'#define CEF_API_HASH_PLATFORM "' + api_hashes['windows'] + '"\n'+\ '#define CEF_API_HASH_PLATFORM "' + api_hashes['windows'] + '"\n'+\
'#elif defined(OS_MACOSX)\n'+\ '#elif defined(OS_MACOSX)\n'+\
'#define CEF_API_HASH_PLATFORM "' + api_hashes['macosx'] + '"\n'+\ '#define CEF_API_HASH_PLATFORM "' + api_hashes['macosx'] + '"\n'+\
'#elif defined(OS_LINUX)\n'+\ '#elif defined(OS_LINUX)\n'+\
'#define CEF_API_HASH_PLATFORM "' + api_hashes['linux'] + '"\n'+\ '#define CEF_API_HASH_PLATFORM "' + api_hashes['linux'] + '"\n'+\
'#endif\n\n'+\ '#endif\n\n'+\
'// Returns CEF version information for the libcef library. The |entry|\n'+\ '// Returns CEF version information for the libcef library. The |entry|\n'+\
'// parameter describes which version component will be returned:\n'+\ '// parameter describes which version component will be returned:\n'+\
'// 0 - CEF_VERSION_MAJOR\n'+\ '// 0 - CEF_VERSION_MAJOR\n'+\
'// 1 - CEF_COMMIT_NUMBER\n'+\ '// 1 - CEF_COMMIT_NUMBER\n'+\
'// 2 - CHROME_VERSION_MAJOR\n'+\ '// 2 - CHROME_VERSION_MAJOR\n'+\
'// 3 - CHROME_VERSION_MINOR\n'+\ '// 3 - CHROME_VERSION_MINOR\n'+\
'// 4 - CHROME_VERSION_BUILD\n'+\ '// 4 - CHROME_VERSION_BUILD\n'+\
'// 5 - CHROME_VERSION_PATCH\n'+\ '// 5 - CHROME_VERSION_PATCH\n'+\
'///\n'+\ '///\n'+\
'CEF_EXPORT int cef_version_info(int entry);\n\n'+\ 'CEF_EXPORT int cef_version_info(int entry);\n\n'+\
'///\n'+\ '///\n'+\
'// Returns CEF API hashes for the libcef library. The returned string is owned\n'+\ '// Returns CEF API hashes for the libcef library. The returned string is owned\n'+\
'// by the library and should not be freed. The |entry| parameter describes which\n'+\ '// by the library and should not be freed. The |entry| parameter describes which\n'+\
'// hash value will be returned:\n'+\ '// hash value will be returned:\n'+\
'// 0 - CEF_API_HASH_PLATFORM\n'+\ '// 0 - CEF_API_HASH_PLATFORM\n'+\
'// 1 - CEF_API_HASH_UNIVERSAL\n'+\ '// 1 - CEF_API_HASH_UNIVERSAL\n'+\
'// 2 - CEF_COMMIT_HASH\n'+\ '// 2 - CEF_COMMIT_HASH\n'+\
'///\n'+\ '///\n'+\
'CEF_EXPORT const char* cef_api_hash(int entry);\n\n'+\ 'CEF_EXPORT const char* cef_api_hash(int entry);\n\n'+\
'#ifdef __cplusplus\n'+\ '#ifdef __cplusplus\n'+\
'}\n'+\ '}\n'+\
'#endif\n\n'+\ '#endif\n\n'+\
'#endif // APSTUDIO_HIDDEN_SYMBOLS\n\n'+\ '#endif // APSTUDIO_HIDDEN_SYMBOLS\n\n'+\
'#endif // CEF_INCLUDE_CEF_VERSION_H_\n' '#endif // CEF_INCLUDE_CEF_VERSION_H_\n'
if newcontents != oldcontents: if newcontents != oldcontents:
write_file(header, newcontents) write_file(header, newcontents)
return True return True
return False return False
written = write_version_header(options.header, options.chrome_version, options.cef_version, options.cpp_header_dir)
written = write_version_header(options.header, options.chrome_version,
options.cef_version, options.cpp_header_dir)
if not options.quiet: if not options.quiet:
if written: if written:
sys.stdout.write('File '+options.header+' updated.\n') sys.stdout.write('File ' + options.header + ' updated.\n')
else: else:
sys.stdout.write('File '+options.header+' is already up to date.\n') sys.stdout.write('File ' + options.header + ' is already up to date.\n')
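As a worked example of the version string assembled above: CEF_MAJOR and BUILD come from the two version config files, while the commit number and hash come from git. All values below are made up for illustration:

# Hypothetical inputs, for illustration only.
args = {'CEF_MAJOR': '3', 'BUILD': '3071'}
commit_number = '1634'
commit_hash = '1a2b3c4d5e6f7081920a1b2c3d4e5f6071829303'

version = '%s.%s.%s.g%s' % (args['CEF_MAJOR'], args['BUILD'], commit_number,
                            commit_hash[:7])
# -> '3.3071.1634.g1a2b3c4'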
@ -5,86 +5,91 @@
from cef_parser import * from cef_parser import *
from make_ctocpp_impl import * from make_ctocpp_impl import *
def make_views_function_stub_impl(clsname, func): def make_views_function_stub_impl(clsname, func):
name = func.get_name() name = func.get_name()
# Build the C++ prototype. # Build the C++ prototype.
parts = func.get_cpp_parts(True) parts = func.get_cpp_parts(True)
result = make_ctocpp_impl_proto(clsname, name, func, parts)+' {' result = make_ctocpp_impl_proto(clsname, name, func, parts) + ' {'
# Retrieve the function return value. # Retrieve the function return value.
retval = func.get_retval() retval = func.get_retval()
retval_type = retval.get_retval_type() retval_type = retval.get_retval_type()
if retval_type == 'invalid': if retval_type == 'invalid':
notify(name+' could not be autogenerated') notify(name + ' could not be autogenerated')
# Code could not be auto-generated. # Code could not be auto-generated.
result += '\n // COULD NOT IMPLEMENT DUE TO: (return value)' result += '\n // COULD NOT IMPLEMENT DUE TO: (return value)'
result += '\n #pragma message("Warning: "__FILE__": '+name+' is not implemented")' result += '\n #pragma message("Warning: "__FILE__": ' + name + ' is not implemented")'
retval_default = '' retval_default = ''
else: else:
retval_default = retval.get_retval_default(False) retval_default = retval.get_retval_default(False)
result += '\n NOTIMPLEMENTED();' result += '\n NOTIMPLEMENTED();'
if retval_default != '': if retval_default != '':
result += '\n return ' + retval_default + ';' result += '\n return ' + retval_default + ';'
result += '\n}\n\n' result += '\n}\n\n'
return result
return result
def make_views_class_stub_impl(header, cls): def make_views_class_stub_impl(header, cls):
impl = '' impl = ''
clsname = cls.get_name() clsname = cls.get_name()
funcs = cls.get_static_funcs() funcs = cls.get_static_funcs()
for func in funcs: for func in funcs:
impl += make_views_function_stub_impl(clsname, func) impl += make_views_function_stub_impl(clsname, func)
return impl
return impl
def make_views_stub_impl(header): def make_views_stub_impl(header):
includes = '' includes = ''
impl = '' impl = ''
allclasses = header.get_classes() allclasses = header.get_classes()
for cls in allclasses: for cls in allclasses:
dir = cls.get_file_directory() dir = cls.get_file_directory()
# Only process files in the views/ directory. # Only process files in the views/ directory.
if dir != None and dir.find('views') == 0: if dir != None and dir.find('views') == 0:
cls_impl = make_views_class_stub_impl(header, cls) cls_impl = make_views_class_stub_impl(header, cls)
if cls_impl != '': if cls_impl != '':
impl += cls_impl impl += cls_impl
includes += '#include "include/'+cls.get_file_name()+'"\n' includes += '#include "include/' + cls.get_file_name() + '"\n'
includes += '\n#include "base/logging.h"\n' includes += '\n#include "base/logging.h"\n'
# Build the final output.
result = get_copyright() + includes
result += '\n\n// STATIC STUB METHODS - Do not edit by hand.\n\n'
result += impl
return result
# Build the final output.
result = get_copyright() + includes
result += '\n\n// STATIC STUB METHODS - Do not edit by hand.\n\n'
result += impl
return result
def write_views_stub_impl(header, file): def write_views_stub_impl(header, file):
newcontents = make_views_stub_impl(header) newcontents = make_views_stub_impl(header)
return (file, newcontents) return (file, newcontents)
# Test the module. # Test the module.
if __name__ == "__main__": if __name__ == "__main__":
import sys import sys
# Verify that the correct number of command-line arguments are provided. # Verify that the correct number of command-line arguments are provided.
if len(sys.argv) < 2: if len(sys.argv) < 2:
sys.stderr.write('Usage: '+sys.argv[0]+' <cpp_header_dir>') sys.stderr.write('Usage: ' + sys.argv[0] + ' <cpp_header_dir>')
sys.exit() sys.exit()
cpp_header_dir = sys.argv[1] cpp_header_dir = sys.argv[1]
# Create the header object. Should match the logic in translator.py. # Create the header object. Should match the logic in translator.py.
header = obj_header() header = obj_header()
header.set_root_directory(cpp_header_dir) header.set_root_directory(cpp_header_dir)
excluded_files = ['cef_application_mac.h', 'cef_version.h'] excluded_files = ['cef_application_mac.h', 'cef_version.h']
header.add_directory(cpp_header_dir, excluded_files) header.add_directory(cpp_header_dir, excluded_files)
header.add_directory(os.path.join(cpp_header_dir, 'views')) header.add_directory(os.path.join(cpp_header_dir, 'views'))
# Dump the result to stdout. # Dump the result to stdout.
sys.stdout.write(make_views_stub_impl(header)) sys.stdout.write(make_views_stub_impl(header))
@ -4,44 +4,45 @@
from cef_parser import * from cef_parser import *
def make_wrapper_types_header(header): def make_wrapper_types_header(header):
result = get_copyright() result = get_copyright()
result += '#ifndef CEF_LIBCEF_DLL_WRAPPER_TYPES_H_\n'+ \ result += '#ifndef CEF_LIBCEF_DLL_WRAPPER_TYPES_H_\n'+ \
'#define CEF_LIBCEF_DLL_WRAPPER_TYPES_H_\n' + \ '#define CEF_LIBCEF_DLL_WRAPPER_TYPES_H_\n' + \
'#pragma once\n\n' + \ '#pragma once\n\n' + \
'enum CefWrapperType {\n' + \ 'enum CefWrapperType {\n' + \
' WT_BASE_REF_COUNTED = 1,\n' + \ ' WT_BASE_REF_COUNTED = 1,\n' + \
' WT_BASE_SCOPED,\n' ' WT_BASE_SCOPED,\n'
clsnames = sorted(header.get_class_names()) clsnames = sorted(header.get_class_names())
for clsname in clsnames: for clsname in clsnames:
result += ' '+get_wrapper_type_enum(clsname)+',\n' result += ' ' + get_wrapper_type_enum(clsname) + ',\n'
result += '};\n\n' + \ result += '};\n\n' + \
'#endif // CEF_LIBCEF_DLL_WRAPPER_TYPES_H_' '#endif // CEF_LIBCEF_DLL_WRAPPER_TYPES_H_'
return result return result
def write_wrapper_types_header(header, file): def write_wrapper_types_header(header, file):
newcontents = make_wrapper_types_header(header) newcontents = make_wrapper_types_header(header)
return (file, newcontents) return (file, newcontents)
# test the module # test the module
if __name__ == "__main__": if __name__ == "__main__":
import sys import sys
# verify that the correct number of command-line arguments are provided # verify that the correct number of command-line arguments are provided
if len(sys.argv) < 2: if len(sys.argv) < 2:
sys.stderr.write('Usage: '+sys.argv[0]+' <include_dir>') sys.stderr.write('Usage: ' + sys.argv[0] + ' <include_dir>')
sys.exit() sys.exit()
# create the header object # create the header object
header = obj_header() header = obj_header()
excluded_files = ['cef_application_mac.h', 'cef_version.h'] excluded_files = ['cef_application_mac.h', 'cef_version.h']
header.add_directory(sys.argv[1], excluded_files) header.add_directory(sys.argv[1], excluded_files)
# dump the result to stdout # dump the result to stdout
sys.stdout.write(make_wrapper_types_header(header)) sys.stdout.write(make_wrapper_types_header(header))
@ -9,16 +9,19 @@ import sys
from exec_util import exec_cmd from exec_util import exec_cmd
import git_util as git import git_util as git
def msg(message): def msg(message):
""" Output a message. """ """ Output a message. """
sys.stdout.write('--> ' + message + "\n") sys.stdout.write('--> ' + message + "\n")
def warn(message): def warn(message):
""" Output a warning. """ """ Output a warning. """
sys.stdout.write('-' * 80 + "\n") sys.stdout.write('-' * 80 + "\n")
sys.stdout.write('!!!! WARNING: ' + message + "\n") sys.stdout.write('!!!! WARNING: ' + message + "\n")
sys.stdout.write('-' * 80 + "\n") sys.stdout.write('-' * 80 + "\n")
def extract_paths(file): def extract_paths(file):
""" Extract the list of modified paths from the patch file. """ """ Extract the list of modified paths from the patch file. """
paths = [] paths = []
@ -32,6 +35,7 @@ def extract_paths(file):
paths.append(match.group(1).strip()) paths.append(match.group(1).strip())
return paths return paths
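The regular expression driving extract_paths() is not visible in this hunk, but the technique is standard: scan the patch for git diff headers and collect the paths they name. A sketch under that assumption (the exact pattern used by patch_updater.py may differ):

import re


def extract_paths_sketch(patch_path):
  """ Sketch: collect modified paths from 'diff --git a/<path> b/<path>'
      header lines in a git-format patch file. """
  paths = []
  with open(patch_path, 'r') as f:
    for line in f:
      match = re.match(r'^diff --git a/(.*?) b/', line)
      if match:
        paths.append(match.group(1).strip())
  return paths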
# Cannot be loaded as a module. # Cannot be loaded as a module.
if __name__ != "__main__": if __name__ != "__main__":
sys.stderr.write('This file cannot be loaded as a module!') sys.stderr.write('This file cannot be loaded as a module!')
@ -42,6 +46,7 @@ disc = """
This utility updates existing patch files. This utility updates existing patch files.
""" """
# Support options with multiple arguments. # Support options with multiple arguments.
class MultipleOption(Option): class MultipleOption(Option):
ACTIONS = Option.ACTIONS + ("extend",) ACTIONS = Option.ACTIONS + ("extend",)
@ -55,17 +60,27 @@ class MultipleOption(Option):
else: else:
Option.take_action(self, action, dest, opt, value, values, parser) Option.take_action(self, action, dest, opt, value, values, parser)
parser = OptionParser(option_class=MultipleOption,
description=disc) parser = OptionParser(option_class=MultipleOption, description=disc)
parser.add_option('--resave', parser.add_option(
action='store_true', dest='resave', default=False, '--resave',
help='re-save existing patch files to pick up manual changes') action='store_true',
parser.add_option('--revert', dest='resave',
action='store_true', dest='revert', default=False, default=False,
help='revert all changes from existing patch files') help='re-save existing patch files to pick up manual changes')
parser.add_option('--patch', parser.add_option(
action='extend', dest='patch', type='string', default=[], '--revert',
help='optional patch name to process (multiples allowed)') action='store_true',
dest='revert',
default=False,
help='revert all changes from existing patch files')
parser.add_option(
'--patch',
action='extend',
dest='patch',
type='string',
default=[],
help='optional patch name to process (multiples allowed)')
(options, args) = parser.parse_args() (options, args) = parser.parse_args()
if options.resave and options.revert: if options.resave and options.revert:
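The MultipleOption class reformatted above is the standard optparse recipe for options that may be repeated and accumulated into a list, which is how --patch accepts multiple names. A self-contained version of that recipe, with a usage example:

from optparse import Option, OptionParser


class MultipleOption(Option):
  """ Option subclass adding an 'extend' action that accumulates values. """
  ACTIONS = Option.ACTIONS + ("extend",)
  STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
  TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
  ALWAYS_TYPED_ACTIONS = Option.ALWAYS_TYPED_ACTIONS + ("extend",)

  def take_action(self, action, dest, opt, value, values, parser):
    if action == "extend":
      # Append each occurrence of the option to a list on |dest|.
      values.ensure_value(dest, []).append(value)
    else:
      Option.take_action(self, action, dest, opt, value, values, parser)


parser = OptionParser(option_class=MultipleOption)
parser.add_option(
    '--patch', action='extend', dest='patch', type='string', default=[])
(options, args) = parser.parse_args(['--patch', 'a', '--patch', 'b'])
# options.patch -> ['a', 'b']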
@ -100,7 +115,7 @@ for patch in patches:
continue continue
sys.stdout.write('\n') sys.stdout.write('\n')
patch_file = os.path.join(patches_dir, patch['name']+'.patch') patch_file = os.path.join(patches_dir, patch['name'] + '.patch')
if os.path.isfile(patch_file): if os.path.isfile(patch_file):
msg('Reading patch file %s' % patch_file) msg('Reading patch file %s' % patch_file)
@ -19,12 +19,14 @@ cef_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
cef_patch_dir = os.path.join(cef_dir, 'patch') cef_patch_dir = os.path.join(cef_dir, 'patch')
src_dir = os.path.abspath(os.path.join(cef_dir, os.pardir)) src_dir = os.path.abspath(os.path.join(cef_dir, os.pardir))
def write_note(type, note): def write_note(type, note):
separator = '-' * 79 + '\n' separator = '-' * 79 + '\n'
sys.stdout.write(separator) sys.stdout.write(separator)
sys.stdout.write('!!!! %s: %s\n' % (type, note)) sys.stdout.write('!!!! %s: %s\n' % (type, note))
sys.stdout.write(separator) sys.stdout.write(separator)
def apply_patch_file(patch_file, patch_dir): def apply_patch_file(patch_file, patch_dir):
''' Apply a specific patch file in optional patch directory. ''' ''' Apply a specific patch file in optional patch directory. '''
patch_path = os.path.join(cef_patch_dir, 'patches', patch_file + '.patch') patch_path = os.path.join(cef_patch_dir, 'patches', patch_file + '.patch')
@ -39,9 +41,11 @@ def apply_patch_file(patch_file, patch_dir):
result = git_apply_patch_file(patch_path, patch_dir) result = git_apply_patch_file(patch_path, patch_dir)
if result == 'fail': if result == 'fail':
write_note('ERROR', 'This patch failed to apply. Your build will not be correct.') write_note('ERROR',
'This patch failed to apply. Your build will not be correct.')
return result return result
def apply_patch_config(): def apply_patch_config():
''' Apply patch files based on a configuration file. ''' ''' Apply patch files based on a configuration file. '''
config_file = os.path.join(cef_patch_dir, 'patch.cfg') config_file = os.path.join(cef_patch_dir, 'patch.cfg')
@ -66,7 +70,8 @@ def apply_patch_config():
dopatch = False dopatch = False
if dopatch: if dopatch:
result = apply_patch_file(patch_file, patch['path'] if 'path' in patch else None) result = apply_patch_file(patch_file, patch['path']
if 'path' in patch else None)
results[result] += 1 results[result] += 1
if 'note' in patch: if 'note' in patch:
@ -79,8 +84,13 @@ def apply_patch_config():
if results['fail'] > 0: if results['fail'] > 0:
sys.stdout.write('\n') sys.stdout.write('\n')
write_note('ERROR', '%d patches failed to apply. Your build will not be correct.' % results['fail']) write_note('ERROR',
raise Exception('%d patches failed to apply. Your build will not be correct.' % results['fail']) '%d patches failed to apply. Your build will not be correct.' %
results['fail'])
raise Exception(
'%d patches failed to apply. Your build will not be correct.' %
results['fail'])
# Parse command-line options. # Parse command-line options.
disc = """ disc = """
@ -88,10 +98,13 @@ This utility applies patch files.
""" """
parser = OptionParser(description=disc) parser = OptionParser(description=disc)
parser.add_option('--patch-file', dest='patchfile', metavar='FILE', parser.add_option(
help='patch source file') '--patch-file', dest='patchfile', metavar='FILE', help='patch source file')
parser.add_option('--patch-dir', dest='patchdir', metavar='DIR', parser.add_option(
help='patch target directory') '--patch-dir',
dest='patchdir',
metavar='DIR',
help='patch target directory')
(options, args) = parser.parse_args() (options, args) = parser.parse_args()
if not options.patchfile is None: if not options.patchfile is None:
@ -17,12 +17,10 @@ from make_views_stub_impl import *
from make_wrapper_types_header import * from make_wrapper_types_header import *
from optparse import OptionParser from optparse import OptionParser
# cannot be loaded as a module # cannot be loaded as a module
if __name__ != "__main__": if __name__ != "__main__":
sys.stderr.write('This file cannot be loaded as a module!') sys.stderr.write('This file cannot be loaded as a module!')
sys.exit() sys.exit()
# parse command-line options # parse command-line options
disc = """ disc = """
@ -30,25 +28,42 @@ This utility generates files for the CEF C++ to C API translation layer.
""" """
parser = OptionParser(description=disc) parser = OptionParser(description=disc)
parser.add_option('--root-dir', dest='rootdir', metavar='DIR', parser.add_option(
help='CEF root directory [required]') '--root-dir',
parser.add_option('--backup', dest='rootdir',
action='store_true', dest='backup', default=False, metavar='DIR',
help='create a backup of modified files') help='CEF root directory [required]')
parser.add_option('--force', parser.add_option(
action='store_true', dest='force', default=False, '--backup',
help='force rewrite of the file') action='store_true',
parser.add_option('-c', '--classes', dest='classes', action='append', dest='backup',
help='only translate the specified classes') default=False,
parser.add_option('-q', '--quiet', help='create a backup of modified files')
action='store_true', dest='quiet', default=False, parser.add_option(
help='do not output detailed status information') '--force',
action='store_true',
dest='force',
default=False,
help='force rewrite of the file')
parser.add_option(
'-c',
'--classes',
dest='classes',
action='append',
help='only translate the specified classes')
parser.add_option(
'-q',
'--quiet',
action='store_true',
dest='quiet',
default=False,
help='do not output detailed status information')
(options, args) = parser.parse_args() (options, args) = parser.parse_args()
# the rootdir option is required # the rootdir option is required
if options.rootdir is None: if options.rootdir is None:
parser.print_help(sys.stdout) parser.print_help(sys.stdout)
sys.exit() sys.exit()
# determine the paths # determine the paths
root_dir = os.path.abspath(options.rootdir) root_dir = os.path.abspath(options.rootdir)
@ -58,7 +73,8 @@ cpp_header_views_dir = os.path.join(cpp_header_dir, 'views')
capi_header_dir = os.path.join(cpp_header_dir, 'capi') capi_header_dir = os.path.join(cpp_header_dir, 'capi')
libcef_dll_dir = os.path.join(root_dir, 'libcef_dll') libcef_dll_dir = os.path.join(root_dir, 'libcef_dll')
cpptoc_global_impl = os.path.join(libcef_dll_dir, 'libcef_dll.cc') cpptoc_global_impl = os.path.join(libcef_dll_dir, 'libcef_dll.cc')
ctocpp_global_impl = os.path.join(libcef_dll_dir, 'wrapper', 'libcef_dll_wrapper.cc') ctocpp_global_impl = os.path.join(libcef_dll_dir, 'wrapper',
'libcef_dll_wrapper.cc')
wrapper_types_header = os.path.join(libcef_dll_dir, 'wrapper_types.h') wrapper_types_header = os.path.join(libcef_dll_dir, 'wrapper_types.h')
cpptoc_dir = os.path.join(libcef_dll_dir, 'cpptoc') cpptoc_dir = os.path.join(libcef_dll_dir, 'cpptoc')
ctocpp_dir = os.path.join(libcef_dll_dir, 'ctocpp') ctocpp_dir = os.path.join(libcef_dll_dir, 'ctocpp')
@ -67,12 +83,12 @@ views_stub_impl = os.path.join(libcef_dll_dir, 'views_stub.cc')
# make sure the header directory exists # make sure the header directory exists
if not path_exists(cpp_header_dir): if not path_exists(cpp_header_dir):
sys.stderr.write('Directory '+cpp_header_dir+' does not exist.') sys.stderr.write('Directory ' + cpp_header_dir + ' does not exist.')
sys.exit() sys.exit()
# create the header object # create the header object
if not options.quiet: if not options.quiet:
sys.stdout.write('Parsing C++ headers from '+cpp_header_dir+'...\n') sys.stdout.write('Parsing C++ headers from ' + cpp_header_dir + '...\n')
header = obj_header() header = obj_header()
# add include files to be processed # add include files to be processed
@ -82,135 +98,135 @@ header.add_directory(cpp_header_dir, excluded_files)
header.add_directory(cpp_header_test_dir) header.add_directory(cpp_header_test_dir)
header.add_directory(cpp_header_views_dir) header.add_directory(cpp_header_views_dir)
# Track the number of files that were written. # Track the number of files that were written.
writect = 0 writect = 0
def update_file(file, newcontents): def update_file(file, newcontents):
""" Replaces the contents of |file| with |newcontents| if necessary. """ """ Replaces the contents of |file| with |newcontents| if necessary. """
oldcontents = '' oldcontents = ''
oldhash = '' oldhash = ''
if newcontents[-1:] != "\n": if newcontents[-1:] != "\n":
# Add newline at end of file. # Add newline at end of file.
newcontents += "\n" newcontents += "\n"
# clang-format is slow so we don't want to apply it if the pre-formatted # clang-format is slow so we don't want to apply it if the pre-formatted
# content hasn't changed. To check for changes we embed a hash of the pre- # content hasn't changed. To check for changes we embed a hash of the pre-
# formatted content in the resulting file. # formatted content in the resulting file.
hash_start = "$hash=" hash_start = "$hash="
hash_end = "$" hash_end = "$"
hash_token = "$$HASH$$" hash_token = "$$HASH$$"
if not options.force and path_exists(file): if not options.force and path_exists(file):
oldcontents = read_file(file) oldcontents = read_file(file)
# Extract the existing hash. # Extract the existing hash.
start = oldcontents.find(hash_start) start = oldcontents.find(hash_start)
if start > 0: if start > 0:
end = oldcontents.find(hash_end, start + len(hash_start)) end = oldcontents.find(hash_end, start + len(hash_start))
if end > 0: if end > 0:
oldhash = oldcontents[start + len(hash_start):end] oldhash = oldcontents[start + len(hash_start):end]
# Compute the new hash. # Compute the new hash.
rev = hashlib.sha1(newcontents).digest(); rev = hashlib.sha1(newcontents).digest()
newhash = ''.join(format(ord(i),'0>2x') for i in rev) newhash = ''.join(format(ord(i), '0>2x') for i in rev)
if oldhash == newhash: if oldhash == newhash:
# Pre-formatted contents have not changed. # Pre-formatted contents have not changed.
return return
newcontents = newcontents.replace(hash_token, newhash, 1) newcontents = newcontents.replace(hash_token, newhash, 1)
# Apply clang-format for C/C++ files. # Apply clang-format for C/C++ files.
if os.path.splitext(file)[1][1:] in ('cc', 'cpp', 'h'): if os.path.splitext(file)[1][1:] in ('cc', 'cpp', 'h'):
result = clang_format(file, newcontents) result = clang_format(file, newcontents)
if result != None: if result != None:
newcontents = result newcontents = result
else: else:
raise Exception("Call to clang-format failed") raise Exception("Call to clang-format failed")
if options.backup and oldcontents != '': if options.backup and oldcontents != '':
backup_file(file) backup_file(file)
filedir = os.path.split(file)[0] filedir = os.path.split(file)[0]
if not os.path.isdir(filedir): if not os.path.isdir(filedir):
make_dir(filedir) make_dir(filedir)
write_file(file, newcontents) write_file(file, newcontents)
global writect global writect
writect += 1 writect += 1
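The hash embedding above is worth spelling out: clang-format is slow, so update_file() stamps a SHA-1 of the pre-formatted content into the generated file and skips formatting entirely when that stamp already matches. A condensed sketch of just the check (the helper name is illustrative; the markers match the ones used above):

import hashlib

HASH_START = '$hash='
HASH_END = '$'


def formatted_output_is_current(old_contents, new_contents):
  """ Sketch: True if the hash embedded in |old_contents| matches the
      SHA-1 of the pre-formatted |new_contents|. """
  old_hash = ''
  start = old_contents.find(HASH_START)
  if start > 0:
    end = old_contents.find(HASH_END, start + len(HASH_START))
    if end > 0:
      old_hash = old_contents[start + len(HASH_START):end]
  new_hash = hashlib.sha1(new_contents.encode('utf-8')).hexdigest()
  return old_hash == new_hash

The effect is that repeated runs of the translator are cheap: only files whose generated (pre-format) content actually changed pay the clang-format cost.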
# output the C API header # output the C API header
if not options.quiet: if not options.quiet:
sys.stdout.write('In C API header directory '+capi_header_dir+'...\n') sys.stdout.write('In C API header directory ' + capi_header_dir + '...\n')
filenames = sorted(header.get_file_names()) filenames = sorted(header.get_file_names())
for filename in filenames: for filename in filenames:
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating '+filename+' C API header...\n') sys.stdout.write('Generating ' + filename + ' C API header...\n')
update_file(*write_capi_header(header, capi_header_dir, filename)) update_file(*write_capi_header(header, capi_header_dir, filename))
# output the wrapper types header # output the wrapper types header
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating wrapper types header...\n') sys.stdout.write('Generating wrapper types header...\n')
update_file(*write_wrapper_types_header(header, wrapper_types_header)) update_file(*write_wrapper_types_header(header, wrapper_types_header))
# build the list of classes to parse # build the list of classes to parse
allclasses = header.get_class_names() allclasses = header.get_class_names()
if not options.classes is None: if not options.classes is None:
for cls in options.classes: for cls in options.classes:
if not cls in allclasses: if not cls in allclasses:
sys.stderr.write('ERROR: Unknown class: '+cls) sys.stderr.write('ERROR: Unknown class: ' + cls)
sys.exit() sys.exit()
classes = options.classes classes = options.classes
else: else:
classes = allclasses classes = allclasses
classes = sorted(classes) classes = sorted(classes)
# output CppToC global file # output CppToC global file
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating CppToC global implementation...\n') sys.stdout.write('Generating CppToC global implementation...\n')
update_file(*write_cpptoc_impl(header, None, cpptoc_global_impl)) update_file(*write_cpptoc_impl(header, None, cpptoc_global_impl))
# output CToCpp global file # output CToCpp global file
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating CToCpp global implementation...\n') sys.stdout.write('Generating CToCpp global implementation...\n')
update_file(*write_ctocpp_impl(header, None, ctocpp_global_impl)) update_file(*write_ctocpp_impl(header, None, ctocpp_global_impl))
# output CppToC class files # output CppToC class files
if not options.quiet: if not options.quiet:
sys.stdout.write('In CppToC directory '+cpptoc_dir+'...\n') sys.stdout.write('In CppToC directory ' + cpptoc_dir + '...\n')
for cls in classes: for cls in classes:
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating '+cls+'CppToC class header...\n') sys.stdout.write('Generating ' + cls + 'CppToC class header...\n')
update_file(*write_cpptoc_header(header, cls, cpptoc_dir)) update_file(*write_cpptoc_header(header, cls, cpptoc_dir))
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating '+cls+'CppToC class implementation...\n') sys.stdout.write('Generating ' + cls + 'CppToC class implementation...\n')
update_file(*write_cpptoc_impl(header, cls, cpptoc_dir)) update_file(*write_cpptoc_impl(header, cls, cpptoc_dir))
# output CppToC class files # output CppToC class files
if not options.quiet: if not options.quiet:
sys.stdout.write('In CToCpp directory '+ctocpp_dir+'...\n') sys.stdout.write('In CToCpp directory ' + ctocpp_dir + '...\n')
for cls in classes: for cls in classes:
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating '+cls+'CToCpp class header...\n') sys.stdout.write('Generating ' + cls + 'CToCpp class header...\n')
update_file(*write_ctocpp_header(header, cls, ctocpp_dir)) update_file(*write_ctocpp_header(header, cls, ctocpp_dir))
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating '+cls+'CToCpp class implementation...\n') sys.stdout.write('Generating ' + cls + 'CToCpp class implementation...\n')
update_file(*write_ctocpp_impl(header, cls, ctocpp_dir)) update_file(*write_ctocpp_impl(header, cls, ctocpp_dir))
# output the gypi file # output the gypi file
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating '+gypi_file+' file...\n') sys.stdout.write('Generating ' + gypi_file + ' file...\n')
update_file(*write_gypi_file(header, gypi_file)) update_file(*write_gypi_file(header, gypi_file))
# output the views stub file # output the views stub file
if not options.quiet: if not options.quiet:
sys.stdout.write('Generating '+views_stub_impl+' file...\n') sys.stdout.write('Generating ' + views_stub_impl + ' file...\n')
update_file(*write_views_stub_impl(header, views_stub_impl)) update_file(*write_views_stub_impl(header, views_stub_impl))
if not options.quiet: if not options.quiet:
sys.stdout.write('Done - Wrote '+str(writect)+' files.\n') sys.stdout.write('Done - Wrote ' + str(writect) + ' files.\n')
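Finally, the formatting changes throughout this commit are mechanical: they are what yapf produces when run over the Python sources. A sketch of the invocation (the exact style configuration is an assumption, not recorded in the diff; the indentation above is consistent with yapf's 2-space 'chromium' preset):

import subprocess

# Assumed invocation: rewrite all Python files under tools/ in place.
subprocess.check_call(
    ['yapf', '--in-place', '--recursive', '--style=chromium', 'tools/'])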