Mirror of https://bitbucket.org/chromiumembedded/cef (synced 2025-06-05 21:39:12 +02:00)
Create 2272 release branch for CEF3.
git-svn-id: https://chromiumembedded.googlecode.com/svn/branches/2272@1993 5089003a-bbd8-11dd-ad1f-f1f9622dbc98
tools/automate/automate-git.py (new file, 960 lines)
@@ -0,0 +1,960 @@
# Copyright (c) 2014 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from optparse import OptionParser
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
import urllib
import xml.etree.ElementTree as ET
import zipfile

##
# Default URLs.
##

depot_tools_url = 'https://chromium.googlesource.com/chromium/tools/depot_tools.git'
depot_tools_archive_url = 'https://src.chromium.org/svn/trunk/tools/depot_tools.zip'

cef_git_trunk_url = 'https://chromiumembedded@bitbucket.org/chromiumembedded/trunk-cef3.git'
cef_git_branch_url = 'https://chromiumembedded@bitbucket.org/chromiumembedded/branches-%1-cef3.git'
cef_svn_trunk_url = 'https://chromiumembedded.googlecode.com/svn/trunk/cef3'
cef_svn_branch_url = 'https://chromiumembedded.googlecode.com/svn/branches/%1/cef3'


##
# Global system variables.
##

# Operating system.
platform = ''
if sys.platform == 'win32':
  platform = 'windows'
elif sys.platform == 'darwin':
  platform = 'macosx'
elif sys.platform.startswith('linux'):
  platform = 'linux'

# Script directory.
script_dir = os.path.dirname(__file__)

# Script extension.
if platform == 'windows':
  script_ext = '.bat'
else:
  script_ext = '.sh'


##
# Helper functions.
##

def msg(message):
  """ Output a message. """
  sys.stdout.write('--> ' + message + "\n")

def run(command_line, working_dir, depot_tools_dir=None, output_file=None):
  """ Runs the specified command. """
  # Add depot_tools to the path.
  env = os.environ
  if not depot_tools_dir is None:
    env['PATH'] = depot_tools_dir + os.pathsep + env['PATH']

  sys.stdout.write('-------- Running "' + command_line + '" in "' + \
                   working_dir + '"...' + "\n")
  if not options.dryrun:
    args = shlex.split(command_line.replace('\\', '\\\\'))

    if not output_file:
      return subprocess.check_call(args, cwd=working_dir, env=env,
                                   shell=(sys.platform == 'win32'))
    with open(output_file, "w") as f:
      return subprocess.check_call(args, cwd=working_dir, env=env,
                                   shell=(sys.platform == 'win32'),
                                   stderr=subprocess.STDOUT, stdout=f)

def create_directory(path):
  """ Creates a directory if it doesn't already exist. """
  if not os.path.exists(path):
    msg("Creating directory %s" % (path))
    if not options.dryrun:
      os.makedirs(path)

def delete_directory(path):
  """ Removes an existing directory. """
  if os.path.exists(path):
    msg("Removing directory %s" % (path))
    if not options.dryrun:
      shutil.rmtree(path, onerror=onerror)

def copy_directory(source, target, allow_overwrite=False):
  """ Copies a directory from source to target. """
  if not options.dryrun and os.path.exists(target):
    if not allow_overwrite:
      raise Exception("Directory %s already exists" % (target))
    delete_directory(target)
  if os.path.exists(source):
    msg("Copying directory %s to %s" % (source, target))
    if not options.dryrun:
      shutil.copytree(source, target)

def move_directory(source, target, allow_overwrite=False):
  """ Moves a directory from source to target. """
  if not options.dryrun and os.path.exists(target):
    if not allow_overwrite:
      raise Exception("Directory %s already exists" % (target))
    delete_directory(target)
  if os.path.exists(source):
    msg("Moving directory %s to %s" % (source, target))
    if not options.dryrun:
      shutil.move(source, target)

def is_git_checkout(path):
  """ Returns true if the path represents a git checkout. """
  return os.path.exists(os.path.join(path, '.git'))

def is_svn_checkout(path):
  """ Returns true if the path represents an svn checkout. """
  return os.path.exists(os.path.join(path, '.svn'))

def exec_cmd(cmd, path):
  """ Execute the specified command and return the result. """
  out = ''
  err = ''
  sys.stdout.write("-------- Running \"%s\" in \"%s\"...\n" % (cmd, path))
  parts = cmd.split()
  try:
    process = subprocess.Popen(parts, cwd=path,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               shell=(sys.platform == 'win32'))
    out, err = process.communicate()
  except IOError, (errno, strerror):
    raise
  except:
    raise
  return {'out': out, 'err': err}

def get_git_hash(path, branch):
  """ Returns the git hash for the specified branch/tag/hash. """
  cmd = "%s rev-parse %s" % (git_exe, branch)
  result = exec_cmd(cmd, path)
  if result['out'] != '':
    return result['out'].strip()
  return 'Unknown'

def get_git_url(path):
  """ Returns the origin url for the specified path. """
  cmd = "%s config --get remote.origin.url" % (git_exe)
  result = exec_cmd(cmd, path)
  if result['out'] != '':
    return result['out'].strip()
  return 'Unknown'

def get_git_svn_revision(path, branch):
  """ Returns the SVN revision associated with the specified path and git
      branch/tag/hash. """
  svn_rev = "None"
  cmd = "%s log --grep=^git-svn-id: -n 1 %s" % (git_exe, branch)
  result = exec_cmd(cmd, path)
  if result['err'] == '':
    for line in result['out'].split('\n'):
      if line.find("git-svn-id") > 0:
        svn_rev = line.split("@")[1].split()[0]
        break
  return svn_rev

def get_svn_info(path):
  """ Retrieves the URL and revision from svn info. """
  url = 'None'
  rev = 'None'
  cmd = "%s info --xml %s" % (svn_exe, path)
  is_http = path[0:4] == 'http'
  if is_http or os.path.exists(path):
    result = exec_cmd(cmd, path if not is_http else '.')
    if result['err'] == '':
      tree = ET.ElementTree(ET.fromstring(result['out']))
      entry = tree.getroot().find('entry')
      url = entry.find('url').text
      rev = entry.attrib['revision']
    else:
      raise Exception("Failed to execute svn info: %s" % (result['err']))
  return {'url': url, 'revision': rev}

def download_and_extract(src, target, contents_prefix):
  """ Extracts the contents of src, which may be a URL or local file, to the
      target directory. """
  temporary = False

  if src[:4] == 'http':
    # Attempt to download a URL.
    opener = urllib.FancyURLopener({})
    response = opener.open(src)

    temporary = True
    handle, archive_path = tempfile.mkstemp(suffix = '.zip')
    os.write(handle, response.read())
    os.close(handle)
  elif os.path.exists(src):
    # Use a local file.
    archive_path = src
  else:
    raise Exception('Path type is unsupported or does not exist: ' + src)

  if not zipfile.is_zipfile(archive_path):
    raise Exception('Not a valid zip archive: ' + src)

  def remove_prefix(zip, prefix):
    offset = len(prefix)
    for zipinfo in zip.infolist():
      name = zipinfo.filename
      if len(name) > offset and name[:offset] == prefix:
        zipinfo.filename = name[offset:]
        yield zipinfo

  # Attempt to extract the archive file.
  try:
    os.makedirs(target)
    zf = zipfile.ZipFile(archive_path, 'r')
    zf.extractall(target, remove_prefix(zf, contents_prefix))
  except:
    shutil.rmtree(target, onerror=onerror)
    raise
  zf.close()

  # Delete the archive file if temporary.
  if temporary and os.path.exists(archive_path):
    os.remove(archive_path)

def read_config_file(path):
  """ Read a configuration file. """
  if os.path.exists(path):
    fp = open(path, 'r')
    data = fp.read()
    fp.close()
  else:
    raise Exception("Path does not exist: %s" % (path))

  # Parse the contents.
  return eval(data, {'__builtins__': None}, None)

def write_config_file(path, contents):
  """ Write a configuration file. """
  msg('Writing file: %s' % path)
  if not options.dryrun:
    fp = open(path, 'w')
    fp.write("{\n")
    for key in sorted(contents.keys()):
      fp.write("  '%s': '%s',\n" % (key, contents[key]))
    fp.write("}\n")
    fp.close()

def read_branch_config_file(path):
  """ Read the CEF branch from the specified path. """
  config_file = os.path.join(path, 'cef.branch')
  if os.path.isfile(config_file):
    contents = read_config_file(config_file)
    if 'branch' in contents:
      return contents['branch']
  return ''

def write_branch_config_file(path, branch):
  """ Write the CEF branch to the specified path. """
  config_file = os.path.join(path, 'cef.branch')
  if not os.path.isfile(config_file):
    write_config_file(config_file, {'branch': branch})

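# For reference, the cef.branch file written above is a single-entry dict as
# produced by write_config_file(), e.g. (branch value is illustrative):
#
#   {
#     'branch': '2272',
#   }
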
def remove_deps_entry(path, entry):
  """ Remove an entry from the DEPS file at the specified path. """
  msg('Updating DEPS file: %s' % path)
  if not options.dryrun:
    if not os.path.isfile(path):
      raise Exception('Path does not exist: %s' % path)

    # Read the DEPS file.
    fp = open(path, 'r')
    lines = fp.readlines()
    fp.close()

    # Write the DEPS file.
    # Each entry takes 2 lines. Skip both lines if found.
    fp = open(path, 'w')
    skip_next = False
    for line in lines:
      if skip_next:
        skip_next = False
        continue
      elif line.find(entry) >= 0:
        skip_next = True
        continue
      fp.write(line)
    fp.close()

def onerror(func, path, exc_info):
  """
  Error handler for ``shutil.rmtree``.

  If the error is due to an access error (read-only file)
  it attempts to add write permission and then retries.

  If the error is for another reason it re-raises the error.

  Usage: ``shutil.rmtree(path, onerror=onerror)``
  """
  import stat
  if not os.access(path, os.W_OK):
    # Is the error an access error?
    os.chmod(path, stat.S_IWUSR)
    func(path)
  else:
    raise


##
# Program entry point.
##

# Cannot be loaded as a module.
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# Parse command-line options.
disc = """
This utility implements automation for the download, update, build and
distribution of CEF.
"""

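# Example invocations (paths and values below are placeholders, not
# requirements):
#
#   Build the 2272 release branch from Git sources:
#     python automate-git.py --download-dir=/path/to/download --branch=2272
#
#   Build trunk from SVN without creating a distribution:
#     python automate-git.py --download-dir=/path/to/download --use-svn \
#         --no-distrib
#
# On Windows the GYP_MSVS_VERSION environment variable must also be set (this
# is verified after option parsing below).
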
parser = OptionParser(description=disc)

# Setup options.
parser.add_option('--download-dir', dest='downloaddir', metavar='DIR',
                  help='Download directory with no spaces [required].')
parser.add_option('--depot-tools-dir', dest='depottoolsdir', metavar='DIR',
                  help='Download directory for depot_tools.', default='')
parser.add_option('--depot-tools-archive', dest='depottoolsarchive',
                  help='Zip archive file that contains a single top-level '+\
                       'depot_tools directory.', default='')
parser.add_option('--branch', dest='branch',
                  help='Branch of CEF to build (trunk, 1916, ...). This '+\
                       'will be used to name the CEF download directory and '+\
                       'to identify the correct URL if --url is not '+\
                       'specified. The default value is trunk.',
                  default='trunk')
parser.add_option('--url', dest='url',
                  help='CEF download URL. If not specified the default URL '+\
                       'will be used for the chosen branch.',
                  default='')
parser.add_option('--checkout', dest='checkout',
                  help='Version of CEF to checkout. If not specified the '+\
                       'most recent version of the branch will be used. '+\
                       'If --use-svn is specified this should be an SVN '+\
                       'revision number instead of a Git branch/hash/tag.',
                  default='')
parser.add_option('--use-svn',
                  action='store_true', dest='usesvn', default=False,
                  help="Download CEF source code using SVN instead of Git.")
parser.add_option('--chromium-checkout', dest='chromiumcheckout',
                  help='Version of Chromium to checkout (Git '+\
                       'branch/hash/tag). This overrides the value specified '+\
                       'by CEF in CHROMIUM_BUILD_COMPATIBILITY.txt.',
                  default='')

# Miscellaneous options.
parser.add_option('--force-config',
                  action='store_true', dest='forceconfig', default=False,
                  help='Force creation of a new gclient config file.')
parser.add_option('--force-clean',
                  action='store_true', dest='forceclean', default=False,
                  help='Force a clean checkout of Chromium and CEF. This will'+\
                       ' trigger a new update, build and distribution.')
parser.add_option('--force-clean-deps',
                  action='store_true', dest='forcecleandeps', default=False,
                  help='Force a clean checkout of Chromium dependencies. Used'+\
                       ' in combination with --force-clean.')
parser.add_option('--dry-run',
                  action='store_true', dest='dryrun', default=False,
                  help="Output commands without executing them.")

# Update-related options.
parser.add_option('--force-update',
                  action='store_true', dest='forceupdate', default=False,
                  help='Force a Chromium and CEF update. This will trigger a '+\
                       'new build and distribution.')
parser.add_option('--no-update',
                  action='store_true', dest='noupdate', default=False,
                  help='Do not update Chromium or CEF. Pass --force-build or '+\
                       '--force-distrib if you desire a new build or '+\
                       'distribution.')

# Build-related options.
parser.add_option('--force-build',
                  action='store_true', dest='forcebuild', default=False,
                  help='Force CEF debug and release builds. This builds '+\
                       'cefclient on all platforms and chrome_sandbox on '+\
                       'Linux.')
parser.add_option('--no-build',
                  action='store_true', dest='nobuild', default=False,
                  help='Do not build CEF.')
parser.add_option('--build-tests',
                  action='store_true', dest='buildtests', default=False,
                  help='Also build the cef_unittests target.')
parser.add_option('--no-debug-build',
                  action='store_true', dest='nodebugbuild', default=False,
                  help="Don't perform the CEF debug build.")
parser.add_option('--no-release-build',
                  action='store_true', dest='noreleasebuild', default=False,
                  help="Don't perform the CEF release build.")
parser.add_option('--verbose-build',
                  action='store_true', dest='verbosebuild', default=False,
                  help='Show all command lines while building.')
parser.add_option('--build-log-file',
                  action='store_true', dest='buildlogfile', default=False,
                  help='Write build logs to file. The file will be named '+\
                       '"build-[branch]-[debug|release].log" in the download '+\
                       'directory.')
parser.add_option('--x64-build',
                  action='store_true', dest='x64build', default=False,
                  help='Build for 64-bit systems (Windows and Mac OS X only).')

# Distribution-related options.
parser.add_option('--force-distrib',
                  action='store_true', dest='forcedistrib', default=False,
                  help='Force creation of a CEF binary distribution.')
parser.add_option('--no-distrib',
                  action='store_true', dest='nodistrib', default=False,
                  help="Don't create a CEF binary distribution.")
parser.add_option('--minimal-distrib',
                  action='store_true', dest='minimaldistrib', default=False,
                  help='Create a minimal CEF binary distribution.')
parser.add_option('--minimal-distrib-only',
                  action='store_true', dest='minimaldistribonly', default=False,
                  help='Create a minimal CEF binary distribution only.')
parser.add_option('--client-distrib',
                  action='store_true', dest='clientdistrib', default=False,
                  help='Create a client CEF binary distribution.')
parser.add_option('--client-distrib-only',
                  action='store_true', dest='clientdistribonly', default=False,
                  help='Create a client CEF binary distribution only.')
parser.add_option('--no-distrib-docs',
                  action='store_true', dest='nodistribdocs', default=False,
                  help="Don't create CEF documentation.")
parser.add_option('--no-distrib-archive',
                  action='store_true', dest='nodistribarchive', default=False,
                  help="Don't create archives for output directories.")
parser.add_option('--clean-artifacts',
                  action='store_true', dest='cleanartifacts', default=False,
                  help='Clean the artifacts output directory.')

(options, args) = parser.parse_args()

if options.downloaddir is None:
  print "The --download-dir option is required."
  parser.print_help(sys.stderr)
  sys.exit()

if options.noupdate and options.forceupdate or \
   options.nobuild and options.forcebuild or \
   options.nodistrib and options.forcedistrib:
  print "Invalid combination of options."
  parser.print_help(sys.stderr)
  sys.exit()

if (options.noreleasebuild and \
     (options.minimaldistrib or options.minimaldistribonly or \
      options.clientdistrib or options.clientdistribonly)) or \
   (options.minimaldistribonly and options.clientdistribonly):
  print 'Invalid combination of options.'
  parser.print_help(sys.stderr)
  sys.exit()

if options.x64build and platform != 'windows' and platform != 'macosx':
  print 'The x64 build option is only used on Windows and Mac OS X.'
  sys.exit()

if platform == 'windows' and not 'GYP_MSVS_VERSION' in os.environ.keys():
  print 'You must set the GYP_MSVS_VERSION environment variable on Windows.'
  sys.exit()

# Options that force the sources to change.
force_change = options.forceclean or options.forceupdate


##
# Manage the download directory.
##

# Create the download directory if necessary.
download_dir = os.path.abspath(options.downloaddir)
create_directory(download_dir)

msg("Download Directory: %s" % (download_dir))


##
# Manage the depot_tools directory.
##

# Check if the depot_tools directory exists.
if options.depottoolsdir != '':
  depot_tools_dir = os.path.abspath(options.depottoolsdir)
else:
  depot_tools_dir = os.path.join(download_dir, 'depot_tools')

msg("Depot Tools Directory: %s" % (depot_tools_dir))

if not os.path.exists(depot_tools_dir):
  if platform == 'windows' and options.depottoolsarchive == '':
    # On Windows download depot_tools as an archive file since we can't assume
    # that git is already installed.
    options.depottoolsarchive = depot_tools_archive_url

  if options.depottoolsarchive != '':
    # Extract depot_tools from an archive file.
    msg('Extracting %s to %s.' % \
        (options.depottoolsarchive, depot_tools_dir))
    if not options.dryrun:
      download_and_extract(options.depottoolsarchive, depot_tools_dir, \
                           'depot_tools/')
  else:
    # On Linux and OS X check out depot_tools using Git.
    run('git clone ' + depot_tools_url + ' ' + depot_tools_dir, download_dir)

if not options.noupdate:
  # Update depot_tools.
  # On Windows this will download required python and git binaries.
  if platform == 'windows':
    run('update_depot_tools.bat', depot_tools_dir, depot_tools_dir)
  else:
    run('update_depot_tools', depot_tools_dir, depot_tools_dir)

# Determine the svn/git executables to use.
if platform == 'windows':
  # Force use of the version bundled with depot_tools.
  svn_exe = os.path.join(depot_tools_dir, 'svn.bat')
  git_exe = os.path.join(depot_tools_dir, 'git.bat')
  if options.dryrun and (not os.path.exists(svn_exe) or \
                         not os.path.exists(git_exe)):
    sys.stdout.write("WARNING: --dry-run assumes that depot_tools" \
                     " is already in your PATH. If it isn't\nplease" \
                     " specify a --depot-tools-dir value.\n")
    svn_exe = 'svn.bat'
    git_exe = 'git.bat'
else:
  svn_exe = 'svn'
  git_exe = 'git'


##
# Manage the cef directory.
##

# Validate the branch value.
if options.branch != 'trunk' and not options.branch.isdigit():
  raise Exception("Invalid branch value: %s" % (options.branch))
cef_branch = options.branch

cef_dir = os.path.join(download_dir, 'cef_' + cef_branch)

# Delete the existing CEF directory if requested.
if options.forceclean and os.path.exists(cef_dir):
  delete_directory(cef_dir)

# Determine the type of CEF checkout to use.
if os.path.exists(cef_dir):
  if is_git_checkout(cef_dir):
    cef_use_git = True
  elif is_svn_checkout(cef_dir):
    cef_use_git = False
  else:
    raise Exception("Not a valid CEF checkout: %s" % (cef_dir))

  if cef_use_git == options.usesvn:
    raise Exception(
        "The existing and requested CEF checkout types do not match")
else:
  cef_use_git = not options.usesvn

# Determine the CEF download URL to use.
if options.url == '':
  if cef_branch == 'trunk':
    if cef_use_git:
      cef_url = cef_git_trunk_url
    else:
      cef_url = cef_svn_trunk_url
  else:
    if cef_use_git:
      cef_url = cef_git_branch_url
    else:
      cef_url = cef_svn_branch_url
  cef_url = cef_url.replace('%1', cef_branch)
else:
  cef_url = options.url

# Verify that the requested CEF URL matches the existing checkout.
if os.path.exists(cef_dir):
  if cef_use_git:
    cef_existing_url = get_git_url(cef_dir)
  else:
    cef_existing_url = get_svn_info(cef_dir)['url']
  if cef_url != cef_existing_url:
    raise Exception('Requested CEF checkout URL %s does not match existing '
                    'URL %s' % (cef_url, cef_existing_url))

msg("CEF Branch: %s" % (cef_branch))
msg("CEF URL: %s" % (cef_url))
msg("CEF Source Directory: %s" % (cef_dir))

# Determine the CEF SVN revision or Git checkout to use.
if options.checkout == '':
  # Use the CEF head revision.
  if cef_use_git:
    cef_checkout = 'origin/master'
  else:
    cef_checkout = get_svn_info(cef_url)['revision']
else:
  cef_checkout = options.checkout
  if not cef_use_git and not cef_checkout.isdigit():
    raise Exception("Invalid SVN revision number: %s" % (cef_checkout))

# Create the CEF checkout if necessary.
if not options.noupdate and not os.path.exists(cef_dir):
  cef_checkout_new = True
  if cef_use_git:
    run('%s clone %s %s' % (git_exe, cef_url, cef_dir), download_dir, \
        depot_tools_dir)
  else:
    run('%s checkout %s -r %s %s' % (svn_exe, cef_url, cef_checkout, cef_dir), \
        download_dir, depot_tools_dir)
else:
  cef_checkout_new = False

# Verify the CEF checkout.
if not options.dryrun:
  if cef_use_git and not is_git_checkout(cef_dir):
    raise Exception('Not a valid git checkout: %s' % (cef_dir))
  if not cef_use_git and not is_svn_checkout(cef_dir):
    raise Exception('Not a valid svn checkout: %s' % (cef_dir))

# Update the CEF checkout if necessary.
if not options.noupdate and os.path.exists(cef_dir):
  if cef_use_git:
    cef_current_hash = get_git_hash(cef_dir, 'HEAD')

    if not cef_checkout_new:
      # Fetch new sources.
      run('%s fetch' % (git_exe), cef_dir, depot_tools_dir)

    cef_desired_hash = get_git_hash(cef_dir, cef_checkout)
    cef_checkout_changed = cef_checkout_new or force_change or \
                           cef_current_hash != cef_desired_hash

    msg("CEF Current Checkout: %s" % (cef_current_hash))
    msg("CEF Current Revision: %s" % \
        (get_git_svn_revision(cef_dir, cef_current_hash)))
    msg("CEF Desired Checkout: %s (%s)" % (cef_desired_hash, cef_checkout))
    msg("CEF Desired Revision: %s" % \
        (get_git_svn_revision(cef_dir, cef_desired_hash)))

    if cef_checkout_changed:
      # Checkout the requested branch.
      run('%s checkout %s%s' %
          (git_exe, ('--force ' if options.forceclean else ''), cef_checkout), \
          cef_dir, depot_tools_dir)
  else:
    cef_current_info = get_svn_info(cef_dir)
    if cef_current_info['url'] != cef_url:
      raise Exception("CEF URL does not match; found %s, expected %s" %
                      (cef_current_info['url'], cef_url))

    cef_checkout_changed = cef_checkout_new or force_change or \
                           cef_current_info['revision'] != cef_checkout

    msg("CEF Current Revision: %s" % (cef_current_info['revision']))
    msg("CEF Desired Revision: %s" % (cef_checkout))

    if cef_checkout_changed and not cef_checkout_new:
      # Update to the requested revision.
      run('%s update -r %s' % (svn_exe, cef_checkout), cef_dir, depot_tools_dir)
else:
  cef_checkout_changed = False


##
# Manage the out directory.
##

out_dir = os.path.join(download_dir, 'out_' + cef_branch)

# Delete the existing out directory if requested.
if options.forceclean and os.path.exists(out_dir):
  delete_directory(out_dir)

msg("CEF Output Directory: %s" % (out_dir))


##
# Manage the chromium directory.
##

# Create the chromium directory if necessary.
chromium_dir = os.path.join(download_dir, 'chromium')
create_directory(chromium_dir)

chromium_src_dir = os.path.join(chromium_dir, 'src')
cef_src_dir = os.path.join(chromium_src_dir, 'cef')
out_src_dir = os.path.join(chromium_src_dir, 'out')

# Create gclient configuration file.
gclient_file = os.path.join(chromium_dir, '.gclient')
if not os.path.exists(gclient_file) or options.forceconfig:
  # Exclude unnecessary directories. Intentionally written without newlines.
  gclient_spec = \
      "solutions = [{"+\
      "u'managed': False,"+\
      "u'name': u'src', "+\
      "u'url': u'https://chromium.googlesource.com/chromium/src.git', "+\
      "u'custom_deps': {"+\
      "u'build': None, "+\
      "u'build/scripts/command_wrapper/bin': None, "+\
      "u'build/scripts/gsd_generate_index': None, "+\
      "u'build/scripts/private/data/reliability': None, "+\
      "u'build/third_party/lighttpd': None, "+\
      "u'commit-queue': None, "+\
      "u'depot_tools': None, "+\
      "u'src/chrome_frame/tools/test/reference_build/chrome': None, "+\
      "u'src/chrome/tools/test/reference_build/chrome_linux': None, "+\
      "u'src/chrome/tools/test/reference_build/chrome_mac': None, "+\
      "u'src/chrome/tools/test/reference_build/chrome_win': None, "+\
      "}, "+\
      "u'deps_file': u'.DEPS.git', "+\
      "u'safesync_url': u''"+\
      "}]"

  msg('Writing file: %s' % gclient_file)
  if not options.dryrun:
    fp = open(gclient_file, 'w')
    fp.write(gclient_spec)
    fp.close()

# Initial Chromium checkout.
if not options.noupdate and not os.path.exists(chromium_src_dir):
  chromium_checkout_new = True
  run("gclient sync --nohooks --with_branch_heads --jobs 16", chromium_dir, \
      depot_tools_dir)
else:
  chromium_checkout_new = False

# Verify the Chromium checkout.
if not options.dryrun and not is_git_checkout(chromium_src_dir):
  raise Exception('Not a valid git checkout: %s' % (chromium_src_dir))

if os.path.exists(chromium_src_dir):
  msg("Chromium URL: %s" % (get_git_url(chromium_src_dir)))

# Determine the Chromium checkout options required by CEF.
chromium_nohooks = False
if options.chromiumcheckout == '':
  # Read the build compatibility file to identify the checkout name.
  compat_path = os.path.join(cef_dir, 'CHROMIUM_BUILD_COMPATIBILITY.txt')
  config = read_config_file(compat_path)

  if 'chromium_checkout' in config:
    chromium_checkout = config['chromium_checkout']
  else:
    raise Exception("Missing chromium_checkout value in %s" % (compat_path))

  # Some branches run hooks using CEF instead of Chromium.
  if 'chromium_nohooks' in config:
    chromium_nohooks = config['chromium_nohooks']
else:
  chromium_checkout = options.chromiumcheckout

# Determine if the Chromium checkout needs to change.
if not options.noupdate and os.path.exists(chromium_src_dir):
  chromium_current_hash = get_git_hash(chromium_src_dir, 'HEAD')
  chromium_desired_hash = get_git_hash(chromium_src_dir, chromium_checkout)
  chromium_checkout_changed = chromium_checkout_new or force_change or \
                              chromium_current_hash != chromium_desired_hash

  msg("Chromium Current Checkout: %s" % (chromium_current_hash))
  msg("Chromium Current Revision: %s" % \
      (get_git_svn_revision(chromium_src_dir, chromium_current_hash)))
  msg("Chromium Desired Checkout: %s (%s)" % \
      (chromium_desired_hash, chromium_checkout))
  msg("Chromium Desired Revision: %s" % \
      (get_git_svn_revision(chromium_src_dir, chromium_desired_hash)))
else:
  chromium_checkout_changed = options.dryrun

# Delete the existing src/cef directory. It will be re-copied from the download
# directory later.
if cef_checkout_changed and os.path.exists(cef_src_dir):
  delete_directory(cef_src_dir)

# Delete the existing src/out directory if requested.
if options.forceclean and os.path.exists(out_src_dir):
  delete_directory(out_src_dir)

# Move the existing src/out directory to the correct location in the download
# directory. It will be moved back from the download directory later.
if os.path.exists(out_src_dir):
  old_branch = read_branch_config_file(out_src_dir)
  if chromium_checkout_changed or old_branch != cef_branch:
    old_out_dir = os.path.join(download_dir, 'out_' + old_branch)
    move_directory(out_src_dir, old_out_dir)

# Update the Chromium checkout.
if chromium_checkout_changed:
  if not chromium_checkout_new:
    if options.forceclean and options.forcecleandeps:
      # Remove all local changes including third-party git checkouts managed by
      # gclient.
      run("%s clean -dffx" % (git_exe), chromium_src_dir, depot_tools_dir)
    else:
      # Revert all changes in the Chromium checkout.
      run("gclient revert --nohooks", chromium_dir, depot_tools_dir)

    # Fetch new sources.
    run("%s fetch" % (git_exe), chromium_src_dir, depot_tools_dir)
    # Also fetch tags, which are required for release branch builds.
    run("%s fetch --tags" % (git_exe), chromium_src_dir, depot_tools_dir)

  # Checkout the requested branch.
  run("%s checkout %s%s" % \
      (git_exe, ('--force ' if options.forceclean else ''), chromium_checkout), \
      chromium_src_dir, depot_tools_dir)

  if cef_branch != 'trunk':
    # Remove the 'src' entry from .DEPS.git for release branches.
    # Otherwise, `gclient sync` will fail.
    deps_path = os.path.join(chromium_src_dir, '.DEPS.git')
    remove_deps_entry(deps_path, "'src'")

  # Update third-party dependencies including branch/tag information.
  run("gclient sync %s%s--with_branch_heads --jobs 16" % \
      (('--reset ' if options.forceclean else ''), \
       ('--nohooks ' if chromium_nohooks else '')), \
      chromium_dir, depot_tools_dir)

  # Delete the src/out directory created by `gclient sync`.
  delete_directory(out_src_dir)

# Restore the src/cef directory.
if os.path.exists(cef_dir) and not os.path.exists(cef_src_dir):
  copy_directory(cef_dir, cef_src_dir)

# Restore the src/out directory.
if os.path.exists(out_dir) and not os.path.exists(out_src_dir):
  move_directory(out_dir, out_src_dir)


##
# Build CEF.
##

if not options.nobuild and (chromium_checkout_changed or \
                            cef_checkout_changed or options.forcebuild or \
                            not os.path.exists(out_src_dir)):
  # Building should also force a distribution.
  options.forcedistrib = True

  # Run the cef_create_projects script to generate Ninja project files.
  os.environ['GYP_GENERATORS'] = 'ninja'
  if options.x64build:
    if 'GYP_DEFINES' in os.environ.keys():
      os.environ['GYP_DEFINES'] = os.environ['GYP_DEFINES'] + ' ' + \
                                  'target_arch=x64'
    else:
      os.environ['GYP_DEFINES'] = 'target_arch=x64'
  path = os.path.join(cef_src_dir, 'cef_create_projects' + script_ext)
  run(path, cef_src_dir, depot_tools_dir)

  # Write the config file for identifying the branch.
  write_branch_config_file(out_src_dir, cef_branch)

  # Build using Ninja.
  command = 'ninja -C '
  if options.verbosebuild:
    command = 'ninja -v -C '
  target = ' cefclient'
  if options.buildtests:
    target = target + ' cef_unittests'
  if platform == 'linux':
    target = target + ' chrome_sandbox'
  build_dir_suffix = ''
  if platform == 'windows' and options.x64build:
    build_dir_suffix = '_x64'

  if not options.nodebugbuild:
    # Make a CEF Debug build.
    run(command + os.path.join('out', 'Debug' + build_dir_suffix) + target, \
        chromium_src_dir, depot_tools_dir,
        os.path.join(download_dir, 'build-%s-debug.log' % (cef_branch)) \
        if options.buildlogfile else None)

  if not options.noreleasebuild:
    # Make a CEF Release build.
    run(command + os.path.join('out', 'Release' + build_dir_suffix) + target, \
        chromium_src_dir, depot_tools_dir,
        os.path.join(download_dir, 'build-%s-release.log' % (cef_branch)) \
        if options.buildlogfile else None)


##
# Create the CEF binary distribution.
##

if not options.nodistrib and (chromium_checkout_changed or \
                              cef_checkout_changed or options.forcedistrib):
  if not options.forceclean and options.cleanartifacts:
    # Clean the artifacts output directory.
    artifacts_path = os.path.join(cef_src_dir, 'binary_distrib')
    delete_directory(artifacts_path)

  # Determine the requested distribution types.
  distrib_types = []
  if options.minimaldistribonly:
    distrib_types.append('minimal')
  elif options.clientdistribonly:
    distrib_types.append('client')
  else:
    distrib_types.append('standard')
    if options.minimaldistrib:
      distrib_types.append('minimal')
    if options.clientdistrib:
      distrib_types.append('client')

  cef_tools_dir = os.path.join(cef_src_dir, 'tools')

  # Create the requested distribution types.
  first_type = True
  for type in distrib_types:
    path = os.path.join(cef_tools_dir, 'make_distrib' + script_ext)
    if options.nodebugbuild or options.noreleasebuild or type != 'standard':
      path = path + ' --allow-partial'
    path = path + ' --ninja-build'
    if options.x64build:
      path = path + ' --x64-build'

    if type == 'minimal':
      path = path + ' --minimal'
    elif type == 'client':
      path = path + ' --client'

    if first_type:
      if options.nodistribdocs:
        path = path + ' --no-docs'
      if options.nodistribarchive:
        path = path + ' --no-archive'
      first_type = False
    else:
      # Don't create the symbol archives or documentation more than once.
      path = path + ' --no-symbols --no-docs'

    # Create the distribution.
    run(path, cef_tools_dir, depot_tools_dir)
tools/automate/gitsvnmirror.py (new file, 241 lines)
@@ -0,0 +1,241 @@
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import httplib
from optparse import OptionParser
import os
import re
import shlex
import subprocess
import sys
import urllib
import urlparse

# Cannot be loaded as a module.
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

def run(command_line, working_dir):
  """ Run the specified command line. """
  if not options.quiet:
    print '-------- Running "%s" in "%s"...' % (command_line, working_dir)
  if not options.dryrun:
    args = shlex.split(command_line.replace('\\', '\\\\'))
    return subprocess.check_call(args, cwd=working_dir, env=os.environ,
                                 shell=(sys.platform == 'win32'))

def fail(message):
  """ Exit the script due to an execution failure. """
  print message
  sys.exit(1)

def url_request(url, method, headers, body, expected_response):
  """ Execute an arbitrary request. """
  parsed_url = urlparse.urlparse(url)
  if parsed_url.scheme == "http":
    connection = httplib.HTTPConnection(parsed_url.netloc)
  elif parsed_url.scheme == "https":
    connection = httplib.HTTPSConnection(parsed_url.netloc)
  else:
    print 'Unsupported URL format for %s' % url
    return None

  connection.putrequest(method, url)

  if not headers is None:
    for key, val in headers.iteritems():
      connection.putheader(key, val)
  if not body is None:
    connection.putheader('Content-Length', len(body))
  connection.endheaders()

  if not body is None:
    connection.send(body)

  response = connection.getresponse()
  if response.status == expected_response:
    return response.read()
  else:
    print 'URL %s returned unexpected response code %d' % \
          (url, response.status)
    return None

def url_propfind(url, depth, body):
  """ Execute a PROPFIND request. """
  return url_request(url, 'PROPFIND',
                     {'Depth': depth, 'Content-Type': 'text/xml'}, body, 207)

def url_get(url):
  """ Execute a GET request. """
  return url_request(url, 'GET', None, None, 200)

def extract_string(str, start, end):
  """ Returns the string between start and end. """
  s = str.find(start)
  if s < 0:
    return None
  slen = len(start)
  e = str.find(end, s + slen)
  if e < 0:
    return None
  return str[s + slen:e]

def extract_int(str, start, end):
  """ Returns the integer between start and end. """
  val = extract_string(str, start, end)
  if not val is None and re.match('^[0-9]{1,}$', val):
    return int(val)
  return None

def read_file(name, normalize = True):
  """ Read a file. """
  f = None
  try:
    f = open(name, 'r')
    # Read the data.
    data = f.read()
    if normalize:
      # Normalize line endings.
      data = data.replace("\r\n", "\n")
    return data
  except IOError, (errno, strerror):
    print 'Failed to read file %s: %s' % (name, strerror)
    return None
  finally:
    if not f is None:
      f.close()

def write_file(name, data):
  """ Write a file. """
  f = None
  try:
    f = open(name, 'w')
    f.write(data)
    return True
  except IOError, (errno, strerror):
    print 'Failed to write file %s: %s' % (name, strerror)
    return False
  finally:
    if not f is None:
      f.close()

def read_cache_file(name, args):
  """ Read and parse a cache file (key=value pairs, one per line). """
  content = read_file(name)
  if content is None:
    return False
  lines = content.split("\n")
  for line in lines:
    parts = line.split('=', 1)
    if len(parts) == 2:
      args[parts[0]] = parts[1]
  return True

def write_cache_file(name, args):
  """ Write a cache file (key=value pairs, one per line). """
  data = ''
  for key, val in args.iteritems():
    data = data + key + '=' + str(val) + "\n"
  return write_file(name, data)

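# For reference, the cache file written above holds one key=value pair per
# line, e.g. (revision number is illustrative):
#
#   last_revision=1993
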

# Parse command-line options.
disc = """This utility creates and synchronizes git-svn clones of CEF SVN
repositories."""

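# Example invocation (illustrative values only):
#
#   python gitsvnmirror.py --storage-dir=/data/cefmirror \
#          --branch=trunk/cef3 \
#          --git-repo=user@example.com:mirrors/trunk-cef3.git
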
parser = OptionParser(description=disc)
parser.add_option('--storage-dir', dest='storagedir', metavar='DIR',
                  help='local directory where data will be stored')
parser.add_option('--branch', dest='branch',
                  help='CEF branch to clone ' +
                       '(trunk/cef3, branches/1453/cef3, etc)')
parser.add_option('--git-repo', dest='gitrepo',
                  help='remote repo where the git data will be pushed ' +
                       '(user@domain:path/to/repo.git)')
parser.add_option('--force',
                  action='store_true', dest='force', default=False,
                  help="force the run even if the revision hasn't changed")
parser.add_option('--dry-run',
                  action='store_true', dest='dryrun', default=False,
                  help="output commands without executing them")
parser.add_option('-q', '--quiet',
                  action='store_true', dest='quiet', default=False,
                  help='do not output detailed status information')
(options, args) = parser.parse_args()

# Required options.
if options.storagedir is None or options.branch is None or \
   options.gitrepo is None:
  parser.print_help(sys.stderr)
  sys.exit(1)

# Validate the git repo format. Should be user@domain:path/to/repo.git.
if not re.match(
    '^[a-zA-Z0-9_\-]{1,}@[a-zA-Z0-9\-\.]{1,}:[a-zA-Z0-9\-_/]{1,}\.git$',
    options.gitrepo):
  fail('Invalid git repo format: %s' % options.gitrepo)

svn_url = 'https://chromiumembedded.googlecode.com/svn/' + options.branch

# Verify that the requested branch is a valid CEF root directory.
value = url_get(svn_url + '/CHROMIUM_BUILD_COMPATIBILITY.txt')
if value is None:
  fail('Invalid branch "%s"' % options.branch)

# Retrieve the most recent revision for the branch.
revision = None
request = '<?xml version="1.0" encoding="utf-8"?><propfind xmlns="DAV:">' + \
          '<prop><version-name xmlns="DAV:"/></prop></propfind>'
value = url_propfind(svn_url, 0, request)
if not value is None:
  revision = extract_int(value, '<lp1:version-name>', '</lp1:version-name>')
if revision is None:
  fail('Failed to discover revision for branch "%s"' % options.branch)

branch_path_comp = options.branch.replace('/', '-')

# Create the branch storage directory if it doesn't already exist.
branch_dir = os.path.join(options.storagedir, branch_path_comp)
if not os.path.exists(branch_dir):
  os.makedirs(branch_dir)

# Default cache configuration.
cache_config = {
  'last_revision': 0,
}

# Create the authors.txt file if it doesn't already exist.
authors_file_path = os.path.join(options.storagedir, 'authors.txt')
if not os.path.exists(authors_file_path):
  content = 'magreenblatt@gmail.com = ' + \
            'Marshall Greenblatt <magreenblatt@gmail.com>'
  if not write_file(authors_file_path, content):
    fail('Failed to create authors.txt file: %s' % authors_file_path)

# Read the cache file if it exists.
cache_file_path = os.path.join(branch_dir, 'cache.txt')
if os.path.exists(cache_file_path):
  if not read_cache_file(cache_file_path, cache_config):
    print 'Failed to read cache.txt file %s' % cache_file_path

# Check if the revision has changed.
if not options.force and int(cache_config['last_revision']) == revision:
  if not options.quiet:
    print 'Already at revision %d' % revision
  sys.exit()

repo_dir = os.path.join(branch_dir, branch_path_comp)
if not os.path.exists(repo_dir):
  # Create the git repository.
  run('git svn clone -A %s %s %s' % (authors_file_path, svn_url, repo_dir),
      branch_dir)
  run('git remote add origin %s' % options.gitrepo, repo_dir)
else:
  # Rebase the git repository.
  run('git svn rebase --fetch-all -A %s' % authors_file_path, repo_dir)

run('git push origin --all', repo_dir)

# Write the cache file.
cache_config['last_revision'] = revision
if not write_cache_file(cache_file_path, cache_config):
  print 'Failed to write cache file %s' % cache_file_path
tools/cef_api_hash.py (new file, 245 lines)
@@ -0,0 +1,245 @@
# Copyright (c) 2013 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from file_util import *
import os
import re
import shutil
import string
import sys
import textwrap
import time
import itertools
import hashlib


class cef_api_hash:
  """ CEF API hash calculator """

  def __init__(self, headerdir, debugdir = None, verbose = False):
    if headerdir is None or len(headerdir) == 0:
      raise AssertionError("headerdir is not specified")

    self.__headerdir = headerdir
    self.__debugdir = debugdir
    self.__verbose = verbose
    self.__debug_enabled = not (self.__debugdir is None) and \
                           len(self.__debugdir) > 0

    self.platforms = [ "windows", "macosx", "linux" ]

    self.platform_files = {
      "windows": [
        "internal/cef_types_win.h",
      ],
      "macosx": [
        "internal/cef_types_mac.h",
      ],
      "linux": [
        "internal/cef_types_linux.h",
      ]
    }

    self.included_files = [
    ]

    self.excluded_files = [
      "cef_version.h",
      "internal/cef_tuple.h",
      "internal/cef_types_wrappers.h",
      "internal/cef_string_wrappers.h",
      "internal/cef_win.h",
      "internal/cef_mac.h",
      "internal/cef_linux.h",
    ]

  def calculate(self):
    filenames = [filename for filename in self.__get_filenames()
                 if not filename in self.excluded_files]

    objects = []
    for filename in filenames:
      if self.__verbose:
        print "Processing " + filename + "..."
      content = read_file(os.path.join(self.__headerdir, filename), True)
      platforms = list([p for p in self.platforms
                        if self.__is_platform_filename(filename, p)])

      # cef_string.h is parsed as a special case: grab only the defined
      # CEF_STRING_TYPE_xxx declarations.
      content_objects = None
      if filename == "internal/cef_string.h":
        content_objects = self.__parse_string_type(content)
      else:
        content_objects = self.__parse_objects(content)

      for o in content_objects:
        o["text"] = self.__prepare_text(o["text"])
        o["platforms"] = platforms
        o["filename"] = filename
        objects.append(o)

    # Objects are sorted including the filename to make the universal hash
    # stable.
    objects = sorted(objects, key = lambda o: o["name"] + "@" + o["filename"])

    if self.__debug_enabled:
      namelen = max([len(o["name"]) for o in objects])
      filenamelen = max([len(o["filename"]) for o in objects])
      dumpsig = []
      for o in objects:
        dumpsig.append(format(o["name"], str(namelen) + "s") + "|" +
                       format(o["filename"], "" + str(filenamelen) + "s") +
                       "|" + o["text"])
      self.__write_debug_file("objects.txt", dumpsig)

    revisions = { }

    for platform in itertools.chain(["universal"], self.platforms):
      sig = self.__get_final_sig(objects, platform)
      if self.__debug_enabled:
        self.__write_debug_file(platform + ".sig", sig)
      rev = hashlib.sha1(sig).digest()
      revstr = ''.join(format(ord(i), '0>2x') for i in rev)
      revisions[platform] = revstr

    return revisions

  def __parse_objects(self, content):
    """ Returns array of objects in content file. """
    objects = []
    content = re.sub("//.*\n", "", content)

    # Function declarations.
    for m in re.finditer("\nCEF_EXPORT\s+?.*?\s+?(\w+)\s*?\(.*?\)\s*?;",
                         content, flags = re.DOTALL):
      object = {
        "name": m.group(1),
        "text": m.group(0).strip()
      }
      objects.append(object)

    # Structs.
    for m in re.finditer("\ntypedef\s+?struct\s+?(\w+)\s+?\{.*?\}\s+?(\w+)\s*?;",
                         content, flags = re.DOTALL):
      object = {
        "name": m.group(2),
        "text": m.group(0).strip()
      }
      objects.append(object)

    # Enums.
    for m in re.finditer("\nenum\s+?(\w+)\s+?\{.*?\}\s*?;", content,
                         flags = re.DOTALL):
      object = {
        "name": m.group(1),
        "text": m.group(0).strip()
      }
      objects.append(object)

    # Typedefs.
    for m in re.finditer("\ntypedef\s+?.*?\s+(\w+);", content, flags = 0):
      object = {
        "name": m.group(1),
        "text": m.group(0).strip()
      }
      objects.append(object)

    return objects

  def __parse_string_type(self, content):
    """ Grab defined CEF_STRING_TYPE_xxx """
    objects = []
    for m in re.finditer("\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n",
                         content, flags = 0):
      object = {
        "name": m.group(1),
        "text": m.group(0),
      }
      objects.append(object)
    return objects

  def __prepare_text(self, text):
    text = text.strip()
    text = re.sub("\s+", " ", text)
    text = re.sub("\(\s+", "(", text)
    return text

  def __get_final_sig(self, objects, platform):
    sig = []

    for o in objects:
      if platform == "universal" or platform in o["platforms"]:
        sig.append(o["text"])

    return "\n".join(sig)

  def __get_filenames(self):
    """ Returns file names to be processed, relative to headerdir """
    headers = [os.path.join(self.__headerdir, filename)
               for filename in self.included_files]
    headers = itertools.chain(headers,
        get_files(os.path.join(self.__headerdir, "capi", "*.h")))
    headers = itertools.chain(headers,
        get_files(os.path.join(self.__headerdir, "internal", "*.h")))

    for v in self.platform_files.values():
      headers = itertools.chain(headers,
          [os.path.join(self.__headerdir, f) for f in v])

    normalized = [os.path.relpath(filename, self.__headerdir)
                  for filename in headers]
    normalized = [f.replace('\\', '/').lower() for f in normalized]

    return list(set(normalized))

  def __is_platform_filename(self, filename, platform):
    if platform == "universal":
      return True
    if not platform in self.platform_files:
      return False
    listed = False
    for p in self.platforms:
      if filename in self.platform_files[p]:
        if p == platform:
          return True
        else:
          listed = True
    return not listed

  def __write_debug_file(self, filename, content):
    make_dir(self.__debugdir)
    outfile = os.path.join(self.__debugdir, filename)
    dir = os.path.dirname(outfile)
    make_dir(dir)
    if not isinstance(content, basestring):
      content = "\n".join(content)
    write_file(outfile, content)


if __name__ == "__main__":
  from optparse import OptionParser
  import time

  disc = """
This utility calculates the CEF API hash.
"""

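  # Example invocation (paths are placeholders):
  #
  #   python cef_api_hash.py --cpp-header-dir=include --debug-dir=hash_debug -v
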
  parser = OptionParser(description=disc)
  parser.add_option('--cpp-header-dir', dest='cppheaderdir', metavar='DIR',
                    help='input directory for C++ header files [required]')
  parser.add_option('--debug-dir', dest='debugdir', metavar='DIR',
                    help='intermediate directory for easy debugging')
  parser.add_option('-v', '--verbose',
                    action='store_true', dest='verbose', default=False,
                    help='output detailed status information')
  (options, args) = parser.parse_args()

  # The cpp-header-dir option is required.
  if options.cppheaderdir is None:
    parser.print_help(sys.stdout)
    sys.exit()

  # Calculate the hashes.
  c_start_time = time.time()

  calc = cef_api_hash(options.cppheaderdir, options.debugdir, options.verbose)
  revisions = calc.calculate()

  c_completed_in = time.time() - c_start_time

  print "{"
  for k in sorted(revisions.keys()):
    print format("\"" + k + "\"", ">12s") + ": \"" + revisions[k] + "\""
  print "}"
  # print
  # print 'Completed in: ' + str(c_completed_in)
  # print

  # print "Press any key to continue...";
  # sys.stdin.readline();
tools/cef_parser.py (new file, 1870 lines)
File diff suppressed because it is too large.
tools/check_style.bat (new file, 2 lines)
@@ -0,0 +1,2 @@
@echo off
python.bat check_style.py %*
tools/check_style.py (new file, 129 lines)
@@ -0,0 +1,129 @@
# Copyright (c) 2012 The Chromium Embedded Framework Authors.
# Portions copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os, re, string, sys
from file_util import *
import git_util as git
import svn_util as svn

# Script directory.
script_dir = os.path.dirname(__file__)

# CEF root directory.
cef_dir = os.path.abspath(os.path.join(script_dir, os.pardir))

# Valid extensions for files we want to lint.
DEFAULT_LINT_WHITELIST_REGEX = r"(.*\.cpp|.*\.cc|.*\.h)"
DEFAULT_LINT_BLACKLIST_REGEX = r"$^"

try:
  # depot_tools may already be in the import path.
  import cpplint
  import cpplint_chromium
except ImportError, e:
  # Search the PATH environment variable to find the depot_tools folder.
  depot_tools = None
  paths = os.environ.get('PATH').split(os.pathsep)
  for path in paths:
    if os.path.exists(os.path.join(path, 'cpplint_chromium.py')):
      depot_tools = path
      break

  if depot_tools is None:
    print >> sys.stderr, 'Error: could not find depot_tools in PATH.'
    sys.exit(2)

  # Add depot_tools to the import path.
  sys.path.append(depot_tools)
  import cpplint
  import cpplint_chromium

# The default implementation of FileInfo.RepositoryName looks for the top-most
# directory that contains a .git or .svn folder. This is a problem for CEF
# because the CEF root folder (which may have an arbitrary name) lives inside
# the Chromium src folder. Reimplement in a dumb but sane way.
def patch_RepositoryName(self):
  fullname = self.FullName()
  project_dir = os.path.dirname(fullname)
  if os.path.exists(fullname):
    root_dir = project_dir
    while os.path.basename(project_dir) != "src":
      project_dir = os.path.dirname(project_dir)
    prefix = os.path.commonprefix([root_dir, project_dir])
    components = fullname[len(prefix) + 1:].split('/')
    return string.join(["cef"] + components[1:], '/')
  return fullname

def check_style(args, white_list = None, black_list = None):
  """ Execute cpplint with the specified arguments. """

  # Apply patches.
  cpplint.FileInfo.RepositoryName = patch_RepositoryName

  # Process cpplint arguments.
  filenames = cpplint.ParseArguments(args)

  if not white_list:
    white_list = DEFAULT_LINT_WHITELIST_REGEX
  white_regex = re.compile(white_list)
  if not black_list:
    black_list = DEFAULT_LINT_BLACKLIST_REGEX
  black_regex = re.compile(black_list)

  extra_check_functions = [cpplint_chromium.CheckPointerDeclarationWhitespace]

  for filename in filenames:
    if white_regex.match(filename):
      if black_regex.match(filename):
        print "Ignoring file %s" % filename
      else:
        cpplint.ProcessFile(filename, cpplint._cpplint_state.verbose_level,
                            extra_check_functions)
    else:
      print "Skipping file %s" % filename

  print "Total errors found: %d\n" % cpplint._cpplint_state.error_count
  return 1

def get_changed_files():
  """ Retrieve the list of changed files. """
  try:
    return svn.get_changed_files(cef_dir)
  except:
    return git.get_changed_files(cef_dir)

if __name__ == "__main__":
  # Start with the default parameters.
  args = [
    # * Disable the 'build/class' test because it errors uselessly with C
    #   structure pointers and template declarations.
    # * Disable the 'runtime/references' test because CEF allows non-const
    #   arguments passed by reference.
    # * Disable the 'runtime/sizeof' test because it has a high number of
    #   false positives and adds marginal value.
    '--filter=-build/class,-runtime/references,-runtime/sizeof',
  ]

  # Add anything passed on the command-line.
  args += sys.argv[1:]

|
||||
# Pre-process the arguments before passing to the linter.
|
||||
new_args = []
|
||||
changed = []
|
||||
for arg in args:
|
||||
if arg == '--changed':
|
||||
# Add any changed files.
|
||||
changed = get_changed_files()
|
||||
elif arg[:2] == '--' or not os.path.isdir(arg):
|
||||
# Pass argument unchanged.
|
||||
new_args.append(arg)
|
||||
else:
|
||||
# Add all files in the directory.
|
||||
new_args += get_files(os.path.join(arg, '*'))
|
||||
|
||||
if len(changed) > 0:
|
||||
new_args += changed
|
||||
|
||||
check_style(new_args)
|
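The patched RepositoryName above reports every linted path as if it were
rooted at "cef", regardless of what the CEF checkout directory is actually
called. A minimal standalone sketch of that mapping, using a hypothetical
checkout path (only the position of the 'src' directory matters):

import os

def cef_repository_name(fullname):
  # Walk up from the file until the Chromium 'src' directory is found, then
  # report the remainder of the path as if it were rooted at "cef".
  project_dir = os.path.dirname(fullname)
  while os.path.basename(project_dir) != 'src':
    project_dir = os.path.dirname(project_dir)
  components = fullname[len(project_dir) + 1:].split('/')
  return '/'.join(['cef'] + components[1:])

print cef_repository_name('/b/src/cef-root/libcef/browser/browser_host_impl.cc')
# Prints: cef/libcef/browser/browser_host_impl.cc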
2
tools/check_style.sh
Executable file
@@ -0,0 +1,2 @@
#!/bin/sh
python check_style.py $@
118
tools/combine_libs.py
Normal file
@@ -0,0 +1,118 @@
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# TODO(slightlyoff): move to using shared version of this script.

'''This script makes it easy to combine libs and object files to a new lib,
optionally removing some of the object files in the input libs by regular
expression matching.
For usage information, run the script with a --help argument.
'''
import optparse
import os
import re
import subprocess
import sys


def Shell(*args):
  '''Runs the program and args in args, returns the output from the program.'''
  process = subprocess.Popen(args,
                             stdin = None,
                             stdout = subprocess.PIPE,
                             stderr = subprocess.STDOUT)
  output = process.stdout.readlines()
  process.wait()
  retcode = process.returncode
  if retcode != 0:
    raise RuntimeError('%s exited with status %d' % (args[0], retcode))
  return output


def CollectRemovals(remove_re, inputs):
  '''Returns a list of all object files in inputs that match remove_re.'''
  removals = []
  for input in inputs:
    output = Shell('lib.exe', '/list', input)

    for line in output:
      line = line.rstrip()
      if remove_re.search(line):
        removals.append(line)

  return removals


def CombineLibraries(output, remove_re, inputs):
  '''Combines all the libraries and objects in inputs, while removing any
  object files that match remove_re.
  '''
  removals = []
  if remove_re:
    removals = CollectRemovals(remove_re, inputs)

  if len(removals) > 0:
    print 'Removals: ', removals

  args = ['lib.exe', '/out:%s' % output]
  args += ['/remove:%s' % obj for obj in removals]
  args += inputs
  Shell(*args)


USAGE = '''usage: %prog [options] <lib or obj>+

Combines input libraries or objects into an output library, while removing
any object file (in the input libraries) that matches a given regular
expression.
'''

def GetOptionParser():
  parser = optparse.OptionParser(USAGE)
  parser.add_option('-o', '--output', dest = 'output',
                    help = 'write to this output library')
  parser.add_option('-r', '--remove', dest = 'remove',
                    help = 'object files matching this regexp will be removed '
                           'from the output library')
  return parser


def Main():
  '''Main function for this script'''
  parser = GetOptionParser()
  (opt, args) = parser.parse_args()
  output = opt.output
  remove = opt.remove
  if not output:
    parser.error('You must specify an output file')

  if not args:
    parser.error('You must specify at least one object or library')

  output = output.strip()
  if remove:
    remove = remove.strip()

  if remove:
    try:
      remove_re = re.compile(opt.remove)
    except:
      parser.error('%s is not a valid regular expression' % opt.remove)
  else:
    remove_re = None

  if sys.platform != 'win32' and sys.platform != 'cygwin':
    parser.error('this script only works on Windows for now')

  # If this is set, we can't capture lib.exe's output.
  if 'VS_UNICODE_OUTPUT' in os.environ:
    del os.environ['VS_UNICODE_OUTPUT']

  CombineLibraries(output, remove_re, args)
  return 0


if __name__ == '__main__':
  sys.exit(Main())
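A typical invocation of combine_libs.py, driven from another Python 2 script,
might look like the following. The file names and the removal pattern are
hypothetical, and lib.exe must be available, so this only applies on Windows:

import subprocess

# Merge two libraries into one while dropping SSE2-specialized objects
# (hypothetical names, for illustration only).
subprocess.check_call([
  'python', 'tools/combine_libs.py',
  '-o', 'combined.lib',
  '-r', r'_sse2\.obj$',
  'first.lib', 'second.lib',
])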
13
tools/date_util.py
Normal file
@@ -0,0 +1,13 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import datetime

def get_year():
  """ Returns the current year. """
  return str(datetime.datetime.now().year)

def get_date():
  """ Returns the current date. """
  return datetime.datetime.now().strftime('%B %d, %Y')
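Both helpers return plain strings, for example:

import date_util

print date_util.get_year()  # e.g. '2014'
print date_util.get_date()  # e.g. 'June 01, 2014' (the '%B %d, %Y' format)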
5
tools/distrib/README-TRANSFER.txt
Normal file
@@ -0,0 +1,5 @@
Files in this directory have been copied from other locations in the Chromium
source tree. They have been modified only to the extent necessary to work in
the CEF Binary Distribution directory structure. Below is a listing of the
original file locations.
12
tools/distrib/README.client.txt
Normal file
@@ -0,0 +1,12 @@
CONTENTS
--------

Release     Contains a release build of the cefclient sample application.


USAGE
-----

Please visit the CEF Website for additional usage information.

http://code.google.com/p/chromiumembedded
8
tools/distrib/README.footer.txt
Normal file
@@ -0,0 +1,8 @@
LICENSING
---------

The CEF project is BSD licensed. Please read the LICENSE.txt file included with
this binary distribution for licensing terms and conditions. Other software
included in this distribution is provided under other licenses. Please visit
"about:credits" in a CEF-based application for complete Chromium and third-party
licensing information.
14
tools/distrib/README.header.txt
Normal file
@@ -0,0 +1,14 @@
Chromium Embedded Framework (CEF) $DISTRIB_TYPE$ Binary Distribution for $PLATFORM$
-------------------------------------------------------------------------------

Date:             $DATE$

CEF Version:      $CEF_VER$
CEF URL:          $CEF_URL$
                  @$CEF_REV$

Chromium Version: $CHROMIUM_VER$
Chromium URL:     $CHROMIUM_URL$
                  @$CHROMIUM_REV$

$DISTRIB_DESC$
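The $NAME$ tokens above are placeholders that the distribution tooling replaces
with concrete values. A hypothetical Python 2 sketch of that substitution (the
value set shown here is invented for illustration):

import datetime

values = {
  'DISTRIB_TYPE': 'Standard',  # hypothetical value
  'PLATFORM': 'Windows',       # hypothetical value
  'DATE': datetime.datetime.now().strftime('%B %d, %Y'),
}
header = open('README.header.txt').read()
for name, value in values.items():
  header = header.replace('$' + name + '$', value)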
705
tools/distrib/cefclient.gyp
Normal file
@@ -0,0 +1,705 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

{
  'variables': {
    'chromium_code': 1,
    'framework_name': 'Chromium Embedded Framework',
    'linux_use_gold_binary': 0,
    'linux_use_gold_flags': 0,
    # Don't use clang with CEF binary releases due to Chromium tree structure dependency.
    'clang': 0,
    'conditions': [
      ['sysroot!=""', {
        'pkg-config': './pkg-config-wrapper "<(sysroot)" "<(target_arch)"',
      }, {
        'pkg-config': 'pkg-config'
      }],
      [ 'OS=="win"', {
        'multi_threaded_dll%': 0,
      }],
    ]
  },
  'includes': [
    # Bring in the source file lists for cefclient.
    'cef_paths2.gypi',
  ],
  'targets': [
    {
      'target_name': 'cefclient',
      'type': 'executable',
      'mac_bundle': 1,
      'msvs_guid': '6617FED9-C5D4-4907-BF55-A90062A6683F',
      'dependencies': [
        'libcef_dll_wrapper',
      ],
      'defines': [
        'USING_CEF_SHARED',
      ],
      'include_dirs': [
        '.',
      ],
      'sources': [
        '<@(includes_common)',
        '<@(includes_wrapper)',
        '<@(cefclient_sources_common)',
      ],
      'mac_bundle_resources': [
        '<@(cefclient_bundle_resources_mac)',
      ],
      'mac_bundle_resources!': [
        # TODO(mark): Come up with a fancier way to do this (mac_info_plist?)
        # that automatically sets the correct INFOPLIST_FILE setting and adds
        # the file to a source group.
        'cefclient/mac/Info.plist',
      ],
      'xcode_settings': {
        'INFOPLIST_FILE': 'cefclient/mac/Info.plist',
        # Target build path.
        'SYMROOT': 'xcodebuild',
      },
      'conditions': [
        ['OS=="win"', {
          'variables': {
            'win_exe_compatibility_manifest': 'cefclient/compatibility.manifest',
          },
          'actions': [
            {
              'action_name': 'copy_resources',
              'msvs_cygwin_shell': 0,
              'inputs': [],
              'outputs': [
                '<(PRODUCT_DIR)/copy_resources.stamp',
              ],
              'action': [
                'xcopy /efy',
                'Resources\*',
                '$(OutDir)',
              ],
            },
            {
              'action_name': 'copy_executables',
              'msvs_cygwin_shell': 0,
              'inputs': [],
              'outputs': [
                '<(PRODUCT_DIR)/copy_executables.stamp',
              ],
              'action': [
                'xcopy /efy',
                '$(ConfigurationName)\*.exe',
                '$(OutDir)',
              ],
            },
            {
              'action_name': 'copy_libraries',
              'msvs_cygwin_shell': 0,
              'inputs': [],
              'outputs': [
                '<(PRODUCT_DIR)/copy_libraries.stamp',
              ],
              'action': [
                'xcopy /efy',
                '$(ConfigurationName)\*.dll',
                '$(OutDir)',
              ],
            },
          ],
          'msvs_settings': {
            'VCLinkerTool': {
              # Set /SUBSYSTEM:WINDOWS.
              'SubSystem': '2',
            },
            'VCManifestTool': {
              'AdditionalManifestFiles': [
                'cefclient/cefclient.exe.manifest',
              ],
            },
          },
          'link_settings': {
            'libraries': [
              '-lcomctl32.lib',
              '-lshlwapi.lib',
              '-lrpcrt4.lib',
              '-lopengl32.lib',
              '-lglu32.lib',
              '-l$(ConfigurationName)/libcef.lib',
            ],
          },
          'library_dirs': [
            # Needed to find cef_sandbox.lib using #pragma comment(lib, ...).
            '$(ConfigurationName)',
          ],
          'sources': [
            '<@(includes_win)',
            '<@(cefclient_sources_win)',
          ],
        }],
        [ 'OS=="win" and multi_threaded_dll', {
          'configurations': {
            'Debug': {
              'msvs_settings': {
                'VCCLCompilerTool': {
                  'RuntimeLibrary': 3,
                  'WarnAsError': 'false',
                },
              },
            },
            'Release': {
              'msvs_settings': {
                'VCCLCompilerTool': {
                  'RuntimeLibrary': 2,
                  'WarnAsError': 'false',
                },
              },
            }
          }
        }],
        [ 'OS=="mac"', {
          'product_name': 'cefclient',
          'dependencies': [
            'cefclient_helper_app',
          ],
          'copies': [
            {
              # Add libraries and helper app.
              'destination': '<(PRODUCT_DIR)/cefclient.app/Contents/Frameworks',
              'files': [
                '<(PRODUCT_DIR)/cefclient Helper.app',
              ],
            },
          ],
          'postbuilds': [
            {
              'postbuild_name': 'Add framework',
              'action': [
                'cp',
                '-Rf',
                '${CONFIGURATION}/<(framework_name).framework',
                '${BUILT_PRODUCTS_DIR}/${PRODUCT_NAME}.app/Contents/Frameworks/'
              ],
            },
            {
              'postbuild_name': 'Fix Framework Link',
              'action': [
                'install_name_tool',
                '-change',
                '@executable_path/<(framework_name)',
                '@executable_path/../Frameworks/<(framework_name).framework/<(framework_name)',
                '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
              ],
            },
            {
              # This postbuild step is responsible for creating the following
              # helpers:
              #
              # cefclient Helper EH.app and cefclient Helper NP.app are created
              # from cefclient Helper.app.
              #
              # The EH helper is marked for an executable heap. The NP helper
              # is marked for no PIE (ASLR).
              'postbuild_name': 'Make More Helpers',
              'action': [
                'tools/make_more_helpers.sh',
                'Frameworks',
                'cefclient',
              ],
            },
          ],
          'link_settings': {
            'libraries': [
              '$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
              '$(SDKROOT)/System/Library/Frameworks/OpenGL.framework',
              '$(CONFIGURATION)/<(framework_name).framework/<(framework_name)',
            ],
          },
          'sources': [
            '<@(includes_mac)',
            '<@(cefclient_sources_mac)',
          ],
        }],
        [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
          'copies': [
            {
              'destination': '<(PRODUCT_DIR)/files',
              'files': [
                '<@(cefclient_bundle_resources_linux)',
              ],
            },
            {
              'destination': '<(PRODUCT_DIR)/',
              'files': [
                'Resources/cef.pak',
                'Resources/cef_100_percent.pak',
                'Resources/cef_200_percent.pak',
                'Resources/devtools_resources.pak',
                'Resources/icudtl.dat',
                'Resources/locales/',
                'Resources/natives_blob.bin',
                'Resources/snapshot_blob.bin',
                '$(BUILDTYPE)/chrome-sandbox',
                '$(BUILDTYPE)/libcef.so',
                '$(BUILDTYPE)/libffmpegsumo.so',
                '$(BUILDTYPE)/libpdf.so',
              ],
            },
          ],
          'dependencies': [
            'gtk',
            'gtkglext',
          ],
          'link_settings': {
            'ldflags': [
              # Look for libcef.so in the current directory. Path can also be
              # specified using the LD_LIBRARY_PATH environment variable.
              '-Wl,-rpath,.',
            ],
            'libraries': [
              "$(BUILDTYPE)/libcef.so",
              "-lX11",
            ],
          },
          'sources': [
            '<@(includes_linux)',
            '<@(cefclient_sources_linux)',
          ],
        }],
      ],
    },
    {
      'target_name': 'cefsimple',
      'type': 'executable',
      'mac_bundle': 1,
      'msvs_guid': '5390D142-473F-49A0-BC5E-5F6C609EEDB6',
      'dependencies': [
        'libcef_dll_wrapper',
      ],
      'defines': [
        'USING_CEF_SHARED',
      ],
      'include_dirs': [
        '.',
      ],
      'sources': [
        '<@(includes_common)',
        '<@(includes_wrapper)',
        '<@(cefsimple_sources_common)',
      ],
      'mac_bundle_resources': [
        '<@(cefsimple_bundle_resources_mac)',
      ],
      'mac_bundle_resources!': [
        # TODO(mark): Come up with a fancier way to do this (mac_info_plist?)
        # that automatically sets the correct INFOPLIST_FILE setting and adds
        # the file to a source group.
        'cefsimple/mac/Info.plist',
      ],
      'xcode_settings': {
        'INFOPLIST_FILE': 'cefsimple/mac/Info.plist',
        # Target build path.
        'SYMROOT': 'xcodebuild',
      },
      'conditions': [
        ['OS=="win"', {
          'variables': {
            'win_exe_compatibility_manifest': 'cefsimple/compatibility.manifest',
          },
          'actions': [
            {
              'action_name': 'copy_resources',
              'msvs_cygwin_shell': 0,
              'inputs': [],
              'outputs': [
                '<(PRODUCT_DIR)/copy_resources.stamp',
              ],
              'action': [
                'xcopy /efy',
                'Resources\*',
                '$(OutDir)',
              ],
            },
            {
              'action_name': 'copy_executables',
              'msvs_cygwin_shell': 0,
              'inputs': [],
              'outputs': [
                '<(PRODUCT_DIR)/copy_executables.stamp',
              ],
              'action': [
                'xcopy /efy',
                '$(ConfigurationName)\*.exe',
                '$(OutDir)',
              ],
            },
            {
              'action_name': 'copy_libraries',
              'msvs_cygwin_shell': 0,
              'inputs': [],
              'outputs': [
                '<(PRODUCT_DIR)/copy_libraries.stamp',
              ],
              'action': [
                'xcopy /efy',
                '$(ConfigurationName)\*.dll',
                '$(OutDir)',
              ],
            },
          ],
          'msvs_settings': {
            'VCLinkerTool': {
              # Set /SUBSYSTEM:WINDOWS.
              'SubSystem': '2',
            },
            'VCManifestTool': {
              'AdditionalManifestFiles': [
                'cefsimple/cefsimple.exe.manifest',
              ],
            },
          },
          'link_settings': {
            'libraries': [
              '-lcomctl32.lib',
              '-lshlwapi.lib',
              '-lrpcrt4.lib',
              '-l$(ConfigurationName)/libcef.lib',
            ],
          },
          'library_dirs': [
            # Needed to find cef_sandbox.lib using #pragma comment(lib, ...).
            '$(ConfigurationName)',
          ],
          'sources': [
            '<@(includes_win)',
            '<@(cefsimple_sources_win)',
          ],
        }],
        [ 'OS=="win" and multi_threaded_dll', {
          'configurations': {
            'Debug': {
              'msvs_settings': {
                'VCCLCompilerTool': {
                  'RuntimeLibrary': 3,
                  'WarnAsError': 'false',
                },
              },
            },
            'Release': {
              'msvs_settings': {
                'VCCLCompilerTool': {
                  'RuntimeLibrary': 2,
                  'WarnAsError': 'false',
                },
              },
            }
          }
        }],
        [ 'OS=="mac"', {
          'product_name': 'cefsimple',
          'dependencies': [
            'cefsimple_helper_app',
          ],
          'copies': [
            {
              # Add libraries and helper app.
              'destination': '<(PRODUCT_DIR)/cefsimple.app/Contents/Frameworks',
              'files': [
                '<(PRODUCT_DIR)/cefsimple Helper.app',
              ],
            },
          ],
          'postbuilds': [
            {
              'postbuild_name': 'Add framework',
              'action': [
                'cp',
                '-Rf',
                '${CONFIGURATION}/<(framework_name).framework',
                '${BUILT_PRODUCTS_DIR}/${PRODUCT_NAME}.app/Contents/Frameworks/'
              ],
            },
            {
              'postbuild_name': 'Fix Framework Link',
              'action': [
                'install_name_tool',
                '-change',
                '@executable_path/<(framework_name)',
                '@executable_path/../Frameworks/<(framework_name).framework/<(framework_name)',
                '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
              ],
            },
            {
              # This postbuild step is responsible for creating the following
              # helpers:
              #
              # cefsimple Helper EH.app and cefsimple Helper NP.app are created
              # from cefsimple Helper.app.
              #
              # The EH helper is marked for an executable heap. The NP helper
              # is marked for no PIE (ASLR).
              'postbuild_name': 'Make More Helpers',
              'action': [
                'tools/make_more_helpers.sh',
                'Frameworks',
                'cefsimple',
              ],
            },
          ],
          'link_settings': {
            'libraries': [
              '$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
              '$(CONFIGURATION)/<(framework_name).framework/<(framework_name)',
            ],
          },
          'sources': [
            '<@(includes_mac)',
            '<@(cefsimple_sources_mac)',
          ],
        }],
        [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
          'copies': [
            {
              'destination': '<(PRODUCT_DIR)/',
              'files': [
                'Resources/cef.pak',
                'Resources/cef_100_percent.pak',
                'Resources/cef_200_percent.pak',
                'Resources/devtools_resources.pak',
                'Resources/icudtl.dat',
                'Resources/locales/',
                'Resources/natives_blob.bin',
                'Resources/snapshot_blob.bin',
                '$(BUILDTYPE)/chrome-sandbox',
                '$(BUILDTYPE)/libcef.so',
                '$(BUILDTYPE)/libffmpegsumo.so',
                '$(BUILDTYPE)/libpdf.so',
              ],
            },
          ],
          'link_settings': {
            'ldflags': [
              # Look for libcef.so in the current directory. Path can also be
              # specified using the LD_LIBRARY_PATH environment variable.
              '-Wl,-rpath,.',
            ],
            'libraries': [
              "$(BUILDTYPE)/libcef.so",
              "-lX11",
            ],
          },
          'sources': [
            '<@(includes_linux)',
            '<@(cefsimple_sources_linux)',
          ],
        }],
      ],
    },
    {
      'target_name': 'libcef_dll_wrapper',
      'type': 'static_library',
      'msvs_guid': 'A9D6DC71-C0DC-4549-AEA0-3B15B44E86A9',
      'defines': [
        'USING_CEF_SHARED',
      ],
      'include_dirs': [
        '.',
      ],
      'sources': [
        '<@(includes_common)',
        '<@(includes_capi)',
        '<@(includes_wrapper)',
        '<@(libcef_dll_wrapper_sources_common)',
      ],
      'xcode_settings': {
        # Target build path.
        'SYMROOT': 'xcodebuild',
      },
      'conditions': [
        [ 'OS=="win" and multi_threaded_dll', {
          'configurations': {
            'Debug': {
              'msvs_settings': {
                'VCCLCompilerTool': {
                  'RuntimeLibrary': 3,
                  'WarnAsError': 'false',
                },
              },
            },
            'Release': {
              'msvs_settings': {
                'VCCLCompilerTool': {
                  'RuntimeLibrary': 2,
                  'WarnAsError': 'false',
                },
              },
            }
          }
        }],
      ],
    },
  ],
  'conditions': [
    ['OS=="mac"', {
      'targets': [
        {
          'target_name': 'cefclient_helper_app',
          'type': 'executable',
          'variables': { 'enable_wexit_time_destructors': 1, },
          'product_name': 'cefclient Helper',
          'mac_bundle': 1,
          'dependencies': [
            'libcef_dll_wrapper',
          ],
          'defines': [
            'USING_CEF_SHARED',
          ],
          'include_dirs': [
            '.',
          ],
          'link_settings': {
            'libraries': [
              '$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
              '$(CONFIGURATION)/<(framework_name).framework/<(framework_name)',
            ],
          },
          'sources': [
            '<@(cefclient_sources_mac_helper)',
          ],
          # TODO(mark): Come up with a fancier way to do this. It should only
          # be necessary to list helper-Info.plist once, not the three times it
          # is listed here.
          'mac_bundle_resources!': [
            'cefclient/mac/helper-Info.plist',
          ],
          # TODO(mark): For now, don't put any resources into this app. Its
          # resources directory will be a symbolic link to the browser app's
          # resources directory.
          'mac_bundle_resources/': [
            ['exclude', '.*'],
          ],
          'xcode_settings': {
            'INFOPLIST_FILE': 'cefclient/mac/helper-Info.plist',
          },
          'postbuilds': [
            {
              # The framework defines its load-time path
              # (DYLIB_INSTALL_NAME_BASE) relative to the main executable
              # (chrome). A different relative path needs to be used in
              # cefclient_helper_app.
              'postbuild_name': 'Fix Framework Link',
              'action': [
                'install_name_tool',
                '-change',
                '@executable_path/<(framework_name)',
                '@executable_path/../../../../Frameworks/<(framework_name).framework/<(framework_name)',
                '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
              ],
            },
          ],
        },  # target cefclient_helper_app
        {
          'target_name': 'cefsimple_helper_app',
          'type': 'executable',
          'variables': { 'enable_wexit_time_destructors': 1, },
          'product_name': 'cefsimple Helper',
          'mac_bundle': 1,
          'dependencies': [
            'libcef_dll_wrapper',
          ],
          'defines': [
            'USING_CEF_SHARED',
          ],
          'include_dirs': [
            '.',
          ],
          'link_settings': {
            'libraries': [
              '$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
              '$(CONFIGURATION)/<(framework_name).framework/<(framework_name)',
            ],
          },
          'sources': [
            '<@(cefsimple_sources_mac_helper)',
          ],
          # TODO(mark): Come up with a fancier way to do this. It should only
          # be necessary to list helper-Info.plist once, not the three times it
          # is listed here.
          'mac_bundle_resources!': [
            'cefsimple/mac/helper-Info.plist',
          ],
          # TODO(mark): For now, don't put any resources into this app. Its
          # resources directory will be a symbolic link to the browser app's
          # resources directory.
          'mac_bundle_resources/': [
            ['exclude', '.*'],
          ],
          'xcode_settings': {
            'INFOPLIST_FILE': 'cefsimple/mac/helper-Info.plist',
          },
          'postbuilds': [
            {
              # The framework defines its load-time path
              # (DYLIB_INSTALL_NAME_BASE) relative to the main executable
              # (chrome). A different relative path needs to be used in
              # cefsimple_helper_app.
              'postbuild_name': 'Fix Framework Link',
              'action': [
                'install_name_tool',
                '-change',
                '@executable_path/<(framework_name)',
                '@executable_path/../../../../Frameworks/<(framework_name).framework/<(framework_name)',
                '${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
              ],
            },
          ],
        },  # target cefsimple_helper_app
      ],
    }],  # OS=="mac"
    [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
      'targets': [
        {
          'target_name': 'gtk',
          'type': 'none',
          'variables': {
            # gtk requires gmodule, but it does not list it as a dependency
            # in some misconfigured systems.
            'gtk_packages': 'gmodule-2.0 gtk+-2.0 gthread-2.0 gtk+-unix-print-2.0',
          },
          'direct_dependent_settings': {
            'cflags': [
              '$(shell <(pkg-config) --cflags <(gtk_packages))',
            ],
          },
          'link_settings': {
            'ldflags': [
              '$(shell <(pkg-config) --libs-only-L --libs-only-other <(gtk_packages))',
            ],
            'libraries': [
              '$(shell <(pkg-config) --libs-only-l <(gtk_packages))',
            ],
          },
        },
        {
          'target_name': 'gtkglext',
          'type': 'none',
          'variables': {
            # gtkglext is required by the cefclient OSR example.
            'gtk_packages': 'gtkglext-1.0',
          },
          'direct_dependent_settings': {
            'cflags': [
              '$(shell <(pkg-config) --cflags <(gtk_packages))',
            ],
          },
          'link_settings': {
            'ldflags': [
              '$(shell <(pkg-config) --libs-only-L --libs-only-other <(gtk_packages))',
            ],
            'libraries': [
              '$(shell <(pkg-config) --libs-only-l <(gtk_packages))',
            ],
          },
        },
      ],
    }],  # OS=="linux" or OS=="freebsd" or OS=="openbsd"
  ],
}
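The '$(shell <(pkg-config) ...)' entries in the gtk and gtkglext targets above
expand, in the generated Makefiles, to pkg-config invocations. A rough
standalone Python 2.7 sketch of the equivalent queries (assumed behavior, for
illustration only):

import subprocess

def pkg_config(flag, packages):
  # Run pkg-config with the requested flag ('--cflags', '--libs-only-l', ...)
  # for the given space-separated package list and return its output.
  return subprocess.check_output(['pkg-config', flag] + packages.split()).strip()

gtk_packages = 'gmodule-2.0 gtk+-2.0 gthread-2.0 gtk+-unix-print-2.0'
print pkg_config('--cflags', gtk_packages)
print pkg_config('--libs-only-l', gtk_packages)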
17
tools/distrib/linux/README.minimal.txt
Normal file
@@ -0,0 +1,17 @@
CONTENTS
--------

Release     Contains libcef.so and other components required to run the release
            version of CEF-based applications. By default these files should be
            placed in the same directory as the executable.

Resources   Contains resources required by libcef.so. By default these files
            should be placed in the same directory as libcef.so.


USAGE
-----

Please visit the CEF Website for additional usage information.

http://code.google.com/p/chromiumembedded
47
tools/distrib/linux/README.redistrib.txt
Normal file
@@ -0,0 +1,47 @@
REDISTRIBUTION
--------------

This binary distribution contains the below components. Components listed under
the "required" section must be redistributed with all applications using CEF.
Components listed under the "optional" section may be excluded if the related
features will not be used.

Required components:

* CEF core library
    libcef.so

* Unicode support
    icudtl.dat

* V8 initial snapshot
    natives_blob.bin
    snapshot_blob.bin

Optional components:

* Localized resources
    locales/
  Note: Contains localized strings for WebKit UI controls. A .pak file is loaded
  from this folder based on the value of environment variables which are read
  with the following precedence order: LANGUAGE, LC_ALL, LC_MESSAGES and LANG.
  Only configured locales need to be distributed. If no locale is configured the
  default locale of "en-US" will be used. Locale file loading can be disabled
  completely using CefSettings.pack_loading_disabled. The locales folder path
  can be customized using CefSettings.locales_dir_path.

* Other resources
    cef.pak
    cef_100_percent.pak
    cef_200_percent.pak
    devtools_resources.pak
  Note: Contains WebKit image and inspector resources. Pack file loading can be
  disabled completely using CefSettings.pack_loading_disabled. The resources
  directory path can be customized using CefSettings.resources_dir_path.

* FFmpeg audio and video support
    libffmpegsumo.so
  Note: Without this component HTML5 audio and video will not function.

* PDF support
    libpdf.so
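The locale-selection precedence described above can be summarized in a few
lines of Python 2. This is a sketch of the documented lookup order, not the
code CEF uses internally:

import os

def configured_locale():
  # Check the documented environment variables in precedence order.
  for var in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
    value = os.environ.get(var)
    if value:
      # e.g. 'en_US.UTF-8' -> 'en-US'
      return value.split('.')[0].replace('_', '-')
  return 'en-US'  # the documented default when nothing is configured

print configured_locale()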
41
tools/distrib/linux/README.standard.txt
Normal file
@@ -0,0 +1,41 @@
CONTENTS
--------

cefclient   Contains the cefclient sample application configured to build
            using the files in this distribution. This application demonstrates
            a wide range of CEF functionalities.

cefsimple   Contains the cefsimple sample application configured to build
            using the files in this distribution. This application demonstrates
            the minimal functionality required to create a browser window.

Debug       Contains libcef.so and other components required to run the debug
            version of CEF-based applications. By default these files should be
            placed in the same directory as the executable and will be copied
            there as part of the build process.

include     Contains all required CEF header files.

libcef_dll  Contains the source code for the libcef_dll_wrapper static library
            that all applications using the CEF C++ API must link against.

Release     Contains libcef.so and other components required to run the release
            version of CEF-based applications. By default these files should be
            placed in the same directory as the executable and will be copied
            there as part of the build process.

Resources   Contains resources required by libcef.so. By default these files
            should be placed in the same directory as libcef.so and will be
            copied there as part of the build process.


USAGE
-----

Building using CMake:
  CMake can be used to generate project files in many different formats. See
  usage instructions at the top of the CMakeLists.txt file.

Please visit the CEF Website for additional usage information.

http://code.google.com/p/chromiumembedded
12
tools/distrib/linux/build.sh
Executable file
@@ -0,0 +1,12 @@
#!/bin/bash
if [ -z "$1" ]; then
  echo "ERROR: Please specify a build target: Debug or Release"
else
  make -j8 cefclient cefsimple BUILDTYPE=$1
  if [ $? -eq 0 ]; then
    echo "Giving SUID permissions to chrome-sandbox..."
    echo "(using sudo so you may be asked for your password)"
    sudo -- chown root:root "out/$1/chrome-sandbox" &&
    sudo -- chmod 4755 "out/$1/chrome-sandbox"
  fi
fi
14
tools/distrib/mac/README.minimal.txt
Normal file
@@ -0,0 +1,14 @@
CONTENTS
--------

Release     Contains the "Chromium Embedded Framework.framework" and other
            components required to run the release version of CEF-based
            applications.


USAGE
-----

Please visit the CEF Website for additional usage information.

http://code.google.com/p/chromiumembedded
113
tools/distrib/mac/README.redistrib.txt
Normal file
@@ -0,0 +1,113 @@
REDISTRIBUTION
--------------

This binary distribution contains the below components. Components listed under
the "required" section must be redistributed with all applications using CEF.
Components listed under the "optional" section may be excluded if the related
features will not be used.

Applications using CEF on OS X must follow a specific app bundle structure.
Replace "cefclient" in the below example with your application name.

cefclient.app/
  Contents/
    Frameworks/
      Chromium Embedded Framework.framework/
        Chromium Embedded Framework <= main application library
        Libraries/
          ffmpegsumo.so <= HTML5 audio/video support library
          PDF.plugin <= Pepper plugin for PDF support
        Resources/
          cef.pak <= non-localized resources and strings
          cef_100_percent.pak <====^
          cef_200_percent.pak <====^
          devtools_resources.pak <=^
          crash_inspector, crash_report_sender <= breakpad support
          icudtl.dat <= unicode support
          natives_blob.bin, snapshot_blob.bin <= V8 initial snapshot
          en.lproj/, ... <= locale-specific resources and strings
          Info.plist
      cefclient Helper.app/
        Contents/
          Info.plist
          MacOS/
            cefclient Helper <= helper executable
          Pkginfo
      cefclient Helper EH.app/
        Contents/
          Info.plist
          MacOS/
            cefclient Helper EH <= helper executable
          Pkginfo
      cefclient Helper NP.app/
        Contents/
          Info.plist
          MacOS/
            cefclient Helper NP <= helper executable
          Pkginfo
    Info.plist
    MacOS/
      cefclient <= cefclient application executable
    Pkginfo
    Resources/
      binding.html, ... <= cefclient application resources

The "Chromium Embedded Framework.framework" is an unversioned framework that
contains CEF binaries and resources. Executables (cefclient, cefclient Helper,
etc) are linked to the "Chromium Embedded Framework" library using
install_name_tool and a path relative to @executable_path.

The "cefclient Helper" apps are used for executing separate processes
(renderer, plugin, etc) with different characteristics. They need to have
separate app bundles and Info.plist files so that, among other things, they
don't show dock icons. The "EH" helper, which is used when launching plugin
processes, has the MH_NO_HEAP_EXECUTION bit cleared to allow an executable
heap. The "NP" helper, which is used when launching NaCl plugin processes
only, has the MH_PIE bit cleared to disable ASLR. This is set up as part of
the build process using scripts from the tools/ directory. Examine the Xcode
project included with the binary distribution or the originating cefclient.gyp
file for a better idea of the script dependencies.

Required components:

* CEF framework library
    Chromium Embedded Framework.framework/Chromium Embedded Framework

* Unicode support
    Chromium Embedded Framework.framework/Resources/icudtl.dat

* V8 initial snapshot
    Chromium Embedded Framework.framework/Resources/natives_blob.bin
    Chromium Embedded Framework.framework/Resources/snapshot_blob.bin

Optional components:

* Localized resources
    Chromium Embedded Framework.framework/Resources/*.lproj/
  Note: Contains localized strings for WebKit UI controls. A .pak file is loaded
  from this folder based on the CefSettings.locale value. Only configured
  locales need to be distributed. If no locale is configured the default locale
  of "en" will be used. Locale file loading can be disabled completely using
  CefSettings.pack_loading_disabled.

* Other resources
    Chromium Embedded Framework.framework/Resources/cef.pak
    Chromium Embedded Framework.framework/Resources/cef_100_percent.pak
    Chromium Embedded Framework.framework/Resources/cef_200_percent.pak
    Chromium Embedded Framework.framework/Resources/devtools_resources.pak
  Note: Contains WebKit image and inspector resources. Pack file loading can be
  disabled completely using CefSettings.pack_loading_disabled. The resources
  directory path can be customized using CefSettings.resources_dir_path.

* FFmpeg audio and video support
    Chromium Embedded Framework.framework/Libraries/ffmpegsumo.so
  Note: Without this component HTML5 audio and video will not function.

* PDF support
    Chromium Embedded Framework.framework/Libraries/PDF.plugin

* Breakpad support
    Chromium Embedded Framework.framework/Resources/crash_inspector
    Chromium Embedded Framework.framework/Resources/crash_report_sender
    Chromium Embedded Framework.framework/Resources/Info.plist
  Note: Without these components breakpad support will not function.
37
tools/distrib/mac/README.standard.txt
Normal file
@@ -0,0 +1,37 @@
CONTENTS
--------

cefclient   Contains the cefclient sample application configured to build
            using the files in this distribution. This application demonstrates
            a wide range of CEF functionalities.

cefsimple   Contains the cefsimple sample application configured to build
            using the files in this distribution. This application demonstrates
            the minimal functionality required to create a browser window.

Debug       Contains the "Chromium Embedded Framework.framework" and other
            components required to run the debug version of CEF-based
            applications.

include     Contains all required CEF header files.

libcef_dll  Contains the source code for the libcef_dll_wrapper static library
            that all applications using the CEF C++ API must link against.

Release     Contains the "Chromium Embedded Framework.framework" and other
            components required to run the release version of CEF-based
            applications.

tools       Scripts that perform post-processing on Mac release targets.


USAGE
-----

Building using CMake:
  CMake can be used to generate project files in many different formats. See
  usage instructions at the top of the CMakeLists.txt file.

Please visit the CEF Website for additional usage information.

http://code.google.com/p/chromiumembedded
33
tools/distrib/mac/transfer.cfg
Normal file
@@ -0,0 +1,33 @@
# Additional handling of transfer files.
# target:       Target location relative to the target release directory. This
#               value is required.
# source:       Source location relative to the CEF root directory. This value
#               is optional. If specified the target will be copied to this location
#               and a TRANSFER-README.txt file will be created.
# post-process: Post-processing operation to perform. This value is
#               optional and may be any one of the following:
#               'normalize_headers': Replace fully-qualified project header paths with
#               the optionally specified 'new_header_path' value.

[
  {
    'source' : '../build/mac/change_mach_o_flags_from_xcode.sh',
    'target' : 'tools/change_mach_o_flags_from_xcode.sh',
  },
  {
    'source' : '../build/mac/change_mach_o_flags.py',
    'target' : 'tools/change_mach_o_flags.py',
  },
  {
    'source' : '../build/mac/strip_from_xcode',
    'target' : 'tools/strip_from_xcode',
  },
  {
    'source' : '../build/mac/strip_save_dsym',
    'target' : 'tools/strip_save_dsym',
  },
]
13
tools/distrib/transfer.cfg
Normal file
@@ -0,0 +1,13 @@
# Additional handling of transfer files.
# target:       Target location relative to the target release directory. This
#               value is required.
# source:       Source location relative to the CEF root directory. This value
#               is optional. If specified the target will be copied to this location
#               and a TRANSFER-README.txt file will be created.
# post-process: Post-processing operation to perform. This value is
#               optional and may be any one of the following:
#               'normalize_headers': Replace fully-qualified project header paths with
#               the optionally specified 'new_header_path' value.

[
]
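The empty list above means the generic distribution defines no extra transfers.
For illustration only, a hypothetical entry exercising every supported key
would look like this (all file names invented):

[
  {
    'source' : '../build/example/example_header.h',
    'target' : 'include/example_header.h',
    'post-process' : 'normalize_headers',
    'new_header_path' : 'include/',
  },
]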
19
tools/distrib/win/README.minimal.txt
Normal file
@@ -0,0 +1,19 @@
CONTENTS
--------

Release     Contains libcef.dll, libcef.lib and other components required to
            build and run the release version of CEF-based applications. By
            default these files should be placed in the same directory as the
            executable.

Resources   Contains resources required by libcef.dll. By default these files
            should be placed in the same directory as libcef.dll.


USAGE
-----

Please visit the CEF Website for additional usage information.

http://code.google.com/p/chromiumembedded
56
tools/distrib/win/README.redistrib.txt
Normal file
@@ -0,0 +1,56 @@
REDISTRIBUTION
--------------

This binary distribution contains the below components. Components listed under
the "required" section must be redistributed with all applications using CEF.
Components listed under the "optional" section may be excluded if the related
features will not be used.

Required components:

* CEF core library
    libcef.dll

* Unicode support
    icudtl.dat

Optional components:

* Localized resources
    locales/
  Note: Contains localized strings for WebKit UI controls. A .pak file is loaded
  from this folder based on the CefSettings.locale value. Only configured
  locales need to be distributed. If no locale is configured the default locale
  of "en-US" will be used. Locale file loading can be disabled completely using
  CefSettings.pack_loading_disabled. The locales folder path can be customized
  using CefSettings.locales_dir_path.

* Other resources
    cef.pak
    cef_100_percent.pak
    cef_200_percent.pak
    devtools_resources.pak
  Note: Contains WebKit image and inspector resources. Pack file loading can be
  disabled completely using CefSettings.pack_loading_disabled. The resources
  directory path can be customized using CefSettings.resources_dir_path.

* FFmpeg audio and video support
    ffmpegsumo.dll
  Note: Without this component HTML5 audio and video will not function.

* PDF support
    pdf.dll
  Note: Without this component printing will not function.

* Angle and Direct3D support
    d3dcompiler_43.dll (required for Windows XP)
    d3dcompiler_47.dll (required for Windows Vista and newer)
    libEGL.dll
    libGLESv2.dll
  Note: Without these components HTML5 accelerated content like 2D canvas, 3D
  CSS and WebGL will not function.

* Windows Vista 64-bit sandbox support (32-bit distributions only)
    wow_helper.exe
  Note: Without this component the 32-bit build of CEF will not run on 64-bit
  Vista machines with the sandbox enabled.
42
tools/distrib/win/README.standard.txt
Normal file
@@ -0,0 +1,42 @@
CONTENTS
--------

cefclient   Contains the cefclient sample application configured to build
            using the files in this distribution. This application demonstrates
            a wide range of CEF functionalities.

cefsimple   Contains the cefsimple sample application configured to build
            using the files in this distribution. This application demonstrates
            the minimal functionality required to create a browser window.

Debug       Contains libcef.dll, libcef.lib and other components required to
            build and run the debug version of CEF-based applications. By
            default these files should be placed in the same directory as the
            executable and will be copied there as part of the build process.

include     Contains all required CEF header files.

libcef_dll  Contains the source code for the libcef_dll_wrapper static library
            that all applications using the CEF C++ API must link against.

Release     Contains libcef.dll, libcef.lib and other components required to
            build and run the release version of CEF-based applications. By
            default these files should be placed in the same directory as the
            executable and will be copied there as part of the build process.

Resources   Contains resources required by libcef.dll. By default these files
            should be placed in the same directory as libcef.dll and will be
            copied there as part of the build process.


USAGE
-----

Building using CMake:
  CMake can be used to generate project files in many different formats. See
  usage instructions at the top of the CMakeLists.txt file.

Please visit the CEF Website for additional usage information.

http://code.google.com/p/chromiumembedded
BIN
tools/distrib/win/d3dcompiler_43.dll
Normal file
Binary file not shown.
21
tools/distrib/win/transfer.cfg
Normal file
@@ -0,0 +1,21 @@
# Additional handling of transfer files.
# target:       Target location relative to the target release directory. This
#               value is required.
# source:       Source location relative to the CEF root directory. This value
#               is optional. If specified the target will be copied to this location
#               and a TRANSFER-README.txt file will be created.
# post-process: Post-processing operation to perform. This value is
#               optional and may be any one of the following:
#               'normalize_headers': Replace fully-qualified project header paths with
#               the optionally specified 'new_header_path' value.

[
  {
    'source' : '../build/win/compatibility.manifest',
    'target' : 'cefclient/compatibility.manifest',
  },
  {
    'source' : '../build/win/compatibility.manifest',
    'target' : 'cefsimple/compatibility.manifest',
  },
]
27
tools/exec_util.py
Normal file
@@ -0,0 +1,27 @@
# Copyright (c) 2014 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from subprocess import Popen, PIPE
import sys

def exec_cmd(cmd, path, input_file=None):
  """ Execute the specified command and return the result. """
  out = ''
  err = ''
  parts = cmd.split()
  try:
    if not input_file:
      process = Popen(parts, cwd=path, stdout=PIPE, stderr=PIPE,
                      shell=(sys.platform == 'win32'))
    else:
      with open(input_file, 'rb') as f:
        process = Popen(parts, cwd=path, stdout=PIPE, stderr=PIPE,
                        stdin=f,
                        shell=(sys.platform == 'win32'))
    out, err = process.communicate()
  except IOError, (errno, strerror):
    raise
  except:
    raise
  return {'out': out, 'err': err}
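A typical call captures both output streams; the command and checkout path
here are hypothetical:

from exec_util import exec_cmd

# Run a git command in a hypothetical CEF checkout and read its output.
result = exec_cmd('git rev-parse HEAD', '/path/to/chromium/src/cef')
print 'revision: ' + result['out'].strip()
if result['err'] != '':
  print 'stderr: ' + result['err']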
141
tools/file_util.py
Normal file
@@ -0,0 +1,141 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from glob import iglob
import os
import shutil
import sys
import time

def read_file(name, normalize = True):
  """ Read a file. """
  try:
    f = open(name, 'r')
    # read the data
    data = f.read()
    # close the file before returning
    f.close()
    if normalize:
      # normalize line endings
      data = data.replace("\r\n", "\n")
    return data
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to read file '+name+': '+strerror)
    raise

def write_file(name, data):
  """ Write a file. """
  try:
    f = open(name, 'w')
    # write the data
    f.write(data)
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to write file '+name+': '+strerror)
    raise
  else:
    f.close()

def path_exists(name):
  """ Returns true if the path currently exists. """
  return os.path.exists(name)

def backup_file(name):
  """ Rename the file to a name that includes the current time stamp. """
  move_file(name, name+'.'+time.strftime('%Y-%m-%d-%H-%M-%S'))

def copy_file(src, dst, quiet = True):
  """ Copy a file. """
  try:
    shutil.copy(src, dst)
    if not quiet:
      sys.stdout.write('Transferring '+src+' file.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to copy file from '+src+' to '+dst+': '+strerror)
    raise

def move_file(src, dst, quiet = True):
  """ Move a file. """
  try:
    shutil.move(src, dst)
    if not quiet:
      sys.stdout.write('Moving '+src+' file.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to move file from '+src+' to '+dst+': '+strerror)
    raise

def copy_files(src_glob, dst_folder, quiet = True):
  """ Copy multiple files. """
  for fname in iglob(src_glob):
    dst = os.path.join(dst_folder, os.path.basename(fname))
    if os.path.isdir(fname):
      copy_dir(fname, dst, quiet)
    else:
      copy_file(fname, dst, quiet)

def remove_file(name, quiet = True):
  """ Remove the specified file. """
  try:
    if path_exists(name):
      os.remove(name)
      if not quiet:
        sys.stdout.write('Removing '+name+' file.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to remove file '+name+': '+strerror)
    raise

def copy_dir(src, dst, quiet = True):
  """ Copy a directory tree. """
  try:
    remove_dir(dst, quiet)
    shutil.copytree(src, dst)
    if not quiet:
      sys.stdout.write('Transferring '+src+' directory.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to copy directory from '+src+' to '+dst+': '+strerror)
    raise

def remove_dir(name, quiet = True):
  """ Remove the specified directory. """
  try:
    if path_exists(name):
      shutil.rmtree(name)
      if not quiet:
        sys.stdout.write('Removing '+name+' directory.\n')
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to remove directory '+name+': '+strerror)
    raise

def make_dir(name, quiet = True):
  """ Create the specified directory. """
  try:
    if not path_exists(name):
      if not quiet:
        sys.stdout.write('Creating '+name+' directory.\n')
      os.makedirs(name)
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to create directory '+name+': '+strerror)
    raise

def get_files(search_glob):
  """ Returns all files matching the search glob. """
  # Sort the result for consistency across platforms.
  return sorted(iglob(search_glob))

def read_version_file(file, args):
  """ Read and parse a version file (key=value pairs, one per line). """
  lines = read_file(file).split("\n")
  for line in lines:
    parts = line.split('=', 1)
    if len(parts) == 2:
      args[parts[0]] = parts[1]

def eval_file(src):
  """ Loads and evaluates the contents of the specified file. """
  return eval(read_file(src), {'__builtins__': None}, None)

def normalize_path(path):
  """ Normalizes the path separator to match the Unix standard. """
  if sys.platform == 'win32':
    return path.replace('\\', '/')
  return path
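As a usage sketch, read_version_file is how the tools consume key=value files
such as the CEF VERSION file (the key shown here is assumed for illustration):

from file_util import read_version_file

args = {}
read_version_file('VERSION', args)  # e.g. a file containing 'CEF_MAJOR=3'
print args.get('CEF_MAJOR', 'unknown')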
106
tools/gclient_hook.py
Normal file
@@ -0,0 +1,106 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors.
# Portions copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from gclient_util import *
import os, sys

# The CEF directory is the parent directory of _this_ script.
cef_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
# The src directory is the parent directory of the CEF directory.
src_dir = os.path.abspath(os.path.join(cef_dir, os.pardir))

print "\nGenerating CEF version header file..."
gyper = [ 'python', 'tools/make_version_header.py',
          '--header', 'include/cef_version.h',
          '--cef_version', 'VERSION',
          '--chrome_version', '../chrome/VERSION',
          '--cpp_header_dir', 'include' ]
RunAction(cef_dir, gyper)

print "\nPatching build configuration and source files for CEF..."
patcher = [ 'python', 'tools/patcher.py',
            '--patch-config', 'patch/patch.cfg' ]
RunAction(cef_dir, patcher)

print "\nGenerating CEF project files..."

# depot_tools currently bundles VS2013 Express Update 1 which causes linker
# errors with Debug builds (see issue #1304). Don't use the bundled version
# unless explicitly requested.
if not 'DEPOT_TOOLS_WIN_TOOLCHAIN' in os.environ.keys():
  os.environ['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0'

# By default GYP+Ninja on Windows expects Visual Studio to be installed on the
# local machine. To build when Visual Studio is extracted to a directory but
# not installed (e.g. via a custom toolchain) you have two options:
#
# 1. Set up the environment using only environment variables:
#    set WIN_CUSTOM_TOOLCHAIN=1
#    set VS_ROOT=<VS root directory>
#    set SDK_ROOT=<Platform SDK root directory>
#    set INCLUDE=<VS include paths>
#    set PATH=<VS executable paths>
#    set LIB=<VS library paths>
#
# 2. Set up the environment using a combination of environment variables and
#    the "%GYP_MSVS_OVERRIDE_PATH%\VC\vcvarsall.bat" script:
#    set GYP_MSVS_OVERRIDE_PATH=<VS root directory>
#    set GYP_DEFINES="windows_sdk_path=<Platform SDK root directory>"
#
# The following environment variables must also be set:
#    set DEPOT_TOOLS_WIN_TOOLCHAIN=0
#    set GYP_MSVS_VERSION=<VS version>
#    set CEF_VCVARS=<empty .bat file>
custom_toolchain = False
if bool(int(os.environ.get('WIN_CUSTOM_TOOLCHAIN', '0'))):
  required_vars = [
    'GYP_MSVS_VERSION',
    'VS_ROOT',
    'SDK_ROOT',
    'INCLUDE',
    'PATH',
    'LIB',
  ]
  for var in required_vars:
    if not var in os.environ.keys():
      raise Exception('%s environment variable must be set' % var)

  custom_toolchain = True

  # Set windows_sdk_path via GYP_DEFINES.
  gyp_defines = ''
  if 'GYP_DEFINES' in os.environ.keys():
    gyp_defines = os.environ['GYP_DEFINES'] + ' '
  gyp_defines = gyp_defines + \
      'windows_sdk_path=' + os.environ['SDK_ROOT'].replace('\\', '/')
  os.environ['GYP_DEFINES'] = gyp_defines

  # Necessary to return correct VS version information via GetVSVersion in
  # src/tools/gyp/pylib/gyp/msvs_emulation.py.
  os.environ['GYP_MSVS_OVERRIDE_PATH'] = os.environ['VS_ROOT']

  # Generate environment files (environment.x64, environment.x86) in each
  # build output directory.
  # When using the default toolchain this is done by GenerateEnvironmentFiles
  # in src/tools/gyp/pylib/gyp/msvs_emulation.py.
  setup_script = os.path.join(cef_dir, 'tools/setup_toolchain.py')
  win_tool_script = os.path.join(src_dir, 'tools/gyp/pylib/gyp/win_tool.py')
  out_dirs = ['Debug', 'Debug_x64', 'Release', 'Release_x64']
  for out_dir in out_dirs:
    out_dir_abs = os.path.join(src_dir, 'out', out_dir)
    if not os.path.exists(out_dir_abs):
      os.makedirs(out_dir_abs)
    cmd = ['python', setup_script,
           os.environ['VS_ROOT'], win_tool_script, os.environ['SDK_ROOT']]
    RunAction(out_dir_abs, cmd)

os.environ['CEF_DIRECTORY'] = os.path.basename(cef_dir)
gyper = [ 'python', '../build/gyp_chromium', 'cef.gyp', '-I', 'cef.gypi' ]
if custom_toolchain:
  # Disable GYP's auto-detection of the VS install.
  gyper.extend(['-G', 'ninja_use_custom_environment_files'])
if 'GYP_ARGUMENTS' in os.environ.keys():
  gyper.extend(os.environ['GYP_ARGUMENTS'].split(' '))
RunAction(cef_dir, gyper)
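# Example (illustrative only; the exact .gclient contents are an assumption).
# This script is intended to run as a gclient hook, e.g. from a .gclient file:
#   "custom_hooks": [
#     {"name": "cef", "pattern": ".",
#      "action": ["python", "src/cef/tools/gclient_hook.py"]},
#   ]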
45
tools/gclient_util.py
Normal file
@@ -0,0 +1,45 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors.
# Portions copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os, sys

try:
  # depot_tools may already be in the import path.
  import gclient_utils
except ImportError, e:
  # Search the PATH environment variable to find the depot_tools folder.
  depot_tools = None;
  paths = os.environ.get('PATH').split(os.pathsep)
  for path in paths:
    if os.path.exists(os.path.join(path, 'gclient_utils.py')):
      depot_tools = path
      break

  if depot_tools is None:
    print >> sys.stderr, 'Error: could not find depot_tools in PATH.'
    sys.exit(2)

  # Add depot_tools to import path.
  sys.path.append(depot_tools)
  import gclient_utils

# Copied from gclient.py python code.
def RunAction(dir, command):
  """Runs the action."""
  if command[0] == 'python':
    # If the hook specified "python" as the first item, the action is a
    # Python script. Run it by starting a new copy of the same
    # interpreter.
    command[0] = sys.executable

  try:
    gclient_utils.CheckCallAndFilterAndHeader(
        command, cwd=dir, always=True)
  except gclient_utils.Error, e:
    # Use a discrete exit status code of 2 to indicate that a hook action
    # failed. Users of this script may wish to treat hook action failures
    # differently from VC failures.
    print >> sys.stderr, 'Error: %s' % str(e)
    sys.exit(2)
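# Example (illustrative only; the working directory is hypothetical): run a
# Python action from the CEF directory, echoing output with a header via
# depot_tools.
#   RunAction('/path/to/chromium/src/cef',
#             ['python', 'tools/make_version_header.py',
#              '--header', 'include/cef_version.h'])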
44
tools/git_util.py
Normal file
@@ -0,0 +1,44 @@
# Copyright (c) 2014 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from exec_util import exec_cmd
import os

def is_checkout(path):
  """ Returns true if the path represents a git checkout. """
  return os.path.exists(os.path.join(path, '.git'))

def get_hash(path = '.', branch = 'HEAD'):
  """ Returns the git hash for the specified branch/tag/hash. """
  cmd = "git rev-parse %s" % (branch)
  result = exec_cmd(cmd, path)
  if result['out'] != '':
    return result['out'].strip()
  return 'Unknown'

def get_url(path = '.'):
  """ Returns the origin url for the specified path. """
  cmd = "git config --get remote.origin.url"
  result = exec_cmd(cmd, path)
  if result['out'] != '':
    return result['out'].strip()
  return 'Unknown'

def get_svn_revision(path = '.', branch = 'HEAD'):
  """ Returns the SVN revision associated with the specified path and git
      branch/tag/hash. """
  svn_rev = "None"
  cmd = "git log --grep=^git-svn-id: -n 1 %s" % (branch)
  result = exec_cmd(cmd, path)
  if result['err'] == '':
    for line in result['out'].split('\n'):
      if line.find("git-svn-id") > 0:
        svn_rev = line.split("@")[1].split()[0]
        break
  return svn_rev

def get_changed_files(path = '.'):
  """ Retrieves the list of changed files. """
  # not implemented
  return []
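# Example (illustrative only): for a commit whose message contains the line
#   git-svn-id: https://chromiumembedded.googlecode.com/svn/branches/2272@1993 <repo-uuid>
# get_svn_revision() splits on '@' and returns '1993'.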
202
tools/make_capi_header.py
Normal file
@@ -0,0 +1,202 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from cef_parser import *
from date_util import *

def make_capi_global_funcs(funcs, defined_names, translate_map, indent):
  result = ''
  first = True
  for func in funcs:
    comment = func.get_comment()
    if first or len(comment) > 0:
      result += '\n'+format_comment(comment, indent, translate_map);
    if func.get_retval().get_type().is_result_string():
      result += indent+'// The resulting string must be freed by calling cef_string_userfree_free().\n'
    result += wrap_code(indent+'CEF_EXPORT '+
                        func.get_capi_proto(defined_names)+';')
    if first:
      first = False
  return result

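# Example (illustrative only; the argument list is elided): for a static
# wrapper function the emitted C API declaration takes the form
#   CEF_EXPORT int cef_browser_host_create_browser(...);
# preceded by the translated C++ comment block.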
def make_capi_member_funcs(funcs, defined_names, translate_map, indent):
  result = ''
  first = True
  for func in funcs:
    comment = func.get_comment()
    if first or len(comment) > 0:
      result += '\n'+format_comment(comment, indent, translate_map)
    if func.get_retval().get_type().is_result_string():
      result += indent+'// The resulting string must be freed by calling cef_string_userfree_free().\n'
    parts = func.get_capi_parts()
    result += wrap_code(indent+parts['retval']+' (CEF_CALLBACK *'+
                        parts['name']+')('+
                        string.join(parts['args'], ', ')+');')
    if first:
      first = False
  return result

def make_capi_header(header, filename):
  # structure names that have already been defined
  defined_names = header.get_defined_structs()

  # map of strings that will be changed in C++ comments
  translate_map = header.get_capi_translations()

  # header string
  result = \
"""// Copyright (c) $YEAR$ Marshall A. Greenblatt. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//    * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//    * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//    * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// ---------------------------------------------------------------------------
//
// This file was generated by the CEF translator tool and should not be edited
// by hand. See the translator.README.txt file in the tools directory for
// more information.
//

#ifndef $GUARD$
#define $GUARD$
#pragma once

"""
  classes = header.get_classes(filename)

  # identify all includes and forward declarations
  all_includes = set([])
  all_declares = set([])
  for cls in classes:
    includes = cls.get_includes()
    for include in includes:
      all_includes.add(include)
    declares = cls.get_forward_declares()
    for declare in declares:
      all_declares.add(header.get_class(declare).get_capi_name())

  # output includes
  if len(all_includes) > 0:
    sorted_includes = sorted(all_includes)
    for include in sorted_includes:
      result += '#include "include/capi/' + include + '_capi.h"\n'
  else:
    result += '#include "include/capi/cef_base_capi.h"\n'

  result += \
"""
#ifdef __cplusplus
extern "C" {
#endif

"""

  # output forward declarations
  if len(all_declares) > 0:
    sorted_declares = sorted(all_declares)
    for declare in sorted_declares:
      result += 'struct _' + declare + ';\n'

  # output classes
  for cls in classes:
    # virtual functions are inside the structure
    classname = cls.get_capi_name()
    result += '\n'+format_comment(cls.get_comment(), '', translate_map);
    result += 'typedef struct _'+classname+ \
              ' {\n  ///\n  // Base structure.\n  ///\n  cef_base_t base;\n'
    funcs = cls.get_virtual_funcs()
    result += make_capi_member_funcs(funcs, defined_names,
                                     translate_map, '  ')
    result += '} '+classname+';\n\n'

    defined_names.append(cls.get_capi_name())

    # static functions become global
    funcs = cls.get_static_funcs()
    if len(funcs) > 0:
      result += make_capi_global_funcs(funcs, defined_names,
                                       translate_map, '')+'\n'

  # output global functions
  funcs = header.get_funcs(filename)
  if len(funcs) > 0:
    result += make_capi_global_funcs(funcs, defined_names, translate_map, '')

  # footer string
  result += \
"""
#ifdef __cplusplus
}
#endif

#endif  // $GUARD$
"""

  # add the copyright year
  result = result.replace('$YEAR$', get_year())
  # add the guard string
  guard = 'CEF_INCLUDE_CAPI_'+string.upper(filename.replace('.', '_capi_'))+'_'
  result = result.replace('$GUARD$', guard)

  return result


def write_capi_header(header, filepath, backup):
  capi_path = get_capi_file_name(filepath)
  if path_exists(capi_path):
    oldcontents = read_file(capi_path)
  else:
    oldcontents = ''

  filename = os.path.split(filepath)[1]
  newcontents = make_capi_header(header, filename)
  if newcontents != oldcontents:
    if backup and oldcontents != '':
      backup_file(capi_path)
    write_file(capi_path, newcontents)
    return True

  return False


# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 2:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # dump the result to stdout
  filename = os.path.split(sys.argv[1])[1]
  sys.stdout.write(make_capi_header(header, filename))
249
tools/make_cmake.py
Normal file
@@ -0,0 +1,249 @@
# Copyright (c) 2014 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import os
from file_util import *
import sys

# Script directory.
script_dir = os.path.dirname(__file__)

# CEF root directory.
cef_dir = os.path.abspath(os.path.join(script_dir, os.pardir))

def get_files_for_variable(cmake_path, variables, variable):
  """ Returns the path values associated with |variable| and relative to the
      |cmake_path| directory. """
  if not variable in variables:
    raise Exception('Variable %s does not exist' % variable)

  # CMake file directory.
  cmake_dirname = os.path.dirname(cmake_path) + '/'

  # Return path values relative to the cmake file directory.
  # Example 1:
  #   cmake file   = "/path/to/libcef_dll/CMakeLists.txt"
  #   include path = "/path/to/libcef_dll/wrapper/cef_browser_info_map.h"
  #   return path  = "wrapper/cef_browser_info_map.h"
  # Example 2:
  #   cmake file   = "/path/to/libcef_dll/CMakeLists.txt"
  #   include path = "/path/to/include/internal/cef_export.h"
  #   return path  = "../include/internal/cef_export.h"
  new_paths = []
  paths = variables[variable]
  for path in paths:
    if path[0] == '<':
      # Skip gyp include variables
      continue

    abspath = os.path.join(cef_dir, path)
    newpath = normalize_path(os.path.relpath(abspath, cmake_dirname))
    new_paths.append(newpath)
  return new_paths

def format_cmake_set(name, values):
  result = 'set(%s\n' % name
  for value in values:
    result += '  %s\n' % value
  return result + '  )\n'

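# Example (illustrative only): format_cmake_set('FOO_SRCS', ['a.cc', 'b.cc'])
# returns the CMake fragment:
#   set(FOO_SRCS
#     a.cc
#     b.cc
#     )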
def format_cmake_group(cmake_path, name, files, platform_sep, append_macro):
  platforms = {}
  common = []

  # Folder will be the cmake parent directory name combined with the path to
  # the first file in the files list.
  # Example 1:
  #   cmake file   = "/path/to/libcef_dll/CMakeLists.txt"
  #   include path = "wrapper/cef_browser_info_map.h"
  #   folder       = "libcef_dll\\\\wrapper"
  # Example 2:
  #   cmake file   = "/path/to/libcef_dll/CMakeLists.txt"
  #   include path = "../include/internal/cef_export.h"
  #   folder       = "include\\\\internal"
  folder = os.path.basename(os.path.dirname(cmake_path))
  folder = os.path.dirname(os.path.normpath(os.path.join(folder, files[0])))
  folder = normalize_path(folder).replace('/', '\\\\\\\\')

  # Group the files by platform.
  for file in files:
    parts = file.split(platform_sep)
    file = parts[0]
    if len(parts) > 1:
      # Add the file under the platform.
      platform = parts[1]
      if not platform in platforms:
        platforms[platform] = []
      platforms[platform].append(file)
    else:
      common.append(file)

  result = ''
  if len(common) > 0:
    result += format_cmake_set(name, common)

  if len(platforms) > 0:
    keys = sorted(platforms.keys())
    for key in keys:
      result += format_cmake_set(name + '_' + key, platforms[key])
    result += '%s(%s)\n' % (append_macro, name)

  result += 'source_group(%s FILES ${%s})\n\n' % (folder, name)
  return result

def format_cmake_library(name, group_names):
  result = 'add_library(%s\n' % name
  for group in group_names:
    result += '  ${%s}\n' % group
  return result + '  )\n\n'

def process_cmake_template_segment(segment, segment_ct, cmake_path, variables):
  prefix = None
  library = None
  set = None
  includes = []
  suffix = '_SRCS'    # Appended to each group name before the platform name.
  platform_sep = ':'  # Used to separate value from platform name.
  append_macro = 'APPEND_PLATFORM_SOURCES'  # CMake macro name.

  # Extract values from |segment|. Example |segment| contents:
  #   'prefix': 'cefsimple',
  #   'includes': [
  #     'cefsimple_sources_common',
  #     'cefsimple_sources_win:WINDOWS',
  #     'cefsimple_sources_mac:MACOSX',
  #     'cefsimple_sources_linux:LINUX',
  #   ],
  values = eval('{' + segment + '}', {'__builtins__': None}, None)
  if 'prefix' in values:
    prefix = values['prefix']
  else:
    raise Exception('Missing prefix value in segment %d' % segment_ct)

  if 'library' in values:
    library = values['library']

  if 'set' in values:
    set = values['set']

  if 'append_macro' in values:
    append_macro = values['append_macro']

  if 'includes' in values and len(values['includes']) > 0:
    for include in values['includes']:
      parts = include.strip().split(platform_sep)
      files = get_files_for_variable(cmake_path, variables, parts[0])
      if len(parts) == 2:
        # Append the platform to each file path.
        files = [file + platform_sep + parts[1] for file in files]
      includes.extend(files)
  else:
    raise Exception('Missing includes value in segment %d' % segment_ct)

  # Sort the file paths alphabetically.
  includes.sort()

  # Group files by path.
  # For example, '../include/base/foo.h' and '../include/base/bar.h' will be
  # grouped as 'PREFIX_INCLUDE_BASE'.
  groups = {}
  for include in includes:
    paths = include.split('/')
    label = prefix
    for path in paths[0:-1]:
      if path == '..':
        continue
      label += '_' + path
    label = label.replace('.', '_').upper()
    if not label in groups:
      groups[label] = []
    groups[label].append(include)

  # Create the output results.
  result = ''

  keys = sorted(groups.keys())
  for key in keys:
    # Add a group of files that share the same path.
    result += format_cmake_group(cmake_path, key + suffix, groups[key], \
                                 platform_sep, append_macro)

  if not library is None:
    # Add the library declaration if requested.
    result += format_cmake_library(library, [key + suffix for key in keys])

  if not set is None:
    # Add the set declaration if requested.
    result += format_cmake_set(set, \
                               ['${' + key + suffix + '}' for key in keys])

  return result.strip()

def process_cmake_template(input, output, variables, quiet = False):
  """ Reads the |input| template, parses variable substitution sections and
      writes |output|. """
  if not quiet:
    sys.stdout.write('Processing "%s" to "%s"...\n' % (input, output))

  if not os.path.exists(input):
    raise Exception('File %s does not exist' % input)

  cmake_path = normalize_path(os.path.abspath(input))
  template = read_file(cmake_path)

  delim_start = '{{'
  delim_end = '}}'

  # Process the template file, replacing segments delimited by |delim_start|
  # and |delim_end|.
  result = ''
  end = 0
  segment_ct = 0
  while True:
    start = template.find(delim_start, end)
    if start == -1:
      break
    result += template[end:start]
    end = template.find(delim_end, start + len(delim_start))
    if end == -1:
      break
    segment = template[start + len(delim_start):end]
    segment_ct = segment_ct + 1
    result += process_cmake_template_segment(segment, segment_ct, \
                                             cmake_path, variables)
    end += len(delim_end)
  result += template[end:]

  # Only write the output file if the contents have changed.
  changed = True
  if os.path.exists(output):
    existing = read_file(output)
    changed = result != existing
  if changed:
    write_file(output, result)

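# Example (illustrative only; the variable names are assumptions): a
# CMakeLists.txt.in template consumed by process_cmake_template() embeds a
# segment between the {{ and }} delimiters:
#   {{
#     'prefix': 'cefsimple',
#     'library': 'cefsimple',
#     'includes': [
#       'cefsimple_sources_common',
#       'cefsimple_sources_win:WINDOWS',
#     ],
#   }}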
def read_gypi_variables(source):
  """ Read the |source| gypi file and extract the variables section. """
  path = os.path.join(cef_dir, source + '.gypi')
  if not os.path.exists(path):
    raise Exception('File %s does not exist' % path)
  contents = eval_file(path)
  if not 'variables' in contents:
    raise Exception('File %s does not have a variables section' % path)
  return contents['variables']

# File entry point.
if __name__ == "__main__":
  # Verify that the correct number of command-line arguments are provided.
  if len(sys.argv) != 3:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <outfile>')
    sys.exit()

  # Read the gypi files and combine into a single dictionary.
  variables1 = read_gypi_variables('cef_paths')
  variables2 = read_gypi_variables('cef_paths2')
  variables = dict(variables1.items() + variables2.items())

  # Process the cmake template.
  process_cmake_template(sys.argv[1], sys.argv[2], variables)
18
tools/make_cppdocs.bat
Normal file
@@ -0,0 +1,18 @@
@echo off
setlocal

if "%1"=="" (
  set CPPDOC_EXE="C:\Program Files (x86)\richfeit\CppDoc\CppDoc.exe"
  set CPPDOC_REV="XXX"
) else (
  set CPPDOC_EXE="C:\Program Files (x86)\richfeit\CppDoc\cppdoc_cmd.exe"
  set CPPDOC_REV="%1"
)

if not exist %CPPDOC_EXE% (
  echo ERROR: Please install CppDoc from http://www.cppdoc.com/
) else (
  %CPPDOC_EXE% -overwrite -title="CEF3 C++ API Docs - Revision %CPPDOC_REV%" -footer="<center><a href="http://code.google.com/p/chromiumembedded" target="_top">Chromium Embedded Framework (CEF)</a> Copyright © 2012 Marshall A. Greenblatt</center>" -namespace-as-project -comment-format="///;//;///" -classdir=projects -module="cppdoc-standard" -extensions=h -languages="c=cpp,cc=cpp,cpp=cpp,cs=csharp,cxx=cpp,h=cpp,hpp=cpp,hxx=cpp,java=java" -D"OS_WIN" -D"USING_CEF_SHARED" -D"__cplusplus" -D"CEF_STRING_TYPE_UTF16" -enable-author=false -enable-deprecations=true -enable-since=true -enable-version=false -file-links-for-globals=false -generate-deprecations-list=false -generate-hierarchy=true -header-background-dark="#ccccff" -header-background-light="#eeeeff" -include-private=false -include-protected=true -index-file-base=index -overview-html=overview.html -reduce-summary-font=true -selected-text-background=navy -selected-text-foreground=white -separate-index-pages=false -show-cppdoc-version=false -show-timestamp=false -summary-html=project.html -suppress-details=false -suppress-frames-links=false -table-background=white -wrap-long-lines=false ..\include #cef_runnable.h #cef_tuple.h #capi "..\docs\index.html"
)

endlocal
105
tools/make_cpptoc_header.py
Normal file
@@ -0,0 +1,105 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from cef_parser import *

def make_cpptoc_header(header, clsname):
  cls = header.get_class(clsname)
  if cls is None:
    raise Exception('Class does not exist: '+clsname)

  dllside = cls.is_library_side()
  defname = string.upper(get_capi_name(clsname[3:], False))
  capiname = cls.get_capi_name()

  result = get_copyright()

  result += '#ifndef CEF_LIBCEF_DLL_CPPTOC_'+defname+'_CPPTOC_H_\n'+ \
            '#define CEF_LIBCEF_DLL_CPPTOC_'+defname+'_CPPTOC_H_\n' + \
            '#pragma once\n'

  if dllside:
    result += """
#ifndef BUILDING_CEF_SHARED
#pragma message("Warning: "__FILE__" may be accessed DLL-side only")
#else  // BUILDING_CEF_SHARED
"""
  else:
    result += """
#ifndef USING_CEF_SHARED
#pragma message("Warning: "__FILE__" may be accessed wrapper-side only")
#else  // USING_CEF_SHARED
"""

  # include the headers for this class
  result += '\n#include "include/'+cls.get_file_name()+'"\n' \
            '#include "include/capi/'+cls.get_capi_file_name()+'"\n'

  # include headers for any forward declared classes that are not in the same file
  declares = cls.get_forward_declares()
  for declare in declares:
    dcls = header.get_class(declare)
    if dcls.get_file_name() != cls.get_file_name():
      result += '#include "include/'+dcls.get_file_name()+'"\n' \
                '#include "include/capi/'+dcls.get_capi_file_name()+'"\n'

  result += """#include "libcef_dll/cpptoc/cpptoc.h"

// Wrap a C++ class with a C structure.
"""

  if dllside:
    result += '// This class may be instantiated and accessed DLL-side only.\n'
  else:
    result += '// This class may be instantiated and accessed wrapper-side only.\n'

  result += 'class '+clsname+'CppToC\n'+ \
            '    : public CefCppToC<'+clsname+'CppToC, '+clsname+', '+capiname+'> {\n'+ \
            ' public:\n'+ \
            '  explicit '+clsname+'CppToC('+clsname+'* cls);\n'+ \
            '};\n\n'

  if dllside:
    result += '#endif  // BUILDING_CEF_SHARED\n'
  else:
    result += '#endif  // USING_CEF_SHARED\n'

  result += '#endif  // CEF_LIBCEF_DLL_CPPTOC_'+defname+'_CPPTOC_H_\n'

  return wrap_code(result)


def write_cpptoc_header(header, clsname, dir, backup):
  file = dir+os.sep+get_capi_name(clsname[3:], False)+'_cpptoc.h'

  if path_exists(file):
    oldcontents = read_file(file)
  else:
    oldcontents = ''

  newcontents = make_cpptoc_header(header, clsname)
  if newcontents != oldcontents:
    if backup and oldcontents != '':
      backup_file(file)
    write_file(file, newcontents)
    return True

  return False


# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # dump the result to stdout
  sys.stdout.write(make_cpptoc_header(header, sys.argv[2]))
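# Example (illustrative only): for clsname 'CefBrowser' this emits a header
# guarded by CEF_LIBCEF_DLL_CPPTOC_BROWSER_CPPTOC_H_ declaring roughly
#   class CefBrowserCppToC
#       : public CefCppToC<CefBrowserCppToC, CefBrowser, cef_browser_t> {
#    public:
#     explicit CefBrowserCppToC(CefBrowser* cls);
#   };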
571
tools/make_cpptoc_impl.py
Normal file
@@ -0,0 +1,571 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from cef_parser import *

def make_cpptoc_impl_proto(name, func, parts):
  if isinstance(func, obj_function_virtual):
    proto = parts['retval']+' CEF_CALLBACK'
  else:
    proto = 'CEF_EXPORT '+parts['retval']

  proto += ' '+name+'('+string.join(parts['args'], ', ')+')'
  return proto

def make_cpptoc_function_impl_existing(name, func, impl, defined_names):
  notify(name+' has manual edits')

  # retrieve the C API prototype parts
  parts = func.get_capi_parts(defined_names)

  changes = format_translation_changes(impl, parts)
  if len(changes) > 0:
    notify(name+' prototype changed')

  return wrap_code(make_cpptoc_impl_proto(name, func, parts))+'{'+ \
         changes+impl['body']+'\n}\n'

def make_cpptoc_function_impl_new(name, func, defined_names):
  # retrieve the C API prototype parts
  parts = func.get_capi_parts(defined_names)
  result = make_cpptoc_impl_proto(name, func, parts)+' {'

  invalid = []

  # retrieve the function arguments
  args = func.get_arguments()

  # determine the argument types
  for arg in args:
    if arg.get_arg_type() == 'invalid':
      invalid.append(arg.get_name())

  # retrieve the function return value
  retval = func.get_retval()
  retval_type = retval.get_retval_type()
  if retval_type == 'invalid':
    invalid.append('(return value)')
    retval_default = ''
  else:
    retval_default = retval.get_retval_default(True)
    if len(retval_default) > 0:
      retval_default = ' '+retval_default;

  if len(invalid) > 0:
    notify(name+' could not be autogenerated')
    # code could not be auto-generated
    result += '\n  // BEGIN DELETE BEFORE MODIFYING'
    result += '\n  // AUTO-GENERATED CONTENT'
    result += '\n  // COULD NOT IMPLEMENT DUE TO: '+string.join(invalid, ', ')
    result += '\n  #pragma message("Warning: "__FILE__": '+name+' is not implemented")'
    result += '\n  // END DELETE BEFORE MODIFYING'
    result += '\n}\n\n'
    return wrap_code(result)

  result += '\n  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING\n'

  result_len = len(result)

  optional = []

  # parameter verification
  if isinstance(func, obj_function_virtual):
    result += '\n  DCHECK(self);'\
              '\n  if (!self)'\
              '\n    return'+retval_default+';'

  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    # skip optional params
    optional_params = arg.parent.get_attrib_list('optional_param')
    if not optional_params is None and arg_name in optional_params:
      optional.append(arg_name)
      continue

    comment = '\n  // Verify param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byref' or arg_type == 'simple_byref_const' or \
       arg_type == 'simple_byaddr' or arg_type == 'bool_byref' or arg_type == 'bool_byaddr' or \
       arg_type == 'struct_byref_const' or arg_type == 'struct_byref' or \
       arg_type == 'string_byref_const' or arg_type == 'string_byref' or \
       arg_type == 'refptr_same' or arg_type == 'refptr_same_byref' or \
       arg_type == 'refptr_diff' or arg_type == 'refptr_diff_byref' or \
       arg_type == 'string_vec_byref' or arg_type == 'string_vec_byref_const' or \
       arg_type == 'string_map_single_byref' or arg_type == 'string_map_single_byref_const' or \
       arg_type == 'string_map_multi_byref' or arg_type == 'string_map_multi_byref_const':
      result += comment+\
                '\n  DCHECK('+arg_name+');'\
                '\n  if (!'+arg_name+')'\
                '\n    return'+retval_default+';'
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
         arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      result += comment+\
                '\n  DCHECK('+arg_name+'Count && (*'+arg_name+'Count == 0 || '+arg_name+'));'\
                '\n  if (!'+arg_name+'Count || (*'+arg_name+'Count > 0 && !'+arg_name+'))'\
                '\n    return'+retval_default+';'
    elif arg_type == 'simple_vec_byref_const' or arg_type == 'bool_vec_byref_const' or \
         arg_type == 'refptr_vec_same_byref_const' or arg_type == 'refptr_vec_diff_byref_const':
      result += comment+\
                '\n  DCHECK('+arg_name+'Count == 0 || '+arg_name+');'\
                '\n  if ('+arg_name+'Count > 0 && !'+arg_name+')'\
                '\n    return'+retval_default+';'

    # check index params
    index_params = arg.parent.get_attrib_list('index_param')
    if not index_params is None and arg_name in index_params:
      result += comment+\
                '\n  DCHECK_GE('+arg_name+', 0);'\
                '\n  if ('+arg_name+' < 0)'\
                '\n    return'+retval_default+';'

  if len(optional) > 0:
    # Wrap the comment at 80 characters.
    str = '\n  // Unverified params: ' + optional[0]
    for name in optional[1:]:
      str += ','
      if len(str) + len(name) + 1 > 80:
        result += str
        str = '\n  //'
      str += ' ' + name
    result += str

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # parameter translation
  params = []

  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    comment = '\n  // Translate param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byval' or arg_type == 'simple_byaddr':
      params.append(arg_name)
    elif arg_type == 'simple_byref' or arg_type == 'simple_byref_const':
      data_type = arg.get_type().get_type()
      default = arg.get_type().get_result_simple_default()
      result += comment+\
                '\n  '+data_type+' '+arg_name+'Val = '+arg_name+'?*'+arg_name+':'+default+';'
      params.append(arg_name+'Val')
    elif arg_type == 'bool_byval':
      params.append(arg_name+'?true:false')
    elif arg_type == 'bool_byref' or arg_type == 'bool_byaddr':
      result += comment+\
                '\n  bool '+arg_name+'Bool = ('+arg_name+' && *'+arg_name+')?true:false;'
      if arg_type == 'bool_byref':
        params.append(arg_name+'Bool')
      else:
        params.append('&'+arg_name+'Bool')
    elif arg_type == 'struct_byref_const':
      struct_type = arg.get_type().get_type()
      result += comment+\
                '\n  '+struct_type+' '+arg_name+'Obj;'\
                '\n  if ('+arg_name+')'\
                '\n    '+arg_name+'Obj.Set(*'+arg_name+', false);'
      params.append(arg_name+'Obj')
    elif arg_type == 'struct_byref':
      struct_type = arg.get_type().get_type()
      result += comment+\
                '\n  '+struct_type+' '+arg_name+'Obj;'\
                '\n  if ('+arg_name+')'\
                '\n    '+arg_name+'Obj.AttachTo(*'+arg_name+');'
      params.append(arg_name+'Obj')
    elif arg_type == 'string_byref_const':
      params.append('CefString('+arg_name+')')
    elif arg_type == 'string_byref':
      result += comment+\
                '\n  CefString '+arg_name+'Str('+arg_name+');'
      params.append(arg_name+'Str')
    elif arg_type == 'refptr_same' or arg_type == 'refptr_diff':
      refptr_class = arg.get_type().get_refptr_type()
      if arg_type == 'refptr_same':
        params.append(refptr_class+'CppToC::Unwrap('+arg_name+')')
      else:
        params.append(refptr_class+'CToCpp::Wrap('+arg_name+')')
    elif arg_type == 'refptr_same_byref' or arg_type == 'refptr_diff_byref':
      refptr_class = arg.get_type().get_refptr_type()
      if arg_type == 'refptr_same_byref':
        assign = refptr_class+'CppToC::Unwrap(*'+arg_name+')'
      else:
        assign = refptr_class+'CToCpp::Wrap(*'+arg_name+')'
      result += comment+\
                '\n  CefRefPtr<'+refptr_class+'> '+arg_name+'Ptr;'\
                '\n  if ('+arg_name+' && *'+arg_name+')'\
                '\n    '+arg_name+'Ptr = '+assign+';'\
                '\n  '+refptr_class+'* '+arg_name+'Orig = '+arg_name+'Ptr.get();'
      params.append(arg_name+'Ptr')
    elif arg_type == 'string_vec_byref' or arg_type == 'string_vec_byref_const':
      result += comment+\
                '\n  std::vector<CefString> '+arg_name+'List;'\
                '\n  transfer_string_list_contents('+arg_name+', '+arg_name+'List);'
      params.append(arg_name+'List')
    elif arg_type == 'string_map_single_byref' or arg_type == 'string_map_single_byref_const':
      result += comment+\
                '\n  std::map<CefString, CefString> '+arg_name+'Map;'\
                '\n  transfer_string_map_contents('+arg_name+', '+arg_name+'Map);'
      params.append(arg_name+'Map')
    elif arg_type == 'string_map_multi_byref' or arg_type == 'string_map_multi_byref_const':
      result += comment+\
                '\n  std::multimap<CefString, CefString> '+arg_name+'Multimap;'\
                '\n  transfer_string_multimap_contents('+arg_name+', '+arg_name+'Multimap);'
      params.append(arg_name+'Multimap')
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
         arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      vec_type = arg.get_type().get_vector_type()
      if arg_type == 'simple_vec_byref':
        assign = arg_name+'[i]'
      elif arg_type == 'bool_vec_byref':
        assign = arg_name+'[i]?true:false'
      elif arg_type == 'refptr_vec_same_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Unwrap('+arg_name+'[i])'
      elif arg_type == 'refptr_vec_diff_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Wrap('+arg_name+'[i])'
      result += comment+\
                '\n  std::vector<'+vec_type+' > '+arg_name+'List;'\
                '\n  if ('+arg_name+'Count && *'+arg_name+'Count > 0 && '+arg_name+') {'\
                '\n    for (size_t i = 0; i < *'+arg_name+'Count; ++i) {'\
                '\n      '+arg_name+'List.push_back('+assign+');'\
                '\n    }'\
                '\n  }'
      params.append(arg_name+'List')
    elif arg_type == 'simple_vec_byref_const' or arg_type == 'bool_vec_byref_const' or \
         arg_type == 'refptr_vec_same_byref_const' or arg_type == 'refptr_vec_diff_byref_const':
      vec_type = arg.get_type().get_vector_type()
      if arg_type == 'simple_vec_byref_const':
        assign = arg_name+'[i]'
      elif arg_type == 'bool_vec_byref_const':
        assign = arg_name+'[i]?true:false'
      elif arg_type == 'refptr_vec_same_byref_const':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Unwrap('+arg_name+'[i])'
      elif arg_type == 'refptr_vec_diff_byref_const':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Wrap('+arg_name+'[i])'
      result += comment+\
                '\n  std::vector<'+vec_type+' > '+arg_name+'List;'\
                '\n  if ('+arg_name+'Count > 0) {'\
                '\n    for (size_t i = 0; i < '+arg_name+'Count; ++i) {'\
                '\n      '+arg_name+'List.push_back('+assign+');'\
                '\n    }'\
                '\n  }'
      params.append(arg_name+'List')

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # execution
  result += '\n  // Execute\n  '

  if retval_type != 'none':
    # has a return value
    if retval_type == 'simple':
      result += retval.get_type().get_result_simple_type()
    else:
      result += retval.get_type().get_type()
    result += ' _retval = '

  if isinstance(func.parent, obj_class):
    # virtual and static class methods
    if isinstance(func, obj_function_virtual):
      result += func.parent.get_name()+'CppToC::Get(self)->'
    else:
      result += func.parent.get_name()+'::'
  result += func.get_name()+'('

  if len(params) > 0:
    result += '\n      '+string.join(params,',\n      ')

  result += ');\n'

  result_len = len(result)

  # parameter restoration
  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    comment = '\n  // Restore param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byref':
      result += comment+\
                '\n  if ('+arg_name+')'\
                '\n    *'+arg_name+' = '+arg_name+'Val;'
    elif arg_type == 'bool_byref' or arg_type == 'bool_byaddr':
      result += comment+\
                '\n  if ('+arg_name+')'\
                '\n    *'+arg_name+' = '+arg_name+'Bool?true:false;'
    elif arg_type == 'struct_byref':
      result += comment+\
                '\n  if ('+arg_name+')'\
                '\n    '+arg_name+'Obj.DetachTo(*'+arg_name+');'
    elif arg_type == 'refptr_same_byref' or arg_type == 'refptr_diff_byref':
      refptr_class = arg.get_type().get_refptr_type()
      if arg_type == 'refptr_same_byref':
        assign = refptr_class+'CppToC::Wrap('+arg_name+'Ptr)'
      else:
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+'Ptr)'
      result += comment+\
                '\n  if ('+arg_name+') {'\
                '\n    if ('+arg_name+'Ptr.get()) {'\
                '\n      if ('+arg_name+'Ptr.get() != '+arg_name+'Orig) {'\
                '\n        *'+arg_name+' = '+assign+';'\
                '\n      }'\
                '\n    } else {'\
                '\n      *'+arg_name+' = NULL;'\
                '\n    }'\
                '\n  }'
    elif arg_type == 'string_vec_byref':
      result += comment+\
                '\n  cef_string_list_clear('+arg_name+');'\
                '\n  transfer_string_list_contents('+arg_name+'List, '+arg_name+');'
    elif arg_type == 'string_map_single_byref':
      result += comment+\
                '\n  cef_string_map_clear('+arg_name+');'\
                '\n  transfer_string_map_contents('+arg_name+'Map, '+arg_name+');'
    elif arg_type == 'string_map_multi_byref':
      result += comment+\
                '\n  cef_string_multimap_clear('+arg_name+');'\
                '\n  transfer_string_multimap_contents('+arg_name+'Multimap, '+arg_name+');'
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
         arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      if arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref':
        assign = arg_name+'List[i]'
      elif arg_type == 'refptr_vec_same_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Wrap('+arg_name+'List[i])'
      elif arg_type == 'refptr_vec_diff_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+'List[i])'
      result += comment+\
                '\n  if ('+arg_name+'Count && '+arg_name+') {'\
                '\n    *'+arg_name+'Count = std::min('+arg_name+'List.size(), *'+arg_name+'Count);'\
                '\n    if (*'+arg_name+'Count > 0) {'\
                '\n      for (size_t i = 0; i < *'+arg_name+'Count; ++i) {'\
                '\n        '+arg_name+'[i] = '+assign+';'\
                '\n      }'\
                '\n    }'\
                '\n  }'

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # special handling for the global cef_shutdown function
  if name == 'cef_shutdown' and isinstance(func.parent, obj_header):
    classes = func.parent.get_classes()

    names = []
    for cls in classes:
      if cls.has_attrib('no_debugct_check'):
        continue;

      if cls.is_library_side():
        names.append(cls.get_name()+'CppToC')
      else:
        names.append(cls.get_name()+'CToCpp')

    if len(names) > 0:
      names = sorted(names)
      result += '\n#ifndef NDEBUG'\
                '\n  // Check that all wrapper objects have been destroyed'
      for name in names:
        result += '\n  DCHECK(base::AtomicRefCountIsZero(&'+name+'::DebugObjCt));';
      result += '\n#endif  // !NDEBUG'

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # return translation
  if retval_type != 'none':
    # has a return value
    result += '\n  // Return type: '+retval_type
    if retval_type == 'simple' or retval_type == 'bool':
      result += '\n  return _retval;'
    elif retval_type == 'string':
      result += '\n  return _retval.DetachToUserFree();'
    elif retval_type == 'refptr_same':
      refptr_class = retval.get_type().get_refptr_type()
      result += '\n  return '+refptr_class+'CppToC::Wrap(_retval);'
    elif retval_type == 'refptr_diff':
      refptr_class = retval.get_type().get_refptr_type()
      result += '\n  return '+refptr_class+'CToCpp::Unwrap(_retval);'

  if len(result) != result_len:
    result += '\n'

  result += '}\n'
  return wrap_code(result)

def make_cpptoc_function_impl(funcs, existing, prefixname, defined_names):
  impl = ''

  for func in funcs:
    if not prefixname is None:
      name = prefixname+'_'+func.get_capi_name()
    else:
      name = func.get_capi_name()
    value = get_next_function_impl(existing, name)
    if not value is None \
        and value['body'].find('// AUTO-GENERATED CONTENT') < 0:
      # an implementation exists that was not auto-generated
      impl += make_cpptoc_function_impl_existing(name, func, value, defined_names)
    else:
      impl += make_cpptoc_function_impl_new(name, func, defined_names)

  return impl

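# Example (illustrative only): regeneration is gated on a marker comment.
# A generated body contains the line
#   // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING
# If a maintainer deletes that marker and edits the body, the edited
# implementation is preserved by make_cpptoc_function_impl_existing on the
# next run instead of being regenerated.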
def make_cpptoc_class_impl(header, clsname, impl):
  # structure names that have already been defined
  defined_names = header.get_defined_structs()

  # retrieve the class and populate the defined names
  cls = header.get_class(clsname, defined_names)
  if cls is None:
    raise Exception('Class does not exist: '+clsname)

  capiname = cls.get_capi_name()
  prefixname = get_capi_name(clsname[3:], False)

  # retrieve the existing virtual function implementations
  existing = get_function_impls(impl, 'CEF_CALLBACK')

  # generate virtual functions
  virtualimpl = make_cpptoc_function_impl(cls.get_virtual_funcs(), existing, prefixname, defined_names)
  if len(virtualimpl) > 0:
    virtualimpl = '\n// MEMBER FUNCTIONS - Body may be edited by hand.\n\n'+virtualimpl

  # the current class is already defined for static functions
  defined_names.append(cls.get_capi_name())

  # retrieve the existing static function implementations
  existing = get_function_impls(impl, 'CEF_EXPORT')

  # generate static functions
  staticimpl = make_cpptoc_function_impl(cls.get_static_funcs(), existing, None, defined_names)
  if len(staticimpl) > 0:
    staticimpl = '\n// GLOBAL FUNCTIONS - Body may be edited by hand.\n\n'+staticimpl

  resultingimpl = staticimpl + virtualimpl

  # determine what includes are required by identifying what translation
  # classes are being used
  includes = format_translation_includes(resultingimpl)

  # build the final output
  result = get_copyright()

  result += includes+'\n'+resultingimpl+'\n'

  const = '// CONSTRUCTOR - Do not edit by hand.\n\n'+ \
          clsname+'CppToC::'+clsname+'CppToC('+clsname+'* cls)\n'+ \
          '    : CefCppToC<'+clsname+'CppToC, '+clsname+', '+capiname+'>(cls) '+ \
          '{\n';

  funcs = cls.get_virtual_funcs()
  for func in funcs:
    name = func.get_capi_name()
    const += '  struct_.struct_.'+name+' = '+prefixname+'_'+name+';\n'

  const += '}\n\n'+ \
           '#ifndef NDEBUG\n'+ \
           'template<> base::AtomicRefCount CefCppToC<'+clsname+'CppToC, '+clsname+', '+capiname+'>::DebugObjCt = 0;\n'+ \
           '#endif\n'
  result += wrap_code(const)

  return result

def make_cpptoc_global_impl(header, impl):
  # structure names that have already been defined
  defined_names = header.get_defined_structs()

  # retrieve the existing global function implementations
  existing = get_function_impls(impl, 'CEF_EXPORT')

  # generate global functions
  impl = make_cpptoc_function_impl(header.get_funcs(), existing, None, defined_names)
  if len(impl) > 0:
    impl = '\n// GLOBAL FUNCTIONS - Body may be edited by hand.\n\n'+impl

  includes = ''

  # include required headers for global functions
  filenames = []
  for func in header.get_funcs():
    filename = func.get_file_name()
    if not filename in filenames:
      includes += '#include "include/'+func.get_file_name()+'"\n' \
                  '#include "include/capi/'+func.get_capi_file_name()+'"\n'
      filenames.append(filename)

  # determine what includes are required by identifying what translation
  # classes are being used
  includes += format_translation_includes(impl)

  # build the final output
  result = get_copyright()

  result += includes+'\n'+impl

  return result

def write_cpptoc_impl(header, clsname, dir, backup):
  if clsname is None:
    # global file
    file = dir
  else:
    # class file
    file = dir+os.sep+get_capi_name(clsname[3:], False)+'_cpptoc.cc'

  if path_exists(file):
    oldcontents = read_file(file)
  else:
    oldcontents = ''

  if clsname is None:
    newcontents = make_cpptoc_global_impl(header, oldcontents)
  else:
    newcontents = make_cpptoc_class_impl(header, clsname, oldcontents)
  if newcontents != oldcontents:
    if backup and oldcontents != '':
      backup_file(file)
    write_file(file, newcontents)
    return True

  return False


# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 4:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname> <existing_impl>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # read the existing implementation file into memory
  try:
    f = open(sys.argv[3], 'r')
    data = f.read()
  except IOError, (errno, strerror):
    raise Exception('Failed to read file '+sys.argv[3]+': '+strerror)
  else:
    f.close()

  # dump the result to stdout
  sys.stdout.write(make_cpptoc_class_impl(header, sys.argv[2], data))
124
tools/make_ctocpp_header.py
Normal file
@@ -0,0 +1,124 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from cef_parser import *

def make_ctocpp_header(header, clsname):
  cls = header.get_class(clsname)
  if cls is None:
    raise Exception('Class does not exist: '+clsname)

  clientside = cls.is_client_side()
  defname = string.upper(get_capi_name(clsname[3:], False))
  capiname = cls.get_capi_name()

  result = get_copyright()

  result += '#ifndef CEF_LIBCEF_DLL_CTOCPP_'+defname+'_CTOCPP_H_\n'+ \
            '#define CEF_LIBCEF_DLL_CTOCPP_'+defname+'_CTOCPP_H_\n' + \
            '#pragma once\n'

  if clientside:
    result += """
#ifndef BUILDING_CEF_SHARED
#pragma message("Warning: "__FILE__" may be accessed DLL-side only")
#else  // BUILDING_CEF_SHARED
"""
  else:
    result += """
#ifndef USING_CEF_SHARED
#pragma message("Warning: "__FILE__" may be accessed wrapper-side only")
#else  // USING_CEF_SHARED
"""

  # build the function body
  func_body = ''
  funcs = cls.get_virtual_funcs()
  for func in funcs:
    if clientside:
      func_body += '  '+func.get_cpp_proto()+' override;\n'
    else:
      func_body += '  virtual '+func.get_cpp_proto()+' OVERRIDE;\n'

  # include standard headers
  if func_body.find('std::map') > 0 or func_body.find('std::multimap') > 0:
    result += '\n#include <map>'
  if func_body.find('std::vector') > 0:
    result += '\n#include <vector>'

  # include the headers for this class
  result += '\n#include "include/'+cls.get_file_name()+'"'+ \
            '\n#include "include/capi/'+cls.get_capi_file_name()+'"\n'

  # include headers for any forward declared classes that are not in the same file
  declares = cls.get_forward_declares()
  for declare in declares:
    dcls = header.get_class(declare)
    if dcls.get_file_name() != cls.get_file_name():
      result += '#include "include/'+dcls.get_file_name()+'"\n' \
                '#include "include/capi/'+dcls.get_capi_file_name()+'"\n'

  result += """#include "libcef_dll/ctocpp/ctocpp.h"

// Wrap a C structure with a C++ class.
"""

  if clientside:
    result += '// This class may be instantiated and accessed DLL-side only.\n'
  else:
    result += '// This class may be instantiated and accessed wrapper-side only.\n'

  result += 'class '+clsname+'CToCpp\n'+ \
            '    : public CefCToCpp<'+clsname+'CToCpp, '+clsname+', '+capiname+'> {\n'+ \
            ' public:\n'+ \
            '  explicit '+clsname+'CToCpp('+capiname+'* str)\n'+ \
            '      : CefCToCpp<'+clsname+'CToCpp, '+clsname+', '+capiname+'>(str) {}\n\n'+ \
            '  // '+clsname+' methods\n';

  result += func_body
  result += '};\n\n'

  if clientside:
    result += '#endif  // BUILDING_CEF_SHARED\n'
  else:
    result += '#endif  // USING_CEF_SHARED\n'

  result += '#endif  // CEF_LIBCEF_DLL_CTOCPP_'+defname+'_CTOCPP_H_\n'

  return wrap_code(result)


def write_ctocpp_header(header, clsname, dir, backup):
  file = dir+os.sep+get_capi_name(clsname[3:], False)+'_ctocpp.h'

  if path_exists(file):
    oldcontents = read_file(file)
  else:
    oldcontents = ''

  newcontents = make_ctocpp_header(header, clsname)
  if newcontents != oldcontents:
    if backup and oldcontents != '':
      backup_file(file)
    write_file(file, newcontents)
    return True

  return False


# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # dump the result to stdout
  sys.stdout.write(make_ctocpp_header(header, sys.argv[2]))
584
tools/make_ctocpp_impl.py
Normal file
@@ -0,0 +1,584 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from cef_parser import *

def make_ctocpp_impl_proto(clsname, name, func, parts):
  const = ''

  if clsname is None:
    proto = 'CEF_GLOBAL '+parts['retval']+' '
  else:
    proto = parts['retval']+' '+clsname
    if isinstance(func, obj_function_virtual):
      proto += 'CToCpp'
    if func.is_const():
      const = ' const'

    proto += '::'

  proto += name+'('+string.join(parts['args'], ', ')+')'+const
  return proto
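
# For illustration: given the prototype parts from func.get_cpp_parts(), the
# helper above yields strings such as (hypothetical, simplified signatures):
#   global function:  'CEF_GLOBAL bool CefDoSomething(const CefString& value)'
#   virtual method:   'bool CefFooCToCpp::IsValid()'
#   static method:    'CefRefPtr<CefFoo> CefFoo::Create()'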

def make_ctocpp_function_impl_existing(clsname, name, func, impl):
  notify(name+' has manual edits')

  # retrieve the C++ prototype parts
  parts = func.get_cpp_parts(True)

  changes = format_translation_changes(impl, parts)
  if len(changes) > 0:
    notify(name+' prototype changed')

  return wrap_code(make_ctocpp_impl_proto(clsname, name, func, parts))+'{'+ \
         changes+impl['body']+'\n}\n'

def make_ctocpp_function_impl_new(clsname, name, func):
  # build the C++ prototype
  parts = func.get_cpp_parts(True)
  result = make_ctocpp_impl_proto(clsname, name, func, parts)+' {'

  invalid = []

  # retrieve the function arguments
  args = func.get_arguments()

  # determine the argument types
  for arg in args:
    if arg.get_arg_type() == 'invalid':
      invalid.append(arg.get_name())

  # retrieve the function return value
  retval = func.get_retval()
  retval_type = retval.get_retval_type()
  if retval_type == 'invalid':
    invalid.append('(return value)')
    retval_default = ''
  else:
    retval_default = retval.get_retval_default(False)
    if len(retval_default) > 0:
      retval_default = ' '+retval_default;

  # add API hash check
  if func.has_attrib('api_hash_check'):
    result += '\n  const char* api_hash = cef_api_hash(0);'\
              '\n  if (strcmp(api_hash, CEF_API_HASH_PLATFORM)) {'\
              '\n    // The libcef API hash does not match the current header API hash.'\
              '\n    NOTREACHED();'\
              '\n    return'+retval_default+';'\
              '\n  }\n'

  if isinstance(func, obj_function_virtual):
    # add the structure size check
    result += '\n  if (CEF_MEMBER_MISSING(struct_, '+func.get_capi_name()+'))'
    result += '\n    return'+retval_default+';\n'

  if len(invalid) > 0:
    notify(name+' could not be autogenerated')
    # code could not be auto-generated
    result += '\n  // BEGIN DELETE BEFORE MODIFYING'
    result += '\n  // AUTO-GENERATED CONTENT'
    result += '\n  // COULD NOT IMPLEMENT DUE TO: '+string.join(invalid, ', ')
    result += '\n  #pragma message("Warning: "__FILE__": '+name+' is not implemented")'
    result += '\n  // END DELETE BEFORE MODIFYING'
    result += '\n}\n\n'
    return wrap_code(result)

  result += '\n  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING\n'

  result_len = len(result)

  optional = []

  # parameter verification
  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    # skip optional params
    optional_params = arg.parent.get_attrib_list('optional_param')
    if not optional_params is None and arg_name in optional_params:
      optional.append(arg_name)
      continue

    comment = '\n  // Verify param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byaddr' or arg_type == 'bool_byaddr':
      result += comment+\
                '\n  DCHECK('+arg_name+');'\
                '\n  if (!'+arg_name+')'\
                '\n    return'+retval_default+';'
    elif arg_type == 'refptr_same' or arg_type == 'refptr_diff':
      result += comment+\
                '\n  DCHECK('+arg_name+'.get());'\
                '\n  if (!'+arg_name+'.get())'\
                '\n    return'+retval_default+';'
    elif arg_type == 'string_byref_const':
      result += comment+\
                '\n  DCHECK(!'+arg_name+'.empty());'\
                '\n  if ('+arg_name+'.empty())'\
                '\n    return'+retval_default+';'

    # check index params
    index_params = arg.parent.get_attrib_list('index_param')
    if not index_params is None and arg_name in index_params:
      result += comment+\
                '\n  DCHECK_GE('+arg_name+', 0);'\
                '\n  if ('+arg_name+' < 0)'\
                '\n    return'+retval_default+';'

  if len(optional) > 0:
    # Wrap the comment at 80 characters.
    str = '\n  // Unverified params: ' + optional[0]
    for name in optional[1:]:
      str += ','
      if len(str) + len(name) + 1 > 80:
        result += str
        str = '\n  //'
      str += ' ' + name
    result += str

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # parameter translation
  params = []
  if isinstance(func, obj_function_virtual):
    params.append('struct_')

  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    comment = '\n  // Translate param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byval' or arg_type == 'simple_byaddr' or \
       arg_type == 'bool_byval':
      params.append(arg_name)
    elif arg_type == 'simple_byref' or arg_type == 'simple_byref_const' or \
         arg_type == 'struct_byref_const' or arg_type == 'struct_byref':
      params.append('&'+arg_name)
    elif arg_type == 'bool_byref':
      result += comment+\
                '\n  int '+arg_name+'Int = '+arg_name+';'
      params.append('&'+arg_name+'Int')
    elif arg_type == 'bool_byaddr':
      result += comment+\
                '\n  int '+arg_name+'Int = '+arg_name+'?*'+arg_name+':0;'
      params.append('&'+arg_name+'Int')
    elif arg_type == 'string_byref_const':
      params.append(arg_name+'.GetStruct()')
    elif arg_type == 'string_byref':
      params.append(arg_name+'.GetWritableStruct()')
    elif arg_type == 'refptr_same':
      refptr_class = arg.get_type().get_refptr_type()
      params.append(refptr_class+'CToCpp::Unwrap('+arg_name+')')
    elif arg_type == 'refptr_diff':
      refptr_class = arg.get_type().get_refptr_type()
      params.append(refptr_class+'CppToC::Wrap('+arg_name+')')
    elif arg_type == 'refptr_same_byref' or arg_type == 'refptr_diff_byref':
      refptr_class = arg.get_type().get_refptr_type()
      refptr_struct = arg.get_type().get_result_refptr_type_root()
      if arg_type == 'refptr_same_byref':
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+')'
      else:
        assign = refptr_class+'CppToC::Wrap('+arg_name+')'
      result += comment+\
                '\n  '+refptr_struct+'* '+arg_name+'Struct = NULL;'\
                '\n  if ('+arg_name+'.get())'\
                '\n    '+arg_name+'Struct = '+assign+';'\
                '\n  '+refptr_struct+'* '+arg_name+'Orig = '+arg_name+'Struct;'
      params.append('&'+arg_name+'Struct')
    elif arg_type == 'string_vec_byref' or arg_type == 'string_vec_byref_const':
      result += comment+\
                '\n  cef_string_list_t '+arg_name+'List = cef_string_list_alloc();'\
                '\n  DCHECK('+arg_name+'List);'\
                '\n  if ('+arg_name+'List)'\
                '\n    transfer_string_list_contents('+arg_name+', '+arg_name+'List);'
      params.append(arg_name+'List')
    elif arg_type == 'string_map_single_byref' or arg_type == 'string_map_single_byref_const':
      result += comment+\
                '\n  cef_string_map_t '+arg_name+'Map = cef_string_map_alloc();'\
                '\n  DCHECK('+arg_name+'Map);'\
                '\n  if ('+arg_name+'Map)'\
                '\n    transfer_string_map_contents('+arg_name+', '+arg_name+'Map);'
      params.append(arg_name+'Map')
    elif arg_type == 'string_map_multi_byref' or arg_type == 'string_map_multi_byref_const':
      result += comment+\
                '\n  cef_string_multimap_t '+arg_name+'Multimap = cef_string_multimap_alloc();'\
                '\n  DCHECK('+arg_name+'Multimap);'\
                '\n  if ('+arg_name+'Multimap)'\
                '\n    transfer_string_multimap_contents('+arg_name+', '+arg_name+'Multimap);'
      params.append(arg_name+'Multimap')
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
         arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      count_func = arg.get_attrib_count_func()
      vec_type = arg.get_type().get_result_vector_type_root()
      if arg_type == 'refptr_vec_same_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+'[i])'
      elif arg_type == 'refptr_vec_diff_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Wrap('+arg_name+'[i])'
      else:
        assign = arg_name+'[i]'
      result += comment+\
                '\n  size_t '+arg_name+'Size = '+arg_name+'.size();'\
                '\n  size_t '+arg_name+'Count = std::max('+count_func+'(), '+arg_name+'Size);'\
                '\n  '+vec_type+'* '+arg_name+'List = NULL;'\
                '\n  if ('+arg_name+'Count > 0) {'\
                '\n    '+arg_name+'List = new '+vec_type+'['+arg_name+'Count];'\
                '\n    DCHECK('+arg_name+'List);'\
                '\n    if ('+arg_name+'List) {'\
                '\n      memset('+arg_name+'List, 0, sizeof('+vec_type+')*'+arg_name+'Count);'\
                '\n    }'\
                '\n    if ('+arg_name+'List && '+arg_name+'Size > 0) {'\
                '\n      for (size_t i = 0; i < '+arg_name+'Size; ++i) {'\
                '\n        '+arg_name+'List[i] = '+assign+';'\
                '\n      }'\
                '\n    }'\
                '\n  }'
      params.append('&'+arg_name+'Count')
      params.append(arg_name+'List')
    elif arg_type == 'simple_vec_byref_const' or arg_type == 'bool_vec_byref_const' or \
         arg_type == 'refptr_vec_same_byref_const' or arg_type == 'refptr_vec_diff_byref_const':
      count_func = arg.get_attrib_count_func()
      vec_type = arg.get_type().get_result_vector_type_root()
      if arg_type == 'refptr_vec_same_byref_const':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+'[i])'
      elif arg_type == 'refptr_vec_diff_byref_const':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Wrap('+arg_name+'[i])'
      else:
        assign = arg_name+'[i]'
      result += comment+\
                '\n  const size_t '+arg_name+'Count = '+arg_name+'.size();'\
                '\n  '+vec_type+'* '+arg_name+'List = NULL;'\
                '\n  if ('+arg_name+'Count > 0) {'\
                '\n    '+arg_name+'List = new '+vec_type+'['+arg_name+'Count];'\
                '\n    DCHECK('+arg_name+'List);'\
                '\n    if ('+arg_name+'List) {'\
                '\n      for (size_t i = 0; i < '+arg_name+'Count; ++i) {'\
                '\n        '+arg_name+'List[i] = '+assign+';'\
                '\n      }'\
                '\n    }'\
                '\n  }'
      params.append(arg_name+'Count')
      params.append(arg_name+'List')

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # execution
  result += '\n  // Execute\n  '

  if retval_type != 'none':
    # has a return value
    if retval_type == 'simple' or retval_type == 'bool':
      result += retval.get_type().get_result_simple_type_root()
    elif retval_type == 'string':
      result += 'cef_string_userfree_t'
    elif retval_type == 'refptr_same' or retval_type == 'refptr_diff':
      refptr_struct = retval.get_type().get_result_refptr_type_root()
      result += refptr_struct+'*'

    result += ' _retval = '

  if isinstance(func, obj_function_virtual):
    result += 'struct_->'
  result += func.get_capi_name()+'('

  if len(params) > 0:
    if not isinstance(func, obj_function_virtual):
      result += '\n      '
    result += string.join(params,',\n      ')

  result += ');\n'

  result_len = len(result)

  # parameter restoration
  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    comment = '\n  // Restore param: '+arg_name+'; type: '+arg_type

    if arg_type == 'bool_byref':
      result += comment+\
                '\n  '+arg_name+' = '+arg_name+'Int?true:false;'
    elif arg_type == 'bool_byaddr':
      result += comment+\
                '\n  if ('+arg_name+')'\
                '\n    *'+arg_name+' = '+arg_name+'Int?true:false;'
    elif arg_type == 'refptr_same_byref' or arg_type == 'refptr_diff_byref':
      refptr_class = arg.get_type().get_refptr_type()
      refptr_struct = arg.get_type().get_result_refptr_type_root()
      if arg_type == 'refptr_same_byref':
        assign = refptr_class+'CToCpp::Wrap('+arg_name+'Struct)'
      else:
        assign = refptr_class+'CppToC::Unwrap('+arg_name+'Struct)'
      result += comment+\
                '\n  if ('+arg_name+'Struct) {'\
                '\n    if ('+arg_name+'Struct != '+arg_name+'Orig) {'\
                '\n      '+arg_name+' = '+assign+';'\
                '\n    }'\
                '\n  } else {'\
                '\n    '+arg_name+' = NULL;'\
                '\n  }'
    elif arg_type == 'string_vec_byref':
      result += comment+\
                '\n  if ('+arg_name+'List) {'\
                '\n    '+arg_name+'.clear();'\
                '\n    transfer_string_list_contents('+arg_name+'List, '+arg_name+');'\
                '\n    cef_string_list_free('+arg_name+'List);'\
                '\n  }'
    elif arg_type == 'string_vec_byref_const':
      result += comment+\
                '\n  if ('+arg_name+'List)'\
                '\n    cef_string_list_free('+arg_name+'List);'
    elif arg_type == 'string_map_single_byref':
      result += comment+\
                '\n  if ('+arg_name+'Map) {'\
                '\n    '+arg_name+'.clear();'\
                '\n    transfer_string_map_contents('+arg_name+'Map, '+arg_name+');'\
                '\n    cef_string_map_free('+arg_name+'Map);'\
                '\n  }'
    elif arg_type == 'string_map_single_byref_const':
      result += comment+\
                '\n  if ('+arg_name+'Map)'\
                '\n    cef_string_map_free('+arg_name+'Map);'
    elif arg_type == 'string_map_multi_byref':
      result += comment+\
                '\n  if ('+arg_name+'Multimap) {'\
                '\n    '+arg_name+'.clear();'\
                '\n    transfer_string_multimap_contents('+arg_name+'Multimap, '+arg_name+');'\
                '\n    cef_string_multimap_free('+arg_name+'Multimap);'\
                '\n  }'
    elif arg_type == 'string_map_multi_byref_const':
      result += comment+\
                '\n  if ('+arg_name+'Multimap)'\
                '\n    cef_string_multimap_free('+arg_name+'Multimap);'
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
         arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      count_func = arg.get_attrib_count_func()
      vec_type = arg.get_type().get_result_vector_type_root()
      if arg_type == 'refptr_vec_same_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Wrap('+arg_name+'List[i])'
      elif arg_type == 'refptr_vec_diff_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Unwrap('+arg_name+'List[i])'
      elif arg_type == 'bool_vec_byref':
        assign = arg_name+'List[i]?true:false'
      else:
        assign = arg_name+'List[i]'
      result += comment+\
                '\n  '+arg_name+'.clear();'\
                '\n  if ('+arg_name+'Count > 0 && '+arg_name+'List) {'\
                '\n    for (size_t i = 0; i < '+arg_name+'Count; ++i) {'\
                '\n      '+arg_name+'.push_back('+assign+');'\
                '\n    }'\
                '\n    delete [] '+arg_name+'List;'\
                '\n  }'
    elif arg_type == 'simple_vec_byref_const' or arg_type == 'bool_vec_byref_const' or \
         arg_type == 'refptr_vec_same_byref_const' or arg_type == 'refptr_vec_diff_byref_const':
      result += comment+\
                '\n  if ('+arg_name+'List)'\
                '\n    delete [] '+arg_name+'List;'

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # special handling for the global CefShutdown function
  if name == 'CefShutdown' and isinstance(func.parent, obj_header):
    classes = func.parent.get_classes()

    names = []
    for cls in classes:
      if cls.has_attrib('no_debugct_check'):
        continue;

      if cls.is_library_side():
        names.append(cls.get_name()+'CToCpp')
      else:
        names.append(cls.get_name()+'CppToC')

    if len(names) > 0:
      names = sorted(names)
      result += '\n#ifndef NDEBUG'\
                '\n  // Check that all wrapper objects have been destroyed'
      for name in names:
        result += '\n  DCHECK(base::AtomicRefCountIsZero(&'+name+'::DebugObjCt));';
      result += '\n#endif  // !NDEBUG'

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # return translation
  if retval_type != 'none':
    # has a return value
    result += '\n  // Return type: '+retval_type
    if retval_type == 'simple':
      result += '\n  return _retval;'
    elif retval_type == 'bool':
      result += '\n  return _retval?true:false;'
    elif retval_type == 'string':
      result += '\n  CefString _retvalStr;'\
                '\n  _retvalStr.AttachToUserFree(_retval);'\
                '\n  return _retvalStr;'
    elif retval_type == 'refptr_same':
      refptr_class = retval.get_type().get_refptr_type()
      result += '\n  return '+refptr_class+'CToCpp::Wrap(_retval);'
    elif retval_type == 'refptr_diff':
      refptr_class = retval.get_type().get_refptr_type()
      result += '\n  return '+refptr_class+'CppToC::Unwrap(_retval);'

  if len(result) != result_len:
    result += '\n'

  result += '}\n'
  return wrap_code(result)
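
# For illustration only: for a hypothetical virtual method bool CefFoo::IsValid()
# the function above emits an implementation along these lines:
#
#   bool CefFooCToCpp::IsValid() {
#     if (CEF_MEMBER_MISSING(struct_, is_valid))
#       return false;
#
#     // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING
#
#     // Execute
#     int _retval = struct_->is_valid(struct_);
#
#     // Return type: bool
#     return _retval?true:false;
#   }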

def make_ctocpp_function_impl(clsname, funcs, existing):
  impl = ''

  for func in funcs:
    name = func.get_name()
    value = get_next_function_impl(existing, name)
    if not value is None \
        and value['body'].find('// AUTO-GENERATED CONTENT') < 0:
      # an implementation exists that was not auto-generated
      impl += make_ctocpp_function_impl_existing(clsname, name, func, value)
    else:
      impl += make_ctocpp_function_impl_new(clsname, name, func)

  return impl

def make_ctocpp_class_impl(header, clsname, impl):
  cls = header.get_class(clsname)
  if cls is None:
    raise Exception('Class does not exist: '+clsname)

  capiname = cls.get_capi_name()

  # retrieve the existing virtual function implementations
  existing = get_function_impls(impl, clsname+'CToCpp::')

  # generate virtual functions
  virtualimpl = make_ctocpp_function_impl(clsname, cls.get_virtual_funcs(), existing)
  if len(virtualimpl) > 0:
    virtualimpl = '\n// VIRTUAL METHODS - Body may be edited by hand.\n\n'+virtualimpl

  # retrieve the existing static function implementations
  existing = get_function_impls(impl, clsname+'::')

  # generate static functions
  staticimpl = make_ctocpp_function_impl(clsname, cls.get_static_funcs(), existing)
  if len(staticimpl) > 0:
    staticimpl = '\n// STATIC METHODS - Body may be edited by hand.\n\n'+staticimpl

  resultingimpl = staticimpl + virtualimpl

  # determine what includes are required by identifying what translation
  # classes are being used
  includes = format_translation_includes(resultingimpl)

  # build the final output
  result = get_copyright()

  result += includes+'\n'+resultingimpl+'\n'

  result += wrap_code('#ifndef NDEBUG\n'+ \
                      'template<> base::AtomicRefCount CefCToCpp<'+clsname+'CToCpp, '+clsname+', '+capiname+'>::DebugObjCt = 0;\n'+ \
                      '#endif\n')

  return result
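
# The DebugObjCt instantiation emitted above pairs with the CefShutdown
# handling in make_ctocpp_function_impl_new(): in debug builds the generated
# CefShutdown() DCHECKs that every wrapper object count has returned to zero.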

def make_ctocpp_global_impl(header, impl):
  # retrieve the existing global function implementations
  existing = get_function_impls(impl, 'CEF_GLOBAL')

  # generate global functions
  impl = make_ctocpp_function_impl(None, header.get_funcs(), existing)
  if len(impl) > 0:
    impl = '\n// GLOBAL METHODS - Body may be edited by hand.\n\n'+impl

  includes = ''

  # include required headers for global functions
  filenames = []
  for func in header.get_funcs():
    filename = func.get_file_name()
    if not filename in filenames:
      includes += '#include "include/'+func.get_file_name()+'"\n' \
                  '#include "include/capi/'+func.get_capi_file_name()+'"\n'
      filenames.append(filename)

  # determine what includes are required by identifying what translation
  # classes are being used
  includes += format_translation_includes(impl)

  # build the final output
  result = get_copyright()

  result += includes+'\n// Define used to facilitate parsing.\n#define CEF_GLOBAL\n\n'+impl

  return result

def write_ctocpp_impl(header, clsname, dir, backup):
  if clsname is None:
    # global file
    file = dir
  else:
    # class file
    file = dir+os.sep+get_capi_name(clsname[3:], False)+'_ctocpp.cc'

  if path_exists(file):
    oldcontents = read_file(file)
  else:
    oldcontents = ''

  if clsname is None:
    newcontents = make_ctocpp_global_impl(header, oldcontents)
  else:
    newcontents = make_ctocpp_class_impl(header, clsname, oldcontents)
  if newcontents != oldcontents:
    if backup and oldcontents != '':
      backup_file(file)
    write_file(file, newcontents)
    return True

  return False


# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments is provided
  if len(sys.argv) < 4:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname> <existing_impl>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # read the existing implementation file into memory
  try:
    f = open(sys.argv[3], 'r')
    data = f.read()
  except IOError, (errno, strerror):
    raise Exception('Failed to read file '+sys.argv[3]+': '+strerror)
  else:
    f.close()

  # dump the result to stdout
  sys.stdout.write(make_ctocpp_class_impl(header, sys.argv[2], data))
2
tools/make_distrib.bat
Normal file
@@ -0,0 +1,2 @@
@echo off
python.bat make_distrib.py --output-dir ..\binary_distrib\ %*
699
tools/make_distrib.py
Normal file
@@ -0,0 +1,699 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from date_util import *
from file_util import *
from make_cmake import process_cmake_template
from optparse import OptionParser
import os
import re
import shlex
import subprocess
import svn_util as svn
import git_util as git
import sys
import zipfile

def create_archive(input_dir, zip_file):
  """ Creates a zip archive of the specified input directory. """
  zf = zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED, True)
  def addDir(dir):
    for f in os.listdir(dir):
      full_path = os.path.join(dir, f)
      if os.path.isdir(full_path):
        addDir(full_path)
      else:
        zf.write(full_path, os.path.relpath(full_path, \
                 os.path.join(input_dir, os.pardir)))
  addDir(input_dir)
  zf.close()
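
# For illustration (hypothetical paths): calling
#   create_archive('/out/cef_binary_<version>_linux64',
#                  '/out/cef_binary_<version>_linux64.zip')
# stores entries relative to the parent of input_dir, so every path in the
# archive is rooted at the 'cef_binary_<version>_linux64/' directory name.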

def create_7z_archive(input_dir, zip_file):
  """ Creates a 7z archive of the specified input directory. """
  command = os.environ['CEF_COMMAND_7ZIP']
  run('"' + command + '" a -y ' + zip_file + ' ' + input_dir, os.path.split(zip_file)[0])

def create_output_dir(name, parent_dir):
  """ Creates an output directory and adds the path to the archive list. """
  output_dir = os.path.abspath(os.path.join(parent_dir, name))
  remove_dir(output_dir, options.quiet)
  make_dir(output_dir, options.quiet)
  archive_dirs.append(output_dir)
  return output_dir

def get_readme_component(name):
  """ Loads a README file component. """
  paths = []
  # platform directory
  if platform == 'windows':
    platform_cmp = 'win'
  elif platform == 'macosx':
    platform_cmp = 'mac'
  elif platform == 'linux':
    platform_cmp = 'linux'
  paths.append(os.path.join(script_dir, 'distrib', platform_cmp))

  # shared directory
  paths.append(os.path.join(script_dir, 'distrib'))

  # load the file if it exists
  for path in paths:
    file = os.path.join(path, 'README.' + name + '.txt')
    if path_exists(file):
      return read_file(file)

  raise Exception('Readme component not found: ' + name)

def create_readme():
  """ Creates the README.TXT file. """
  # gather the components
  header_data = get_readme_component('header')
  mode_data = get_readme_component(mode)
  redistrib_data = get_readme_component('redistrib')
  footer_data = get_readme_component('footer')

  # format the file
  data = header_data + '\n\n' + mode_data + '\n\n' + redistrib_data + '\n\n' + footer_data
  data = data.replace('$CEF_URL$', cef_url)
  data = data.replace('$CEF_REV$', cef_rev)
  data = data.replace('$CEF_VER$', cef_ver)
  data = data.replace('$CHROMIUM_URL$', chromium_url)
  data = data.replace('$CHROMIUM_REV$', chromium_rev)
  data = data.replace('$CHROMIUM_VER$', chromium_ver)
  data = data.replace('$DATE$', date)

  if platform == 'windows':
    platform_str = 'Windows'
  elif platform == 'macosx':
    platform_str = 'Mac OS-X'
  elif platform == 'linux':
    platform_str = 'Linux'

  data = data.replace('$PLATFORM$', platform_str)

  if mode == 'standard':
    distrib_type = 'Standard'
    distrib_desc = 'This distribution contains all components necessary to build and distribute an\n' \
                   'application using CEF on the ' + platform_str + ' platform. Please see the LICENSING\n' \
                   'section of this document for licensing terms and conditions.'
  elif mode == 'minimal':
    distrib_type = 'Minimal'
    distrib_desc = 'This distribution contains only the components required to distribute an\n' \
                   'application using CEF on the ' + platform_str + ' platform. Please see the LICENSING\n' \
                   'section of this document for licensing terms and conditions.'
  elif mode == 'client':
    distrib_type = 'Client'
    distrib_desc = 'This distribution contains a release build of the cefclient sample application\n' \
                   'for the ' + platform_str + ' platform. Please see the LICENSING section of this document for\n' \
                   'licensing terms and conditions.'

  data = data.replace('$DISTRIB_TYPE$', distrib_type)
  data = data.replace('$DISTRIB_DESC$', distrib_desc)

  write_file(os.path.join(output_dir, 'README.txt'), data)
  if not options.quiet:
    sys.stdout.write('Creating README.TXT file.\n')

def transfer_gypi_files(src_dir, gypi_paths, gypi_path_prefix, dst_dir, quiet):
  """ Transfer files from one location to another. """
  for path in gypi_paths:
    # skip gyp includes
    if path[:2] == '<@':
      continue
    src = os.path.join(src_dir, path)
    dst = os.path.join(dst_dir, path.replace(gypi_path_prefix, ''))
    dst_path = os.path.dirname(dst)
    make_dir(dst_path, quiet)
    copy_file(src, dst, quiet)
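
# For illustration: with gypi_path_prefix 'tests/cefclient/', a gypi entry
# such as 'tests/cefclient/cefclient.cpp' (hypothetical) is copied from
# <src_dir>/tests/cefclient/cefclient.cpp to <dst_dir>/cefclient.cpp, so the
# distribution drops the 'tests/' layout used in the source tree.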

def normalize_headers(file, new_path = ''):
  """ Normalize headers post-processing. Remove the path component from any
      project include directives. """
  data = read_file(file)
  data = re.sub(r'''#include \"(?!include\/)[a-zA-Z0-9_\/]+\/+([a-zA-Z0-9_\.]+)\"''', \
                "// Include path modified for CEF Binary Distribution.\n#include \""+new_path+"\\1\"", data)
  write_file(file, data)
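
# For illustration, with new_path left empty the substitution rewrites a
# project include such as
#   #include "libcef_dll/ctocpp/browser_ctocpp.h"
# into
#   // Include path modified for CEF Binary Distribution.
#   #include "browser_ctocpp.h"
# while directives already of the form '#include "include/..."' are excluded
# by the (?!include\/) lookahead and left untouched.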

def transfer_files(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
  """ Transfer files based on the specified configuration. """
  if not path_exists(transfer_cfg):
    return

  configs = eval_file(transfer_cfg)
  for cfg in configs:
    dst = os.path.join(output_dir, cfg['target'])

    # perform a copy if source is specified
    if not cfg['source'] is None:
      src = os.path.join(cef_dir, cfg['source'])
      dst_path = os.path.dirname(dst)
      make_dir(dst_path, quiet)
      copy_file(src, dst, quiet)

      # place a readme file in the destination directory
      readme = os.path.join(dst_path, 'README-TRANSFER.txt')
      if not path_exists(readme):
        copy_file(os.path.join(script_dir, 'distrib/README-TRANSFER.txt'), readme)
      open(readme, 'ab').write(cfg['source']+"\n")

    # perform any required post-processing
    if 'post-process' in cfg:
      post = cfg['post-process']
      if post == 'normalize_headers':
        new_path = ''
        if cfg.has_key('new_header_path'):
          new_path = cfg['new_header_path']
        normalize_headers(dst, new_path)

def combine_libs(build_dir, libs, dest_lib):
  """ Combine multiple static libraries into a single static library. """
  cmdline = 'msvs_env.bat python combine_libs.py -o "%s"' % dest_lib
  for lib in libs:
    lib_path = os.path.join(build_dir, lib)
    if not path_exists(lib_path):
      raise Exception('Library not found: ' + lib_path)
    cmdline = cmdline + ' "%s"' % lib_path
  run(cmdline, os.path.join(cef_dir, 'tools'))
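
# For illustration (hypothetical paths): combining the sandbox libraries
# produces a command line along the lines of
#   msvs_env.bat python combine_libs.py -o "...\Release\cef_sandbox.lib"
#       "...\Release\obj\base\base.lib" "...\Release\obj\sandbox\sandbox.lib" ...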

def run(command_line, working_dir):
  """ Run a command. """
  sys.stdout.write('-------- Running "'+command_line+'" in "'+\
                   working_dir+'"...'+"\n")
  args = shlex.split(command_line.replace('\\', '\\\\'))
  return subprocess.check_call(args, cwd=working_dir, env=os.environ,
                               shell=(sys.platform == 'win32'))

# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# parse command-line options
disc = """
This utility builds the CEF Binary Distribution.
"""

parser = OptionParser(description=disc)
parser.add_option('--output-dir', dest='outputdir', metavar='DIR',
                  help='output directory [required]')
parser.add_option('--allow-partial',
                  action='store_true', dest='allowpartial', default=False,
                  help='allow creation of partial distributions')
parser.add_option('--no-symbols',
                  action='store_true', dest='nosymbols', default=False,
                  help='don\'t create symbol files')
parser.add_option('--no-docs',
                  action='store_true', dest='nodocs', default=False,
                  help='don\'t create documentation')
parser.add_option('--no-archive',
                  action='store_true', dest='noarchive', default=False,
                  help='don\'t create archives for output directories')
parser.add_option('--ninja-build',
                  action='store_true', dest='ninjabuild', default=False,
                  help='build was created using ninja')
parser.add_option('--x64-build',
                  action='store_true', dest='x64build', default=False,
                  help='build was created for 64-bit systems')
parser.add_option('--minimal',
                  action='store_true', dest='minimal', default=False,
                  help='include only release build binary files')
parser.add_option('--client',
                  action='store_true', dest='client', default=False,
                  help='include only the cefclient application')
parser.add_option('-q', '--quiet',
                  action='store_true', dest='quiet', default=False,
                  help='do not output detailed status information')
(options, args) = parser.parse_args()

# Test the operating system.
platform = '';
if sys.platform == 'win32':
  platform = 'windows'
elif sys.platform == 'darwin':
  platform = 'macosx'
elif sys.platform.startswith('linux'):
  platform = 'linux'

# the outputdir option is required
if options.outputdir is None:
  parser.print_help(sys.stderr)
  sys.exit()

if options.minimal and options.client:
  print 'Invalid combination of options'
  parser.print_help(sys.stderr)
  sys.exit()

if not options.ninjabuild:
  print 'Ninja build is required on all platforms'
  sys.exit()

# script directory
script_dir = os.path.dirname(__file__)

# CEF root directory
cef_dir = os.path.abspath(os.path.join(script_dir, os.pardir))

# src directory
src_dir = os.path.abspath(os.path.join(cef_dir, os.pardir))

# retrieve url and revision information for CEF
if svn.is_checkout(cef_dir):
  cef_info = svn.get_svn_info(cef_dir)
  cef_url = cef_info['url']
  cef_rev = cef_info['revision']
elif git.is_checkout(cef_dir):
  cef_url = git.get_url(cef_dir)
  cef_rev = git.get_svn_revision(cef_dir)
else:
  raise Exception('Not a valid checkout: %s' % (cef_dir))

# retrieve url and revision information for Chromium
if svn.is_checkout(src_dir):
  chromium_info = svn.get_svn_info(src_dir)
  chromium_url = chromium_info['url']
  chromium_rev = chromium_info['revision']
elif git.is_checkout(src_dir):
  chromium_url = git.get_url(src_dir)
  chromium_rev = git.get_hash(src_dir)
else:
  raise Exception('Not a valid checkout: %s' % (src_dir))

date = get_date()

# Read and parse the version file (key=value pairs, one per line)
args = {}
read_version_file(os.path.join(cef_dir, 'VERSION'), args)
read_version_file(os.path.join(cef_dir, '../chrome/VERSION'), args)

cef_ver = args['CEF_MAJOR']+'.'+args['BUILD']+'.'+cef_rev
chromium_ver = args['MAJOR']+'.'+args['MINOR']+'.'+args['BUILD']+'.'+args['PATCH']

# list of output directories to be archived
archive_dirs = []

platform_arch = '32'
if options.x64build:
  platform_arch = '64'

if platform == 'linux':
  platform_arch = ''
  lib_dir_name = 'lib'
  release_libcef_path = os.path.join(src_dir, 'out', 'Release', lib_dir_name, 'libcef.so');
  debug_libcef_path = os.path.join(src_dir, 'out', 'Debug', lib_dir_name, 'libcef.so');
  file_desc = ''
  output = subprocess.check_output('file ' + release_libcef_path + ' ' + debug_libcef_path + '; exit 0',
                                   env=os.environ, stderr=subprocess.STDOUT, shell=True)
  if output.find('32-bit') != -1:
    platform_arch = '32'
  if output.find('64-bit') != -1:
    platform_arch = '64'
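
# For illustration: 'file' output typically looks like
#   .../Release/lib/libcef.so: ELF 64-bit LSB shared object, x86-64, ...
# so searching the combined Release/Debug output for '32-bit' or '64-bit'
# selects the architecture suffix without knowing the build flags up front.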

# output directory
output_dir_base = 'cef_binary_' + cef_ver
output_dir_name = output_dir_base + '_' + platform + platform_arch

if options.minimal:
  mode = 'minimal'
  output_dir_name = output_dir_name + '_minimal'
elif options.client:
  mode = 'client'
  output_dir_name = output_dir_name + '_client'
else:
  mode = 'standard'

output_dir = create_output_dir(output_dir_name, options.outputdir)

# create the README.TXT file
create_readme()

# transfer the LICENSE.txt file
copy_file(os.path.join(cef_dir, 'LICENSE.txt'), output_dir, options.quiet)

# read the variables list from the autogenerated cef_paths.gypi file
cef_paths = eval_file(os.path.join(cef_dir, 'cef_paths.gypi'))
cef_paths = cef_paths['variables']

# read the variables list from the manually edited cef_paths2.gypi file
cef_paths2 = eval_file(os.path.join(cef_dir, 'cef_paths2.gypi'))
cef_paths2 = cef_paths2['variables']

if mode == 'standard':
  # create the include directory
  include_dir = os.path.join(output_dir, 'include')
  make_dir(include_dir, options.quiet)

  # create the cefclient directory
  cefclient_dir = os.path.join(output_dir, 'cefclient')
  make_dir(cefclient_dir, options.quiet)

  # create the cefsimple directory
  cefsimple_dir = os.path.join(output_dir, 'cefsimple')
  make_dir(cefsimple_dir, options.quiet)

  # create the libcef_dll_wrapper directory
  wrapper_dir = os.path.join(output_dir, 'libcef_dll')
  make_dir(wrapper_dir, options.quiet)

  # transfer common include files
  transfer_gypi_files(cef_dir, cef_paths2['includes_common'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['includes_capi'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['includes_wrapper'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths['autogen_cpp_includes'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths['autogen_capi_includes'], \
                      'include/', include_dir, options.quiet)

  # transfer common cefclient files
  transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_common'], \
                      'tests/cefclient/', cefclient_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['cefclient_bundle_resources_common'], \
                      'tests/cefclient/', cefclient_dir, options.quiet)

  # transfer common cefsimple files
  transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_common'], \
                      'tests/cefsimple/', cefsimple_dir, options.quiet)

  # transfer common libcef_dll_wrapper files
  transfer_gypi_files(cef_dir, cef_paths2['libcef_dll_wrapper_sources_common'], \
                      'libcef_dll/', wrapper_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths['autogen_client_side'], \
                      'libcef_dll/', wrapper_dir, options.quiet)

  # transfer gyp files
  copy_file(os.path.join(script_dir, 'distrib/cefclient.gyp'), output_dir, options.quiet)
  paths_gypi = os.path.join(cef_dir, 'cef_paths2.gypi')
  data = read_file(paths_gypi)
  data = data.replace('tests/cefclient/', 'cefclient/')
  data = data.replace('tests/cefsimple/', 'cefsimple/')
  write_file(os.path.join(output_dir, 'cef_paths2.gypi'), data)
  copy_file(os.path.join(cef_dir, 'cef_paths.gypi'), \
            os.path.join(output_dir, 'cef_paths.gypi'), options.quiet)

  # transfer additional files
  transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib/transfer.cfg'), \
                 output_dir, options.quiet)

  # process cmake templates
  variables = dict(cef_paths.items() + cef_paths2.items())
  process_cmake_template(os.path.join(cef_dir, 'CMakeLists.txt.in'), \
                         os.path.join(output_dir, 'CMakeLists.txt'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'macros.cmake.in'), \
                         os.path.join(output_dir, 'macros.cmake'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'libcef_dll', 'CMakeLists.txt.in'), \
                         os.path.join(output_dir, 'libcef_dll', 'CMakeLists.txt'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'tests', 'cefclient', 'CMakeLists.txt.in'), \
                         os.path.join(output_dir, 'cefclient', 'CMakeLists.txt'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'tests', 'cefsimple', 'CMakeLists.txt.in'), \
                         os.path.join(output_dir, 'cefsimple', 'CMakeLists.txt'), \
                         variables, options.quiet)

if platform == 'windows':
  binaries = [
    'd3dcompiler_47.dll',
    'ffmpegsumo.dll',
    'libcef.dll',
    'libEGL.dll',
    'libGLESv2.dll',
    'pdf.dll',
  ]
  if not options.x64build:
    binaries.append('wow_helper.exe')

  out_dir = os.path.join(src_dir, 'out')
  libcef_dll_file = 'libcef.dll.lib'
  sandbox_libs = [
    'obj\\base\\base.lib',
    'obj\\base\\base_static.lib',
    'obj\\cef\\cef_sandbox.lib',
    'obj\\base\\third_party\\dynamic_annotations\\dynamic_annotations.lib',
    'obj\\sandbox\\sandbox.lib',
  ]

  valid_build_dir = None

  build_dir_suffix = ''
  if options.x64build:
    build_dir_suffix = '_x64'

  if mode == 'standard':
    # transfer Debug files
    build_dir = os.path.join(out_dir, 'Debug' + build_dir_suffix);
    if not options.allowpartial or path_exists(os.path.join(build_dir, 'cefclient.exe')):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Debug')
      make_dir(dst_dir, options.quiet)
      copy_files(os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
      for binary in binaries:
        copy_file(os.path.join(build_dir, binary), os.path.join(dst_dir, binary), options.quiet)
      copy_file(os.path.join(build_dir, libcef_dll_file), os.path.join(dst_dir, 'libcef.lib'), \
                options.quiet)
      combine_libs(build_dir, sandbox_libs, os.path.join(dst_dir, 'cef_sandbox.lib'));

      if not options.nosymbols:
        # create the symbol output directory
        symbol_output_dir = create_output_dir(output_dir_name + '_debug_symbols', options.outputdir)
        # transfer contents
        copy_file(os.path.join(build_dir, 'libcef.dll.pdb'), symbol_output_dir, options.quiet)
    else:
      sys.stderr.write("No Debug build files.\n")

  # transfer Release files
  build_dir = os.path.join(out_dir, 'Release' + build_dir_suffix);
  if not options.allowpartial or path_exists(os.path.join(build_dir, 'cefclient.exe')):
    valid_build_dir = build_dir
    dst_dir = os.path.join(output_dir, 'Release')
    make_dir(dst_dir, options.quiet)
    copy_files(os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
    for binary in binaries:
      copy_file(os.path.join(build_dir, binary), os.path.join(dst_dir, binary), options.quiet)

    if mode != 'client':
      copy_file(os.path.join(build_dir, libcef_dll_file), os.path.join(dst_dir, 'libcef.lib'), \
                options.quiet)
      combine_libs(build_dir, sandbox_libs, os.path.join(dst_dir, 'cef_sandbox.lib'));
    else:
      copy_file(os.path.join(build_dir, 'cefclient.exe'), dst_dir, options.quiet)

    if not options.nosymbols:
      # create the symbol output directory
      symbol_output_dir = create_output_dir(output_dir_name + '_release_symbols', options.outputdir)
      # transfer contents
      copy_file(os.path.join(build_dir, 'libcef.dll.pdb'), symbol_output_dir, options.quiet)
  else:
    sys.stderr.write("No Release build files.\n")

  if not valid_build_dir is None:
    # transfer resource files
    build_dir = valid_build_dir
    if mode == 'client':
      dst_dir = os.path.join(output_dir, 'Release')
    else:
      dst_dir = os.path.join(output_dir, 'Resources')
    make_dir(dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'devtools_resources.pak'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet)
    copy_dir(os.path.join(build_dir, 'locales'), os.path.join(dst_dir, 'locales'), options.quiet)

  if mode == 'standard':
    # transfer include files
    transfer_gypi_files(cef_dir, cef_paths2['includes_win'], \
                        'include/', include_dir, options.quiet)

    # transfer cefclient files
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_win'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)

    # transfer cefsimple files
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_win'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)

    # transfer additional files, if any
    transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib/win/transfer.cfg'), \
                   output_dir, options.quiet)

  if not options.nodocs:
    # generate doc files
    os.popen('make_cppdocs.bat '+cef_rev)

    src_dir = os.path.join(cef_dir, 'docs')
    if path_exists(src_dir):
      # create the docs output directory
      docs_output_dir = create_output_dir(output_dir_base + '_docs', options.outputdir)
      # transfer contents
      copy_dir(src_dir, docs_output_dir, options.quiet)

elif platform == 'macosx':
  out_dir = os.path.join(src_dir, 'out')

  valid_build_dir = None
  framework_name = 'Chromium Embedded Framework'

  if mode == 'standard':
    # transfer Debug files
    build_dir = os.path.join(out_dir, 'Debug')
    if not options.allowpartial or path_exists(os.path.join(build_dir, 'cefclient.app')):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Debug')
      make_dir(dst_dir, options.quiet)
      copy_dir(os.path.join(build_dir, 'cefclient.app/Contents/Frameworks/%s.framework' % framework_name), \
               os.path.join(dst_dir, '%s.framework' % framework_name), options.quiet)

  # transfer Release files
  build_dir = os.path.join(out_dir, 'Release')
  if not options.allowpartial or path_exists(os.path.join(build_dir, 'cefclient.app')):
    valid_build_dir = build_dir
    dst_dir = os.path.join(output_dir, 'Release')
    make_dir(dst_dir, options.quiet)
    if mode != 'client':
      copy_dir(os.path.join(build_dir, 'cefclient.app/Contents/Frameworks/%s.framework' % framework_name), \
               os.path.join(dst_dir, '%s.framework' % framework_name), options.quiet)
    else:
      copy_dir(os.path.join(build_dir, 'cefclient.app'), os.path.join(dst_dir, 'cefclient.app'), options.quiet)

    if not options.nosymbols:
      # create the symbol output directory
      symbol_output_dir = create_output_dir(output_dir_name + '_release_symbols', options.outputdir)

      # create the real dSYM file from the "fake" dSYM file
      sys.stdout.write("Creating the real dSYM file...\n")
      src_path = os.path.join(build_dir, \
          '%s.framework.dSYM/Contents/Resources/DWARF/%s' % (framework_name, framework_name))
      dst_path = os.path.join(symbol_output_dir, '%s.dSYM' % framework_name)
      run('dsymutil "%s" -o "%s"' % (src_path, dst_path), cef_dir)

  if mode == 'standard':
    # transfer include files
    transfer_gypi_files(cef_dir, cef_paths2['includes_mac'], \
                        'include/', include_dir, options.quiet)

    # transfer cefclient files
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_mac'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_mac_helper'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_bundle_resources_mac'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)

    # transfer cefclient/mac files
    copy_dir(os.path.join(cef_dir, 'tests/cefclient/mac/'), os.path.join(output_dir, 'cefclient/mac/'), \
             options.quiet)

    # transfer cefsimple files
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_mac'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_mac_helper'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_bundle_resources_mac'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)

    # transfer cefsimple/mac files
    copy_dir(os.path.join(cef_dir, 'tests/cefsimple/mac/'), os.path.join(output_dir, 'cefsimple/mac/'), \
             options.quiet)

    # transfer additional files, if any
    transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib/mac/transfer.cfg'), \
                   output_dir, options.quiet)

elif platform == 'linux':
  out_dir = os.path.join(src_dir, 'out')
  lib_dir_name = 'lib'

  valid_build_dir = None

  if mode == 'standard':
    # transfer Debug files
    build_dir = os.path.join(out_dir, 'Debug');
    if not options.allowpartial or path_exists(os.path.join(build_dir, 'cefclient')):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Debug')
      make_dir(dst_dir, options.quiet)
      copy_file(os.path.join(build_dir, 'chrome_sandbox'), os.path.join(dst_dir, 'chrome-sandbox'), options.quiet)
      copy_file(os.path.join(build_dir, lib_dir_name, 'libcef.so'), dst_dir, options.quiet)
      copy_file(os.path.join(build_dir, 'libffmpegsumo.so'), dst_dir, options.quiet)
      copy_file(os.path.join(build_dir, 'libpdf.so'), dst_dir, options.quiet)
    else:
      sys.stderr.write("No Debug build files.\n")

  # transfer Release files
  build_dir = os.path.join(out_dir, 'Release');
  if not options.allowpartial or path_exists(os.path.join(build_dir, 'cefclient')):
    valid_build_dir = build_dir
    dst_dir = os.path.join(output_dir, 'Release')
    make_dir(dst_dir, options.quiet)

    if mode == 'client':
      lib_dst_dir = os.path.join(dst_dir, lib_dir_name)
      make_dir(lib_dst_dir, options.quiet)
      copy_file(os.path.join(build_dir, lib_dir_name, 'libcef.so'), lib_dst_dir, options.quiet)
      copy_file(os.path.join(build_dir, 'cefclient'), dst_dir, options.quiet)
    else:
      copy_file(os.path.join(build_dir, lib_dir_name, 'libcef.so'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'chrome_sandbox'), os.path.join(dst_dir, 'chrome-sandbox'), options.quiet)
    copy_file(os.path.join(build_dir, 'libffmpegsumo.so'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'libpdf.so'), dst_dir, options.quiet)
  else:
    sys.stderr.write("No Release build files.\n")

  if not valid_build_dir is None:
    # transfer resource files
    build_dir = valid_build_dir
    if mode == 'client':
      dst_dir = os.path.join(output_dir, 'Release')
      copy_dir(os.path.join(build_dir, 'files'), os.path.join(dst_dir, 'files'), options.quiet)
    else:
      dst_dir = os.path.join(output_dir, 'Resources')
    make_dir(dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'cef.pak'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'cef_100_percent.pak'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'cef_200_percent.pak'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'devtools_resources.pak'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'icudtl.dat'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'natives_blob.bin'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'snapshot_blob.bin'), dst_dir, options.quiet)
    copy_dir(os.path.join(build_dir, 'locales'), os.path.join(dst_dir, 'locales'), options.quiet)

  if mode == 'standard':
    # transfer include files
    transfer_gypi_files(cef_dir, cef_paths2['includes_linux'], \
                        'include/', include_dir, options.quiet)

    # transfer cefclient files
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_linux'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_bundle_resources_linux'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)

    # transfer cefsimple files
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_linux'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)

    # transfer additional files, if any
    copy_file(os.path.join(script_dir, 'distrib/linux/build.sh'), output_dir, options.quiet)
    transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib/linux/transfer.cfg'), \
                   output_dir, options.quiet)

if not options.noarchive:
  # create an archive for each output directory
  archive_extension = '.zip'
  if os.getenv('CEF_COMMAND_7ZIP', '') != '':
    archive_extension = '.7z'
  for dir in archive_dirs:
    zip_file = os.path.split(dir)[1] + archive_extension
    if not options.quiet:
      sys.stdout.write('Creating '+zip_file+"...\n")
    if archive_extension == '.zip':
      create_archive(dir, os.path.join(dir, os.pardir, zip_file))
    else:
      create_7z_archive(dir, os.path.join(dir, os.pardir, zip_file))
2
tools/make_distrib.sh
Executable file
@@ -0,0 +1,2 @@
#!/bin/sh
python make_distrib.py --output-dir ../binary_distrib/ $@
108
tools/make_gypi_file.py
Normal file
@@ -0,0 +1,108 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from cef_parser import *

def make_gypi_file(header):
  # header string
  result = \
"""# Copyright (c) $YEAR$ The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.
#
# ---------------------------------------------------------------------------
#
# This file was generated by the CEF translator tool and should not be edited
# by hand. See the translator.README.txt file in the tools directory for
# more information.
#

{
  'variables': {
"""

  filenames = sorted(header.get_file_names())

  # cpp includes
  result += "    'autogen_cpp_includes': [\n"
  for filename in filenames:
    result += "      'include/"+filename+"',\n"
  result += "    ],\n"

  # capi includes
  result += "    'autogen_capi_includes': [\n"
  for filename in filenames:
    result += "      'include/capi/"+get_capi_file_name(filename)+"',\n"
  result += "    ],\n"

  classes = sorted(header.get_class_names())

  # library side includes
  result += "    'autogen_library_side': [\n"
  for clsname in classes:
    cls = header.get_class(clsname)
    filename = get_capi_name(clsname[3:], False)
    if cls.is_library_side():
      result += "      'libcef_dll/cpptoc/"+filename+"_cpptoc.cc',\n" \
                "      'libcef_dll/cpptoc/"+filename+"_cpptoc.h',\n"
    else:
      result += "      'libcef_dll/ctocpp/"+filename+"_ctocpp.cc',\n" \
                "      'libcef_dll/ctocpp/"+filename+"_ctocpp.h',\n"
  result += "    ],\n"

  # client side includes
  result += "    'autogen_client_side': [\n"
  for clsname in classes:
    cls = header.get_class(clsname)
    filename = get_capi_name(clsname[3:], False)
    if cls.is_library_side():
      result += "      'libcef_dll/ctocpp/"+filename+"_ctocpp.cc',\n" \
                "      'libcef_dll/ctocpp/"+filename+"_ctocpp.h',\n"
    else:
      result += "      'libcef_dll/cpptoc/"+filename+"_cpptoc.cc',\n" \
                "      'libcef_dll/cpptoc/"+filename+"_cpptoc.h',\n"
  result += "    ],\n"

  # footer string
  result += \
"""  },
}
"""

  # add the copyright year
  result = result.replace('$YEAR$', get_year())

  return result
|
||||
def write_gypi_file(header, file, backup):
|
||||
if path_exists(file):
|
||||
oldcontents = read_file(file)
|
||||
else:
|
||||
oldcontents = ''
|
||||
|
||||
newcontents = make_gypi_file(header)
|
||||
if newcontents != oldcontents:
|
||||
if backup and oldcontents != '':
|
||||
backup_file(file)
|
||||
write_file(file, newcontents)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
# test the module
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
|
||||
# verify that the correct number of command-line arguments are provided
|
||||
if len(sys.argv) < 2:
|
||||
sys.stderr.write('Usage: '+sys.argv[0]+' <infile>')
|
||||
sys.exit()
|
||||
|
||||
# create the header object
|
||||
header = obj_header()
|
||||
header.add_file(sys.argv[1])
|
||||
|
||||
# dump the result to stdout
|
||||
sys.stdout.write(make_gypi_file(header))
|
121
tools/make_pack_header.py
Normal file
@@ -0,0 +1,121 @@
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file.

"""
A simple utility to merge pack resource files into a single resource file.
"""

from date_util import *
from file_util import *
import os
import re
import string
import sys


def MakeFileSegment(input):
  result = """

// ---------------------------------------------------------------------------
// From $FILE$:
"""

  filename = os.path.split(input)[1]
  result = result.replace('$FILE$', filename)

  contents = read_file(input)

  # identify the defines in the file
  p = re.compile(r'#define\s([A-Za-z0-9_]{1,})\s([0-9]{1,})')
  list = p.findall(contents)
  for name, id in list:
    result += "\n#define %s %s" % (name, id)

  return result


def MakeFile(output, input):
  # header string
  result = \
"""// Copyright (c) $YEAR$ Marshall A. Greenblatt. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//    * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//    * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//    * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// ---------------------------------------------------------------------------
//
// This file is generated by the make_pack_header.py tool.
//

#ifndef $GUARD$
#define $GUARD$
#pragma once"""

  # sort the input files by name
  input = sorted(input, key=lambda path: os.path.split(path)[1])

  # generate the file segments
  for file in input:
    result += MakeFileSegment(file)

  # footer string
  result += \
"""

#endif  // $GUARD$
"""

  # add the copyright year
  result = result.replace('$YEAR$', get_year())
  # add the guard string
  filename = os.path.split(output)[1]
  guard = 'CEF_INCLUDE_'+string.upper(filename.replace('.', '_'))+'_'
  result = result.replace('$GUARD$', guard)

  if path_exists(output):
    old_contents = read_file(output)
  else:
    old_contents = ''

  if (result != old_contents):
    write_file(output, result)
    sys.stdout.write('File '+output+' updated.\n')
  else:
    sys.stdout.write('File '+output+' is already up to date.\n')

def main(argv):
  if len(argv) < 3:
    print ("Usage:\n  %s <output_filename> <input_file1> [input_file2] ... " %
           argv[0])
    sys.exit(-1)
  MakeFile(argv[1], argv[2:])


if '__main__' == __name__:
  main(sys.argv)
2
tools/make_version_header.bat
Normal file
@@ -0,0 +1,2 @@
@echo off
python.bat tools\make_version_header.py --header include\cef_version.h --cef_version VERSION --chrome_version ../chrome/VERSION --cpp_header_dir include
176
tools/make_version_header.py
Normal file
@@ -0,0 +1,176 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from date_util import *
from file_util import *
from optparse import OptionParser
from cef_api_hash import cef_api_hash
import svn_util as svn
import git_util as git
import sys

# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()


# parse command-line options
disc = """
This utility creates the version header file.
"""

parser = OptionParser(description=disc)
parser.add_option('--header', dest='header', metavar='FILE',
                  help='output version header file [required]')
parser.add_option('--cef_version', dest='cef_version', metavar='FILE',
                  help='input CEF version config file [required]')
parser.add_option('--chrome_version', dest='chrome_version', metavar='FILE',
                  help='input Chrome version config file [required]')
parser.add_option('--cpp_header_dir', dest='cpp_header_dir', metavar='DIR',
                  help='input directory for C++ header files [required]')
parser.add_option('-q', '--quiet',
                  action='store_true', dest='quiet', default=False,
                  help='do not output detailed status information')
(options, args) = parser.parse_args()

# the header option is required
if options.header is None or options.cef_version is None or options.chrome_version is None or options.cpp_header_dir is None:
  parser.print_help(sys.stdout)
  sys.exit()

def write_svn_header(header, chrome_version, cef_version, cpp_header_dir):
  """ Creates the header file for the current revision and Chrome version information
  if the information has changed or if the file doesn't already exist. """

  if not path_exists(chrome_version):
    raise Exception('Chrome version file '+chrome_version+' does not exist.')
  if not path_exists(cef_version):
    raise Exception('CEF version file '+cef_version+' does not exist.')

  args = {}
  read_version_file(chrome_version, args)
  read_version_file(cef_version, args)

  if path_exists(header):
    oldcontents = read_file(header)
  else:
    oldcontents = ''

  year = get_year()

  if svn.is_checkout('.'):
    revision = svn.get_revision()
  elif git.is_checkout('.'):
    revision = git.get_svn_revision()
  else:
    raise Exception('Not a valid checkout')

  # calculate api hashes
  api_hash_calculator = cef_api_hash(cpp_header_dir, verbose = False)
  api_hashes = api_hash_calculator.calculate()

  newcontents = '// Copyright (c) '+year+' Marshall A. Greenblatt. All rights reserved.\n'+\
                '//\n'+\
                '// Redistribution and use in source and binary forms, with or without\n'+\
                '// modification, are permitted provided that the following conditions are\n'+\
                '// met:\n'+\
                '//\n'+\
                '//    * Redistributions of source code must retain the above copyright\n'+\
                '// notice, this list of conditions and the following disclaimer.\n'+\
                '//    * Redistributions in binary form must reproduce the above\n'+\
                '// copyright notice, this list of conditions and the following disclaimer\n'+\
                '// in the documentation and/or other materials provided with the\n'+\
                '// distribution.\n'+\
                '//    * Neither the name of Google Inc. nor the name Chromium Embedded\n'+\
                '// Framework nor the names of its contributors may be used to endorse\n'+\
                '// or promote products derived from this software without specific prior\n'+\
                '// written permission.\n'+\
                '//\n'+\
                '// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n'+\
                '// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n'+\
                '// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n'+\
                '// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n'+\
                '// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n'+\
                '// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n'+\
                '// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n'+\
                '// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n'+\
                '// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n'+\
                '// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n'+\
                '// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n'+\
                '//\n'+\
                '// ---------------------------------------------------------------------------\n'+\
                '//\n'+\
                '// This file is generated by the make_version_header.py tool.\n'+\
                '//\n\n'+\
                '#ifndef CEF_INCLUDE_CEF_VERSION_H_\n'+\
                '#define CEF_INCLUDE_CEF_VERSION_H_\n\n'+\
                '#define CEF_VERSION_MAJOR ' + args['CEF_MAJOR'] + '\n'+\
                '#define CEF_REVISION ' + revision + '\n'+\
                '#define COPYRIGHT_YEAR ' + year + '\n\n'+\
                '#define CHROME_VERSION_MAJOR ' + args['MAJOR'] + '\n'+\
                '#define CHROME_VERSION_MINOR ' + args['MINOR'] + '\n'+\
                '#define CHROME_VERSION_BUILD ' + args['BUILD'] + '\n'+\
                '#define CHROME_VERSION_PATCH ' + args['PATCH'] + '\n\n'+\
                '#define DO_MAKE_STRING(p) #p\n'+\
                '#define MAKE_STRING(p) DO_MAKE_STRING(p)\n\n'+\
                '#ifndef APSTUDIO_HIDDEN_SYMBOLS\n\n'+\
                '#include "include/internal/cef_export.h"\n\n'+\
                '#ifdef __cplusplus\n'+\
                'extern "C" {\n'+\
                '#endif\n\n'+\
                '// The API hash is created by analyzing CEF header files for C API type\n'+\
                '// definitions. The hash value will change when header files are modified\n'+\
                '// in a way that may cause binary incompatibility with other builds. The\n'+\
                '// universal hash value will change if any platform is affected whereas the\n'+\
                '// platform hash values will change only if that particular platform is\n'+\
                '// affected.\n'+\
                '#define CEF_API_HASH_UNIVERSAL "' + api_hashes['universal'] + '"\n'+\
                '#if defined(OS_WIN)\n'+\
                '#define CEF_API_HASH_PLATFORM "' + api_hashes['windows'] + '"\n'+\
                '#elif defined(OS_MACOSX)\n'+\
                '#define CEF_API_HASH_PLATFORM "' + api_hashes['macosx'] + '"\n'+\
                '#elif defined(OS_LINUX)\n'+\
                '#define CEF_API_HASH_PLATFORM "' + api_hashes['linux'] + '"\n'+\
                '#endif\n\n'+\
                '///\n'+\
                '// Returns the CEF build revision for the libcef library.\n'+\
                '///\n'+\
                'CEF_EXPORT int cef_build_revision();\n\n'+\
                '///\n'+\
                '// Returns CEF version information for the libcef library. The |entry|\n'+\
                '// parameter describes which version component will be returned:\n'+\
                '// 0 - CEF_VERSION_MAJOR\n'+\
                '// 1 - CEF_REVISION\n'+\
                '// 2 - CHROME_VERSION_MAJOR\n'+\
                '// 3 - CHROME_VERSION_MINOR\n'+\
                '// 4 - CHROME_VERSION_BUILD\n'+\
                '// 5 - CHROME_VERSION_PATCH\n'+\
                '///\n'+\
                'CEF_EXPORT int cef_version_info(int entry);\n\n'+\
                '///\n'+\
                '// Returns CEF API hashes for the libcef library. The returned string is owned\n'+\
                '// by the library and should not be freed. The |entry| parameter describes which\n'+\
                '// hash value will be returned:\n'+\
                '// 0 - CEF_API_HASH_PLATFORM\n'+\
                '// 1 - CEF_API_HASH_UNIVERSAL\n'+\
                '///\n'+\
                'CEF_EXPORT const char* cef_api_hash(int entry);\n\n'+\
                '#ifdef __cplusplus\n'+\
                '}\n'+\
                '#endif\n\n'+\
                '#endif  // APSTUDIO_HIDDEN_SYMBOLS\n\n'+\
                '#endif  // CEF_INCLUDE_CEF_VERSION_H_\n'
  if newcontents != oldcontents:
    write_file(header, newcontents)
    return True

  return False

written = write_svn_header(options.header, options.chrome_version, options.cef_version, options.cpp_header_dir)
if not options.quiet:
  if written:
    sys.stdout.write('File '+options.header+' updated.\n')
  else:
    sys.stdout.write('File '+options.header+' is already up to date.\n')
2
tools/make_version_header.sh
Executable file
@@ -0,0 +1,2 @@
#!/bin/sh
python tools/make_version_header.py --header include/cef_version.h --cef_version VERSION --chrome_version ../chrome/VERSION --cpp_header_dir include
60
tools/msvs_env.bat
Normal file
@@ -0,0 +1,60 @@
@echo off
:: Copyright (c) 2013 The Chromium Embedded Framework Authors. All rights
:: reserved. Use of this source code is governed by a BSD-style license
:: that can be found in the LICENSE file.

:: Set up the environment for use with MSVS tools and then execute whatever
:: was specified on the command-line.

set RC=
setlocal

:: In case it's already provided via the environment.
set vcvars="%CEF_VCVARS%"
if exist %vcvars% goto found_vcvars

:: Hardcoded list of MSVS paths.
:: Alternatively we could 'reg query' this key:
:: HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\VisualStudio\10.0\Setup\VS;ProductDir
set vcvars="%PROGRAMFILES(X86)%\Microsoft Visual Studio 12.0\VC\bin\vcvars32.bat"
if exist %vcvars% goto found_vcvars
set vcvars="%PROGRAMFILES(X86)%\Microsoft Visual Studio 11.0\VC\bin\vcvars32.bat"
if exist %vcvars% goto found_vcvars
set vcvars="%PROGRAMFILES(X86)%\Microsoft Visual Studio 10.0\VC\bin\vcvars32.bat"
if exist %vcvars% goto found_vcvars
set vcvars="%PROGRAMFILES%\Microsoft Visual Studio 12.0\VC\bin\vcvars32.bat"
if exist %vcvars% goto found_vcvars
set vcvars="%PROGRAMFILES%\Microsoft Visual Studio 11.0\VC\bin\vcvars32.bat"
if exist %vcvars% goto found_vcvars
set vcvars="%PROGRAMFILES%\Microsoft Visual Studio 10.0\VC\bin\vcvars32.bat"
if exist %vcvars% goto found_vcvars
:: VS 2008 vcvars isn't standalone, it needs this env var.
set VS90COMNTOOLS=%PROGRAMFILES(X86)%\Microsoft Visual Studio 9.0\Common7\Tools\
set vcvars="%PROGRAMFILES(X86)%\Microsoft Visual Studio 9.0\VC\bin\vcvars32.bat"
if exist %vcvars% goto found_vcvars
set VS90COMNTOOLS=%PROGRAMFILES%\Microsoft Visual Studio 9.0\Common7\Tools\
set vcvars="%PROGRAMFILES%\Microsoft Visual Studio 9.0\VC\bin\vcvars32.bat"
if exist %vcvars% goto found_vcvars

set RC=1
echo Failed to find vcvars
goto end

:found_vcvars
echo vcvars:
echo %vcvars%
call %vcvars%

echo PATH:
echo %PATH%
%*

:end
endlocal & set RC=%ERRORLEVEL%
goto omega

:returncode
exit /B %RC%

:omega
call :returncode %RC%
2
tools/patch.bat
Normal file
@@ -0,0 +1,2 @@
@echo off
python.bat tools\patcher.py --patch-config patch/patch.cfg
2
tools/patch.sh
Executable file
@@ -0,0 +1,2 @@
#!/bin/sh
python tools/patcher.py --patch-config patch/patch.cfg
157
tools/patch_updater.py
Normal file
@@ -0,0 +1,157 @@
# Copyright (c) 2014 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from optparse import OptionParser
import os
import re
import sys
from exec_util import exec_cmd
import svn_util as svn
import git_util as git

def msg(message):
  """ Output a message. """
  sys.stdout.write('--> ' + message + "\n")

def warn(message):
  """ Output a warning. """
  sys.stdout.write('-' * 80 + "\n")
  sys.stdout.write('!!!! WARNING: ' + message + "\n")
  sys.stdout.write('-' * 80 + "\n")

def extract_paths(file):
  """ Extract the list of modified paths from the patch file. """
  paths = []
  fp = open(file)
  for line in fp:
    if line[:4] != '+++ ':
      continue
    match = re.match('^([^\t]+)', line[4:])
    if not match:
      continue
    paths.append(match.group(1).strip())
  return paths
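
# Example (editor's sketch): for a patch file containing the target line
# '+++ base/message_loop.cc', extract_paths() returns ['base/message_loop.cc'].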

# Cannot be loaded as a module.
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# Parse command-line options.
disc = """
This utility updates existing patch files.
"""

parser = OptionParser(description=disc)
parser.add_option('--resave',
                  action='store_true', dest='resave', default=False,
                  help='re-save existing patch files to pick up manual changes')
parser.add_option('--revert',
                  action='store_true', dest='revert', default=False,
                  help='revert all changes from existing patch files')
(options, args) = parser.parse_args()

if options.resave and options.revert:
  print 'Invalid combination of options.'
  parser.print_help(sys.stderr)
  sys.exit()

# The CEF root directory is the parent directory of _this_ script.
cef_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
src_dir = os.path.join(cef_dir, os.pardir)

# Determine the type of Chromium checkout.
if svn.is_checkout(src_dir):
  src_is_git = False
elif git.is_checkout(src_dir):
  src_is_git = True
else:
  raise Exception('Not a valid checkout: %s' % src_dir)

patch_dir = os.path.join(cef_dir, 'patch')
patch_cfg = os.path.join(patch_dir, 'patch.cfg')
if not os.path.isfile(patch_cfg):
  raise Exception('File does not exist: %s' % patch_cfg)

# Read the patch configuration file.
msg('Reading patch config %s' % patch_cfg)
scope = {}
execfile(patch_cfg, scope)
patches = scope["patches"]

# Read each individual patch file.
patches_dir = os.path.join(patch_dir, 'patches')
for patch in patches:
  sys.stdout.write('\n')
  patch_file = os.path.join(patches_dir, patch['name']+'.patch')

  if os.path.isfile(patch_file):
    msg('Reading patch file %s' % patch_file)
    patch_root = patch['path']
    patch_root_abs = os.path.abspath(os.path.join(cef_dir, patch_root))

    # Retrieve the list of paths modified by the patch file.
    patch_paths = extract_paths(patch_file)

    # List of paths added by the patch file.
    added_paths = []

    if not options.resave:
      # Revert any changes to existing files in the patch.
      for patch_path in patch_paths:
        patch_path_abs = os.path.abspath(os.path.join(patch_root_abs, \
                                                      patch_path))
        if os.path.exists(patch_path_abs):
          msg('Reverting changes to %s' % patch_path_abs)
          if src_is_git:
            cmd = 'git checkout -- %s' % (patch_path_abs)
          else:
            cmd = 'svn revert %s' % (patch_path_abs)
          result = exec_cmd(cmd, patch_root_abs)
          if result['err'] != '':
            msg('Failed to revert file: %s' % result['err'])
            msg('Deleting file %s' % patch_path_abs)
            os.remove(patch_path_abs)
            added_paths.append(patch_path_abs)
          if result['out'] != '':
            sys.stdout.write(result['out'])
        else:
          msg('Skipping non-existing file %s' % patch_path_abs)
          added_paths.append(patch_path_abs)

      if not options.revert:
        # Apply the patch file.
        msg('Applying patch to %s' % patch_root_abs)
        result = exec_cmd('patch -p0', patch_root_abs, patch_file)
        if result['err'] != '':
          raise Exception('Failed to apply patch file: %s' % result['err'])
        sys.stdout.write(result['out'])
        if result['out'].find('FAILED') != -1:
          warn('Failed to apply %s, fix manually and run with --resave' % \
               patch['name'])
          continue

    if not options.revert:
      msg('Saving changes to %s' % patch_file)
      if src_is_git and added_paths:
        # Inform git of the added paths so they appear in the patch file.
        cmd = 'git add -N %s' % ' '.join(added_paths)
        result = exec_cmd(cmd, patch_root_abs)
        if result['err'] != '' and result['err'].find('warning:') != 0:
          raise Exception('Failed to add paths: %s' % result['err'])

      # Re-create the patch file.
      patch_paths_str = ' '.join(patch_paths)
      if src_is_git:
        cmd = 'git diff --no-prefix --relative %s' % patch_paths_str
      else:
        cmd = 'svn diff %s' % patch_paths_str
      result = exec_cmd(cmd, patch_root_abs)
      if result['err'] != '' and result['err'].find('warning:') != 0:
        raise Exception('Failed to create patch file: %s' % result['err'])
      f = open(patch_file, 'wb')
      f.write(result['out'])
      f.close()
  else:
    raise Exception('Patch file does not exist: %s' % patch_file)
593
tools/patch_util.py
Normal file
@@ -0,0 +1,593 @@
""" Patch utility to apply unified diffs """
""" Brute-force line-by-line parsing

  Project home: http://code.google.com/p/python-patch/

  This file is subject to the MIT license available here:
  http://www.opensource.org/licenses/mit-license.php

  CEF Changes
  -----------

  2013/01/03
  - Add support for patches containing new files

  2009/07/22
  - Add a 'root_directory' argument to PatchInfo::apply
  - Fix a Python 2.4 compile error in PatchInfo::parse_stream

"""

__author__ = "techtonik.rainforce.org"
__version__ = "8.12-1"

import copy
import logging
import os
import re
from stat import *
# cStringIO doesn't support unicode in 2.5
from StringIO import StringIO
from logging import debug, info, warning

from os.path import exists, isfile
from os import unlink

debugmode = False


def from_file(filename):
  """ read and parse patch file
      return PatchInfo() object
  """

  info("reading patch from file %s" % filename)
  fp = open(filename, "rb")
  patch = PatchInfo(fp)
  fp.close()
  return patch


def from_string(s):
  """ parse text string and return PatchInfo() object """
  return PatchInfo(
           StringIO(s)
         )


class HunkInfo(object):
  """ parsed hunk data (hunk starts with @@ -R +R @@) """

  def __init__(self):
    # define HunkInfo data members
    self.startsrc=None
    self.linessrc=None
    self.starttgt=None
    self.linestgt=None
    self.invalid=False
    self.text=[]

  def copy(self):
    return copy.copy(self)

#  def apply(self, estream):
#    """ write hunk data into enumerable stream
#        return strings one by one until hunk is
#        over
#
#        enumerable stream are tuples (lineno, line)
#        where lineno starts with 0
#    """
#    pass




class PatchInfo(object):
  """ patch information container """

  def __init__(self, stream=None):
    """ parse incoming stream """

    # define PatchInfo data members
    # table with a row for every source file

    #: list of source filenames
    self.source=None
    self.target=None
    #: list of lists of hunks
    self.hunks=None
    #: file endings statistics for every hunk
    self.hunkends=None

    if stream:
      self.parse_stream(stream)

  def copy(self):
    return copy.copy(self)

  def parse_stream(self, stream):
    """ parse unified diff """
    self.source = []
    self.target = []
    self.hunks = []
    self.hunkends = []

    # define possible file regions that will direct the parser flow
    header = False    # comments before the patch body
    filenames = False # lines starting with --- and +++

    hunkhead = False  # @@ -R +R @@ sequence
    hunkbody = False  #
    hunkskip = False  # skipping invalid hunk mode

    header = True
    lineends = dict(lf=0, crlf=0, cr=0)
    nextfileno = 0
    nexthunkno = 0    #: even if index starts with 0 user messages number hunks from 1

    # hunkinfo holds parsed values, hunkactual - calculated
    hunkinfo = HunkInfo()
    hunkactual = dict(linessrc=None, linestgt=None)

    fe = enumerate(stream)
    for lineno, line in fe:

      # analyze state
      if header and line.startswith("--- "):
        header = False
        # switch to filenames state
        filenames = True
      #: skip hunkskip and hunkbody code until you read definition of hunkhead
      if hunkbody:
        # process line first
        if re.match(r"^[- \+\\]", line):
          # gather stats about line endings
          if line.endswith("\r\n"):
            self.hunkends[nextfileno-1]["crlf"] += 1
          elif line.endswith("\n"):
            self.hunkends[nextfileno-1]["lf"] += 1
          elif line.endswith("\r"):
            self.hunkends[nextfileno-1]["cr"] += 1

          if line.startswith("-"):
            hunkactual["linessrc"] += 1
          elif line.startswith("+"):
            hunkactual["linestgt"] += 1
          elif not line.startswith("\\"):
            hunkactual["linessrc"] += 1
            hunkactual["linestgt"] += 1
          hunkinfo.text.append(line)
          # todo: handle \ No newline cases
        else:
          warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
          # add hunk status node
          self.hunks[nextfileno-1].append(hunkinfo.copy())
          self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
          # switch to hunkskip state
          hunkbody = False
          hunkskip = True

        # check exit conditions
        if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
          warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
          # add hunk status node
          self.hunks[nextfileno-1].append(hunkinfo.copy())
          self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
          # switch to hunkskip state
          hunkbody = False
          hunkskip = True
        elif hunkinfo.linessrc == hunkactual["linessrc"] and hunkinfo.linestgt == hunkactual["linestgt"]:
          self.hunks[nextfileno-1].append(hunkinfo.copy())
          # switch to hunkskip state
          hunkbody = False
          hunkskip = True

          # detect mixed Windows/Unix line ends
          ends = self.hunkends[nextfileno-1]
          if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
            warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
          if debugmode:
            debuglines = dict(ends)
            debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
            debug("crlf: %(crlf)d  lf: %(lf)d  cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)

      if hunkskip:
        match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
        if match:
          # switch to hunkhead state
          hunkskip = False
          hunkhead = True
        elif line.startswith("--- "):
          # switch to filenames state
          hunkskip = False
          filenames = True
          if debugmode and len(self.source) > 0:
            debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))

      if filenames:
        if line.startswith("--- "):
          if nextfileno in self.source:
            warning("skipping invalid patch for %s" % self.source[nextfileno])
            del self.source[nextfileno]
            # double source filename line is encountered
            # attempt to restart from this second line
          re_filename = "^--- ([^\t]+)"
          match = re.match(re_filename, line)
          if not match:
            warning("skipping invalid filename at line %d" % lineno)
            # switch back to header state
            filenames = False
            header = True
          else:
            self.source.append(match.group(1).strip())
        elif not line.startswith("+++ "):
          if nextfileno in self.source:
            warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
            del self.source[nextfileno]
          else:
            # this should be unreachable
            warning("skipping invalid target patch")
          filenames = False
          header = True
        else:
          if nextfileno in self.target:
            warning("skipping invalid patch - double target at line %d" % lineno)
            del self.source[nextfileno]
            del self.target[nextfileno]
            nextfileno -= 1
            # double target filename line is encountered
            # switch back to header state
            filenames = False
            header = True
          else:
            re_filename = "^\+\+\+ ([^\t]+)"
            match = re.match(re_filename, line)
            if not match:
              warning("skipping invalid patch - no target filename at line %d" % lineno)
              # switch back to header state
              filenames = False
              header = True
            else:
              self.target.append(match.group(1))
              nextfileno += 1
              # switch to hunkhead state
              filenames = False
              hunkhead = True
              nexthunkno = 0
              self.hunks.append([])
              self.hunkends.append(lineends.copy())
              continue


      if hunkhead:
        match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
        if not match:
          if nextfileno-1 not in self.hunks:
            warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
            # switch to header state
            hunkhead = False
            header = True
            continue
          else:
            # switch to header state
            hunkhead = False
            header = True
        else:
          hunkinfo.startsrc = int(match.group(1))
          if match.group(3):
            hunkinfo.linessrc = int(match.group(3))
          else:
            hunkinfo.linessrc = 1
          hunkinfo.starttgt = int(match.group(4))
          if match.group(6):
            hunkinfo.linestgt = int(match.group(6))
          else:
            hunkinfo.linestgt = 1
          hunkinfo.invalid = False
          hunkinfo.text = []

          hunkactual["linessrc"] = hunkactual["linestgt"] = 0

          # switch to hunkbody state
          hunkhead = False
          hunkbody = True
          nexthunkno += 1
          continue
    else:
      if not hunkskip:
        warning("patch file incomplete - %s" % filename)
        # sys.exit(?)
      else:
        # duplicated message when an eof is reached
        if debugmode and len(self.source) > 0:
          debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))

    info("total files: %d  total hunks: %d" % (len(self.source), sum(len(hset) for hset in self.hunks)))

  def apply(self, root_directory = None):
    """ apply parsed patch """

    total = len(self.source)
    for fileno, filename in enumerate(self.source):

      f2patch = filename
      if not root_directory is None:
        f2patch = root_directory + f2patch
      if not exists(f2patch):
        # if the patch contains a single hunk at position 0 consider it a new file
        if len(self.hunks[fileno]) == 1 and self.hunks[fileno][0].startsrc == 0:
          hunklines = [x[1:].rstrip("\r\n") for x in self.hunks[fileno][0].text if x[0] in " +"]
          if len(hunklines) > 0:
            warning("creating file %s" % (f2patch))
            f = open(f2patch, "wb")
            for line in hunklines:
              f.write(line + "\n")
            f.close()
            continue

        f2patch = self.target[fileno]
        if not exists(f2patch):
          warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
          continue
      if not isfile(f2patch):
        warning("not a file - %s" % f2patch)
        continue
      filename = f2patch

      info("processing %d/%d:\t %s" % (fileno+1, total, filename))

      # validate before patching
      f2fp = open(filename)
      hunkno = 0
      hunk = self.hunks[fileno][hunkno]
      hunkfind = []
      hunkreplace = []
      validhunks = 0
      canpatch = False
      for lineno, line in enumerate(f2fp):
        if lineno+1 < hunk.startsrc:
          continue
        elif lineno+1 == hunk.startsrc:
          hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
          hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
          #pprint(hunkreplace)
          hunklineno = 0

          # todo \ No newline at end of file

        # check hunks in source file
        if lineno+1 < hunk.startsrc+len(hunkfind)-1:
          if line.rstrip("\r\n") == hunkfind[hunklineno]:
            hunklineno+=1
          else:
            debug("hunk no.%d doesn't match source file %s" % (hunkno+1, filename))
            # file may be already patched, but we will check other hunks anyway
            hunkno += 1
            if hunkno < len(self.hunks[fileno]):
              hunk = self.hunks[fileno][hunkno]
              continue
            else:
              break

        # check if processed line is the last line
        if lineno+1 == hunk.startsrc+len(hunkfind)-1:
          debug("file %s hunk no.%d -- is ready to be patched" % (filename, hunkno+1))
          hunkno+=1
          validhunks+=1
          if hunkno < len(self.hunks[fileno]):
            hunk = self.hunks[fileno][hunkno]
          else:
            if validhunks == len(self.hunks[fileno]):
              # patch file
              canpatch = True
              break
      else:
        if hunkno < len(self.hunks[fileno]) and \
           (len(self.hunks[fileno]) != 1 or self.hunks[fileno][0].startsrc != 0):
          warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))

      f2fp.close()

      if validhunks < len(self.hunks[fileno]):
        if check_patched(filename, self.hunks[fileno]):
          warning("already patched %s" % filename)
        else:
          warning("source file is different - %s" % filename)
      if canpatch:
        backupname = filename+".orig"
        if exists(backupname):
          warning("can't backup original file to %s - aborting" % backupname)
        else:
          import shutil
          shutil.move(filename, backupname)
          if patch_hunks(backupname, filename, self.hunks[fileno]):
            warning("successfully patched %s" % filename)
            unlink(backupname)
          else:
            warning("error patching file %s" % filename)
            shutil.copy(filename, filename+".invalid")
            warning("invalid version is saved to %s" % filename+".invalid")
            # todo: proper rejects
            shutil.move(backupname, filename)

    # todo: check for premature eof




def check_patched(filename, hunks):
  matched = True
  fp = open(filename)

  class NoMatch(Exception):
    pass

  # special case for new files
  try:
    if len(hunks) == 1 and hunks[0].startsrc == 0:
      hunklines = [x[1:].rstrip("\r\n") for x in hunks[0].text if x[0] in " +"]
      if len(hunklines) > 0:
        for line in hunklines:
          srcline = fp.readline()
          if not len(srcline) or srcline.rstrip("\r\n") != line:
            raise NoMatch
        srcline = fp.readline()
        if len(srcline):
          raise NoMatch
        fp.close()
        return True
  except NoMatch:
    fp.close()
    fp = open(filename)

  lineno = 1
  line = fp.readline()
  hno = None
  try:
    if not len(line):
      raise NoMatch
    for hno, h in enumerate(hunks):
      # skip to line just before hunk starts
      while lineno < h.starttgt-1:
        line = fp.readline()
        lineno += 1
        if not len(line):
          raise NoMatch
      for hline in h.text:
        # todo: \ No newline at the end of file
        if not hline.startswith("-") and not hline.startswith("\\"):
          line = fp.readline()
          lineno += 1
          if not len(line):
            raise NoMatch
          if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
            warning("file is not patched - failed hunk: %d" % (hno+1))
            raise NoMatch
  except NoMatch:
    matched = False
    # todo: display failed hunk, i.e. expected/found

  fp.close()
  return matched




def patch_stream(instream, hunks):
  """ given a source stream and hunks iterable, yield patched stream

      converts lineends in hunk lines to the best suitable format
      autodetected from input
  """

  # todo: At the moment substituted lineends may not be the same
  # at the start and at the end of patching. Also issue a
  # warning/throw about mixed lineends (is it really needed?)

  hunks = iter(hunks)

  srclineno = 1

  lineends = {'\n':0, '\r\n':0, '\r':0}
  def get_line():
    """
    local utility function - return line from source stream
    collecting line end statistics on the way
    """
    line = instream.readline()
    # 'U' mode works only with text files
    if line.endswith("\r\n"):
      lineends["\r\n"] += 1
    elif line.endswith("\n"):
      lineends["\n"] += 1
    elif line.endswith("\r"):
      lineends["\r"] += 1
    return line


  for hno, h in enumerate(hunks):
    debug("hunk %d" % (hno+1))
    # skip to line just before hunk starts
    while srclineno < h.startsrc:
      yield get_line()
      srclineno += 1

    for hline in h.text:
      # todo: check \ No newline at the end of file
      if hline.startswith("-") or hline.startswith("\\"):
        get_line()
        srclineno += 1
        continue
      else:
        if not hline.startswith("+"):
          get_line()
          srclineno += 1
        line2write = hline[1:]
        # detect if line ends are consistent in source file
        if sum([bool(lineends[x]) for x in lineends]) == 1:
          newline = [x for x in lineends if lineends[x] != 0][0]
          yield line2write.rstrip("\r\n")+newline
        else: # newlines are mixed
          yield line2write

  for line in instream:
    yield line




def patch_hunks(srcname, tgtname, hunks):
  # get the current file mode
  mode = os.stat(srcname)[ST_MODE]

  src = open(srcname, "rb")
  tgt = open(tgtname, "wb")

  debug("processing target file %s" % tgtname)

  tgt.writelines(patch_stream(src, hunks))

  tgt.close()
  src.close()

  # restore the file mode
  os.chmod(tgtname, mode)

  return True







from optparse import OptionParser
from os.path import exists
import sys

if __name__ == "__main__":
  opt = OptionParser(usage="%prog [options] unipatch-file", version="python-patch %s" % __version__)
  opt.add_option("-d", action="store_true", dest="debugmode", help="debug mode")
  (options, args) = opt.parse_args()

  if not args:
    opt.print_version()
    print("")
    opt.print_help()
    sys.exit()
  debugmode = options.debugmode
  patchfile = args[0]
  if not exists(patchfile) or not isfile(patchfile):
    sys.exit("patch file does not exist - %s" % patchfile)


  if debugmode:
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)8s %(message)s")
  else:
    logging.basicConfig(level=logging.INFO, format="%(message)s")



  patch = from_file(patchfile)
  #pprint(patch)
  patch.apply()

  # todo: document and test line ends handling logic - patch.py detects proper line-endings
  # for inserted hunks and issues a warning if the patched file has inconsistent line ends
32
tools/patcher.README.txt
Normal file
@@ -0,0 +1,32 @@
Chromium Embedded Framework (CEF) Patch Application Tool -- patcher.py
-------------------------------------------------------------------------------

Document Last Updated: July 23, 2009


OVERVIEW
--------

The CEF patch application tool is used by the patch project to apply patches
to the Chromium and WebKit code bases. Currently only the unified diff format
is supported. See the README.txt file in the patch directory for information
on how the patch project uses this tool.

The 'patcher.bat' file can be used to run the patch application tool with
command-line arguments that match the default CEF directory structure and
output options. Run 'patcher.py -h' for a complete list of available command-
line arguments.
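
For example, with the default directory layout the tool can be run directly
as follows (this matches the patch.sh wrapper included in this commit):

  python tools/patcher.py --patch-config patch/patch.cfg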


CREDITS
-------

Thanks go to techtonik for developing the python-patch script. The
patch_util.py file is a slightly modified version of the original script,
which can be found here: http://code.google.com/p/python-patch/


WORK REMAINING
--------------

o Add support for the GIT patch format.
103
tools/patcher.py
Normal file
@@ -0,0 +1,103 @@
# Copyright (c) 2009 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import pickle
from optparse import OptionParser
import os
import sys
from file_util import *
from patch_util import *


# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()


# parse command-line options
disc = """
This utility applies patch files.
"""

parser = OptionParser(description=disc)
parser.add_option('--patch-config', dest='patchconfig', metavar='DIR',
                  help='patch configuration file')
(options, args) = parser.parse_args()

# the patchconfig option is required
if options.patchconfig is None:
  parser.print_help(sys.stdout)
  sys.exit()

# normalize the patch directory value
patchdir = os.path.dirname(os.path.abspath(options.patchconfig)).replace('\\', '/')
if patchdir[-1] != '/':
  patchdir += '/'

# check if the patching should be skipped
if os.path.isfile(patchdir + 'NOPATCH'):
  nopatch = True
  sys.stdout.write('NOPATCH exists -- files have not been patched.\n')
else:
  nopatch = False
  # locate the patch configuration file
  if not os.path.isfile(options.patchconfig):
    sys.stderr.write('File '+options.patchconfig+' does not exist.\n')
    sys.exit()

  scope = {}
  execfile(options.patchconfig, scope)
  patches = scope["patches"]

  for patch in patches:
    file = patchdir+'patches/'+patch['name']+'.patch'
    dopatch = True

    if 'condition' in patch:
      # Check that the environment variable is set.
      if patch['condition'] not in os.environ:
        sys.stderr.write('Skipping patch file '+file+'\n')
        dopatch = False

    if dopatch:
      if not os.path.isfile(file):
        sys.stderr.write('Patch file '+file+' does not exist.\n')
      else:
        sys.stderr.write('Reading patch file '+file+'\n')
        dir = patch['path']
        patchObj = from_file(file)
        patchObj.apply(dir)
      if 'note' in patch:
        separator = '-' * 79 + '\n'
        sys.stderr.write(separator)
        sys.stderr.write('NOTE: '+patch['note']+'\n')
        sys.stderr.write(separator)

# read the current include file, if any
incfile = patchdir + 'patch_state.h'
if nopatch:
  incnew = """// This file is generated by the patch tool and should not be edited manually.
#ifndef _PATCH_STATE_H
#define _PATCH_STATE_H
// No patches have been applied to the Chromium/WebKit source base.
#define CEF_PATCHES_APPLIED 0
#endif  // _PATCH_STATE_H
"""
else:
  incnew = """// This file is generated by the patch tool and should not be edited manually.
#ifndef _PATCH_STATE_H
#define _PATCH_STATE_H
// Patches have been applied to the Chromium/WebKit source base.
#define CEF_PATCHES_APPLIED 1
#endif  // _PATCH_STATE_H
"""

inccur = ''
if os.path.isfile(incfile):
  inccur = read_file(incfile)

if inccur != incnew:
  sys.stdout.write('Writing file '+incfile+'.\n')
  write_file(incfile, incnew)
186
tools/repack_locales.py
Normal file
@@ -0,0 +1,186 @@
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Helper script to repack paks for a list of locales.

Gyp doesn't have any built-in looping capability, so this just provides a way
to loop over a list of locales when repacking pak files, thus avoiding a
proliferation of mostly duplicate, cut-n-paste gyp actions.
"""

import getopt
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..',
                             'tools', 'grit'))
from grit.format import data_pack

# Some build paths defined by gyp.
GRIT_DIR = None
SHARE_INT_DIR = None
INT_DIR = None


class Usage(Exception):
  def __init__(self, msg):
    self.msg = msg


def calc_output(locale, create_dir):
  """Determine the file that will be generated for the given locale."""
  #e.g. '<(INTERMEDIATE_DIR)/da.pak',
  # For Fake Bidi, generate it at a fixed path so that tests can safely
  # reference it.
  if locale == 'fake-bidi':
    return '%s/%s.pak' % (INT_DIR, locale)
  if sys.platform in ('darwin',):
    # For Cocoa to find the locale at runtime, it needs to use '_' instead
    # of '-' (http://crbug.com/20441). Also, 'en-US' should be represented
    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
    if locale == 'en-US':
      locale = 'en'
    dir = '%s/%s.lproj' % (INT_DIR, locale.replace('-', '_'))
    if create_dir and not os.path.exists(dir):
      os.makedirs(dir)
    return dir + '/locale.pak'
  else:
    return os.path.join(INT_DIR, locale + '.pak')


def calc_inputs(locale):
  """Determine the files that need processing for the given locale."""
  inputs = []

  #e.g.
  # '<(SHARED_INTERMEDIATE_DIR)/components/strings/components_strings_da.pak'
  inputs.append(os.path.join(SHARE_INT_DIR, 'components', 'strings',
                'components_strings_%s.pak' % locale))

  #e.g.
  # '<(SHARED_INTERMEDIATE_DIR)/content/app/strings/content_strings_da.pak'
  inputs.append(os.path.join(SHARE_INT_DIR, 'content', 'app', 'strings',
                'content_strings_%s.pak' % locale))

  #e.g. '<(SHARED_INTERMEDIATE_DIR)/ui/strings/ui_strings_da.pak',
  inputs.append(os.path.join(SHARE_INT_DIR, 'ui', 'strings',
                'ui_strings_%s.pak' % locale))

  #e.g. '<(SHARED_INTERMEDIATE_DIR)/ui/strings/app_locale_settings_da.pak',
  inputs.append(os.path.join(SHARE_INT_DIR, 'ui', 'strings',
                'app_locale_settings_%s.pak' % locale))

  #e.g. '<(SHARED_INTERMEDIATE_DIR)/cef/cef_strings_da.pak'
  inputs.append(os.path.join(SHARE_INT_DIR, 'cef',
                'cef_strings_%s.pak' % locale))

  return inputs


def list_outputs(locales):
  """Returns the names of files that will be generated for the given locales.

  This is to provide gyp the list of output files, so build targets can
  properly track what needs to be built.
  """
  outputs = []
  for locale in locales:
    outputs.append(calc_output(locale, False))
  # Quote each element so filename spaces don't mess up gyp's attempt to parse
  # it into a list.
  return " ".join(['"%s"' % x for x in outputs])


def list_inputs(locales):
  """Returns the names of files that will be processed for the given locales.

  This is to provide gyp the list of input files, so build targets can properly
  track their prerequisites.
  """
  inputs = []
  for locale in locales:
    inputs += calc_inputs(locale)
  # Quote each element so filename spaces don't mess up gyp's attempt to parse
  # it into a list.
  return " ".join(['"%s"' % x for x in inputs])


def repack_locales(locales):
  """ Loop over and repack the given locales."""
  for locale in locales:
    inputs = []
    inputs += calc_inputs(locale)
    output = calc_output(locale, True)
    data_pack.DataPack.RePack(output, inputs)


def DoMain(argv):
  global GRIT_DIR
  global SHARE_INT_DIR
  global INT_DIR

  short_options = 'iog:s:x:b:h'
  long_options = 'help'

  print_inputs = False
  print_outputs = False
  usage_msg = ''

  helpstr = """\
Usage:  %s [-h] [-i | -o] -g <DIR> -x <DIR> -s <DIR> <locale> [...]
  -h, --help     Print this help, then exit.
  -i             Print the expected input file list, then exit.
  -o             Print the expected output file list, then exit.
  -g DIR         GRIT build files output directory.
  -x DIR         Intermediate build files output directory.
  -s DIR         Shared intermediate build files output directory.
  locale [...]   One or more locales to repack.""" % (
      os.path.basename(__file__))

  try:
    opts, locales = getopt.getopt(argv, short_options, long_options)
  except getopt.GetoptError, msg:
    raise Usage(str(msg))

  if not locales:
    usage_msg = 'Please specify at least one locale to process.\n'

  for o, a in opts:
    if o in ('-i'):
      print_inputs = True
    elif o in ('-o'):
      print_outputs = True
    elif o in ('-g'):
      GRIT_DIR = a
    elif o in ('-s'):
      SHARE_INT_DIR = a
    elif o in ('-x'):
      INT_DIR = a
    elif o in ('-h', '--help'):
      raise Usage(helpstr)

  if not (GRIT_DIR and INT_DIR and SHARE_INT_DIR):
    usage_msg += 'Please specify all of "-g" and "-x" and "-s".\n'
  if print_inputs and print_outputs:
    usage_msg += 'Please specify only one of "-i" or "-o".\n'

  if usage_msg:
    raise Usage(usage_msg)

  if print_inputs:
    return list_inputs(locales)

  if print_outputs:
    return list_outputs(locales)

  if not os.path.exists(INT_DIR):
    os.makedirs(INT_DIR)

  return repack_locales(locales)

if __name__ == '__main__':
  results = DoMain(sys.argv[1:])
  if results:
    print results
22
tools/revision.py
Normal file
@@ -0,0 +1,22 @@
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import svn_util as svn
import git_util as git
import os
import sys

# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

if os.path.exists(os.path.join('.', '.svn')):
  sys.stdout.write(svn.get_revision())
elif os.path.exists(os.path.join('.', '.git')):
  sys.stdout.write(git.get_svn_revision())
else:
  raise Exception('Not a valid checkout')
112
tools/setup_toolchain.py
Normal file
@@ -0,0 +1,112 @@
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import errno
import os
import re
import subprocess
import sys

"""
Copies the given "win tool" (which the toolchain uses to wrap compiler
invocations) and the environment blocks for the 32-bit and 64-bit builds on
Windows to the build directory.

The arguments are the visual studio install location and the location of the
win tool. The script assumes that the root build directory is the current dir
and the files will be written to the current directory.
"""


def ExtractImportantEnvironment():
  """Extracts environment variables required for the toolchain from the
  current environment."""
  # This list should be kept synchronized with _ExtractImportantEnvironment
  # from tools/gyp/pylib/gyp/msvs_emulation.py.
  envvars_to_save = (
      'goma_.*',  # TODO(scottmg): This is ugly, but needed for goma.
      'include',  # Needed by midl compiler.
      'lib',
      'libpath',
      'path',
      'pathext',
      'systemroot',
      'temp',
      'tmp',
      )
  result = {}
  for envvar in envvars_to_save:
    if envvar in os.environ:
      envvar = envvar.lower()
      if envvar == 'path':
        # Our own rules (for running gyp-win-tool) and other actions in
        # Chromium rely on python being in the path. Add the path to this
        # python here so that if it's not in the path when ninja is run
        # later, python will still be found.
        result[envvar.upper()] = os.path.dirname(sys.executable) + \
            os.pathsep + os.environ[envvar]
      else:
        result[envvar.upper()] = os.environ[envvar]
  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
    if required not in result:
      raise Exception('Environment variable "%s" '
                      'required to be set to valid path' % required)
  return result


def FormatAsEnvironmentBlock(envvar_dict):
  """Format as an 'environment block' directly suitable for CreateProcess.
  Briefly this is a list of key=value\0, terminated by an additional \0. See
  CreateProcess documentation for more details."""
  block = ''
  nul = '\0'
  for key, value in envvar_dict.iteritems():
    block += key + '=' + value + nul
  block += nul
  return block
|
||||
|
||||
|
||||
def CopyTool(source_path):
|
||||
"""Copies the given tool to the current directory, including a warning not
|
||||
to edit it."""
|
||||
with open(source_path) as source_file:
|
||||
tool_source = source_file.readlines()
|
||||
|
||||
# Add header and write it out to the current directory (which should be the
|
||||
# root build dir).
|
||||
with open("gyp-win-tool", 'w') as tool_file:
|
||||
tool_file.write(''.join([tool_source[0],
|
||||
'# Generated by setup_toolchain.py do not edit.\n']
|
||||
+ tool_source[1:]))
|
||||
|
||||
if len(sys.argv) != 4:
|
||||
print('Usage setup_toolchain.py '
|
||||
'<visual studio path> <win tool path> <win sdk path>')
|
||||
sys.exit(2)
|
||||
vs_path = sys.argv[1]
|
||||
tool_source = sys.argv[2]
|
||||
win_sdk_path = sys.argv[3]
|
||||
|
||||
CopyTool(tool_source)
|
||||
|
||||
important_env_vars = ExtractImportantEnvironment()
|
||||
path = important_env_vars["PATH"].split(";")
|
||||
|
||||
# Add 32-bit compiler path to the beginning and write the block.
|
||||
path32 = [os.path.join(vs_path, "VC\\BIN\\amd64_x86")] + \
|
||||
[os.path.join(win_sdk_path, "bin\\x86")] + \
|
||||
path
|
||||
important_env_vars["PATH"] = ";".join(path32)
|
||||
environ = FormatAsEnvironmentBlock(important_env_vars)
|
||||
with open('environment.x86', 'wb') as env_file:
|
||||
env_file.write(environ)
|
||||
|
||||
# Add 64-bit compiler path to the beginning and write the block.
|
||||
path64 = [os.path.join(vs_path, "VC\\BIN\\amd64")] + \
|
||||
[os.path.join(win_sdk_path, "bin\\x64")] + \
|
||||
path
|
||||
important_env_vars["PATH"] = ";".join(path64)
|
||||
environ = FormatAsEnvironmentBlock(important_env_vars)
|
||||
with open('environment.x64', 'wb') as env_file:
|
||||
env_file.write(environ)
|
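To make the block format concrete, here is a hedged sketch of the inverse operation: parsing one of the environment.x86/environment.x64 files written above back into a dict. The helper name is made up for illustration and is not part of the script.

def ReadEnvironmentBlock(path):
  """Inverse of FormatAsEnvironmentBlock (illustrative helper, not part of
  the script above): split key=value entries on NUL separators."""
  with open(path, 'rb') as f:
    data = f.read()
  env = {}
  for entry in data.split('\0'):
    if not entry:
      continue  # the block ends with an extra NUL, yielding empty entries
    key, _, value = entry.partition('=')
    env[key] = value
  return env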
60
tools/svn_util.py
Normal file
@@ -0,0 +1,60 @@
# Copyright (c) 2014 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from exec_util import exec_cmd
import os
import urllib
import xml.etree.ElementTree as ET

def is_checkout(path):
  """ Returns true if the path represents an svn checkout. """
  return os.path.exists(os.path.join(path, '.svn'))

def check_url(url):
  """ Check the URL and raise an exception if invalid. """
  if ':' in url[:7]:
    parts = url.split(':', 1)
    if (parts[0] == 'http' or parts[0] == 'https' or parts[0] == 'svn') and \
        parts[1] == urllib.quote(parts[1]):
      return url
  raise Exception('Invalid URL: %s' % (url))

def get_svn_info(path = '.'):
  """ Retrieves the URL and revision from svn info. """
  url = 'None'
  rev = 'None'
  cmd = "svn info --xml %s" % (path)
  is_http = path[0:4] == 'http'
  if is_http or os.path.exists(path):
    result = exec_cmd(cmd, path if not is_http else '.')
    if result['err'] == '':
      tree = ET.ElementTree(ET.fromstring(result['out']))
      entry = tree.getroot().find('entry')
      url = entry.find('url').text
      rev = entry.attrib['revision']
    else:
      raise Exception("Failed to execute svn info: %s" % (result['err']))
  return {'url': url, 'revision': rev}

def get_revision(path = '.'):
  """ Retrieves the revision from svn info. """
  info = get_svn_info(path)
  if info['revision'] == 'None':
    raise Exception('Unable to retrieve SVN revision for %s' % (path))
  return info['revision']

def get_changed_files(path = '.'):
  """ Retrieves the list of changed files from svn status. """
  files = []
  if os.path.exists(path):
    try:
      stream = os.popen('svn status '+path)
      for line in stream:
        status = line[0]
        # Return paths with add, modify and switch status.
        if status == 'A' or status == 'M' or status == 'S':
          files.append(line[8:].strip())
    except IOError, (errno, strerror):
      raise
  return files
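A short usage sketch (assumed caller code, not part of this file) tying these helpers together for a local working copy:

# Query the current directory if it is an svn checkout; get_revision() raises
# if 'svn info' reports no revision.
import sys
import svn_util as svn
if svn.is_checkout('.'):
  info = svn.get_svn_info('.')
  sys.stdout.write(info['url'] + '@' + info['revision'] + '\n')
  sys.stdout.write('changed: ' + ', '.join(svn.get_changed_files('.')) + '\n')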
1697
tools/translator.README.txt
Normal file
File diff suppressed because it is too large
3
tools/translator.bat
Normal file
@@ -0,0 +1,3 @@
@echo off
call python.bat translator.py --cpp-header-dir ..\include --capi-header-dir ..\include\capi --cpptoc-global-impl ..\libcef_dll\libcef_dll.cc --ctocpp-global-impl ..\libcef_dll\wrapper\libcef_dll_wrapper.cc --cpptoc-dir ..\libcef_dll\cpptoc --ctocpp-dir ..\libcef_dll\ctocpp --gypi-file ..\cef_paths.gypi
pause
164
tools/translator.py
Normal file
@@ -0,0 +1,164 @@
# Copyright (c) 2009 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import os
import sys
from cef_parser import *
from make_capi_header import *
from make_cpptoc_header import *
from make_cpptoc_impl import *
from make_ctocpp_header import *
from make_ctocpp_impl import *
from make_gypi_file import *
from optparse import OptionParser


# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()


# parse command-line options
disc = """
This utility generates files for the CEF C++ to C API translation layer.
"""

parser = OptionParser(description=disc)
parser.add_option('--cpp-header-dir', dest='cppheaderdir', metavar='DIR',
                  help='input directory for C++ header files [required]')
parser.add_option('--capi-header-dir', dest='capiheaderdir', metavar='DIR',
                  help='output directory for C API header files')
parser.add_option('--cpptoc-global-impl', dest='cpptocglobalimpl', metavar='FILE',
                  help='input/output file for CppToC global translations')
parser.add_option('--ctocpp-global-impl', dest='ctocppglobalimpl', metavar='FILE',
                  help='input/output file for CToCpp global translations')
parser.add_option('--cpptoc-dir', dest='cpptocdir', metavar='DIR',
                  help='input/output directory for CppToC class translations')
parser.add_option('--ctocpp-dir', dest='ctocppdir', metavar='DIR',
                  help='input/output directory for CToCpp class translations')
parser.add_option('--gypi-file', dest='gypifile', metavar='FILE',
                  help='output file for path information')
parser.add_option('--no-cpptoc-header',
                  action='store_true', dest='nocpptocheader', default=False,
                  help='do not output the CppToC headers')
parser.add_option('--no-cpptoc-impl',
                  action='store_true', dest='nocpptocimpl', default=False,
                  help='do not output the CppToC implementations')
parser.add_option('--no-ctocpp-header',
                  action='store_true', dest='noctocppheader', default=False,
                  help='do not output the CToCpp headers')
parser.add_option('--no-ctocpp-impl',
                  action='store_true', dest='noctocppimpl', default=False,
                  help='do not output the CToCpp implementations')
parser.add_option('--no-backup',
                  action='store_true', dest='nobackup', default=False,
                  help='do not create a backup of modified files')
parser.add_option('-c', '--classes', dest='classes', action='append',
                  help='only translate the specified classes')
parser.add_option('-q', '--quiet',
                  action='store_true', dest='quiet', default=False,
                  help='do not output detailed status information')
(options, args) = parser.parse_args()

# the cppheader option is required
if options.cppheaderdir is None:
  parser.print_help(sys.stdout)
  sys.exit()

# make sure the header directory exists
if not path_exists(options.cppheaderdir):
  sys.stderr.write('Directory '+options.cppheaderdir+' does not exist.')
  sys.exit()

# create the header object
if not options.quiet:
  sys.stdout.write('Parsing C++ headers from '+options.cppheaderdir+'...\n')
header = obj_header()
excluded_files = ['cef_application_mac.h', 'cef_version.h']
header.add_directory(options.cppheaderdir, excluded_files)

writect = 0

if not options.capiheaderdir is None:
  # output the C API headers
  if not options.quiet:
    sys.stdout.write('In C API header directory '+options.capiheaderdir+'...\n')
  filenames = sorted(header.get_file_names())
  for filename in filenames:
    if not options.quiet:
      sys.stdout.write('Generating '+filename+' C API header...\n')
    writect += write_capi_header(header,
                                 os.path.join(options.capiheaderdir, filename),
                                 not options.nobackup)

# build the list of classes to parse
allclasses = header.get_class_names()
if not options.classes is None:
  for cls in options.classes:
    if not cls in allclasses:
      sys.stderr.write('ERROR: Unknown class: '+cls)
      sys.exit()
  classes = options.classes
else:
  classes = allclasses

classes = sorted(classes)

if not options.cpptocglobalimpl is None:
  # output the CppToC global file
  if not options.quiet:
    sys.stdout.write('Generating CppToC global implementation...\n')
  writect += write_cpptoc_impl(header, None, options.cpptocglobalimpl, \
                               not options.nobackup)

if not options.ctocppglobalimpl is None:
  # output the CToCpp global file
  if not options.quiet:
    sys.stdout.write('Generating CToCpp global implementation...\n')
  writect += write_ctocpp_impl(header, None, options.ctocppglobalimpl, \
                               not options.nobackup)

if not options.cpptocdir is None:
  # output the CppToC class files
  if not options.quiet:
    sys.stdout.write('In CppToC directory '+options.cpptocdir+'...\n')

  for cls in classes:
    if not options.nocpptocheader:
      if not options.quiet:
        sys.stdout.write('Generating '+cls+'CppToC class header...\n')
      writect += write_cpptoc_header(header, cls, options.cpptocdir,
                                     not options.nobackup)
    if not options.nocpptocimpl:
      if not options.quiet:
        sys.stdout.write('Generating '+cls+'CppToC class implementation...\n')
      writect += write_cpptoc_impl(header, cls, options.cpptocdir,
                                   not options.nobackup)

if not options.ctocppdir is None:
  # output the CToCpp class files
  if not options.quiet:
    sys.stdout.write('In CToCpp directory '+options.ctocppdir+'...\n')
  for cls in classes:
    if not options.noctocppheader:
      if not options.quiet:
        sys.stdout.write('Generating '+cls+'CToCpp class header...\n')
      writect += write_ctocpp_header(header, cls, options.ctocppdir,
                                     not options.nobackup)
    if not options.noctocppimpl:
      if not options.quiet:
        sys.stdout.write('Generating '+cls+'CToCpp class implementation...\n')
      writect += write_ctocpp_impl(header, cls, options.ctocppdir,
                                   not options.nobackup)

if not options.gypifile is None:
  # output the gypi file
  if not options.quiet:
    sys.stdout.write('Generating '+options.gypifile+' file...\n')
  writect += write_gypi_file(header, options.gypifile, not options.nobackup)

if not options.quiet:
  sys.stdout.write('Done - Wrote '+str(writect)+' files.\n')
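Beyond the full runs in translator.bat above and translator.sh below, a hedged sketch of a narrower invocation using the -c/--classes flag parsed above; the class name and paths are illustrative assumptions.

# Regenerate only one class's CppToC/CToCpp files by passing -c (repeatable).
import subprocess
subprocess.check_call(['python', 'translator.py',
                       '--cpp-header-dir', '../include',
                       '--cpptoc-dir', '../libcef_dll/cpptoc',
                       '--ctocpp-dir', '../libcef_dll/ctocpp',
                       '-c', 'CefBrowser'])  # illustrative class name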
2
tools/translator.sh
Executable file
@@ -0,0 +1,2 @@
#!/bin/sh
python translator.py --cpp-header-dir ../include --capi-header-dir ../include/capi --cpptoc-global-impl ../libcef_dll/libcef_dll.cc --ctocpp-global-impl ../libcef_dll/wrapper/libcef_dll_wrapper.cc --cpptoc-dir ../libcef_dll/cpptoc --ctocpp-dir ../libcef_dll/ctocpp --gypi-file ../cef_paths.gypi