mirror of
https://bitbucket.org/chromiumembedded/cef
synced 2025-06-05 21:39:12 +02:00
Branch CEF3 files from /branches/cef3 to /trunk/cef3 (issue #564).
git-svn-id: https://chromiumembedded.googlecode.com/svn/trunk@571 5089003a-bbd8-11dd-ad1f-f1f9622dbc98
This commit is contained in:
362
tools/automate/automate-git.py
Normal file
362
tools/automate/automate-git.py
Normal file
@@ -0,0 +1,362 @@
|
||||
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from optparse import OptionParser
|
||||
from subprocess import Popen, PIPE, STDOUT
|
||||
from tempfile import mktemp
|
||||
import os
|
||||
import shlex
|
||||
import shutil
|
||||
import sys
|
||||
import urllib
|
||||
|
||||
# default URL values
|
||||
chromium_url = 'http://git.chromium.org/chromium/src.git'
|
||||
depot_tools_url = 'http://src.chromium.org/svn/trunk/tools/depot_tools'
|
||||
|
||||
def check_url(url):
  """ Check the URL and raise an exception if invalid.

  A URL is valid when its scheme is http, https or git and its remainder
  needs no percent-escaping. Returns |url| unchanged on success.
  """
  # Resolve quote() in a way that works on both Python 2 and Python 3
  # (urllib.quote moved to urllib.parse.quote in Python 3).
  try:
    from urllib import quote
  except ImportError:
    from urllib.parse import quote
  if ':' in url[:7]:
    parts = url.split(':', 1)
    # The remainder must equal its own quoted form, i.e. contain no
    # characters that would need percent-escaping.
    if (parts[0] in ["http", "https", "git"] and \
        parts[1] == quote(parts[1])):
      return url
  sys.stderr.write('Invalid URL: '+url+"\n")
  raise Exception('Invalid URL: '+url)
|
||||
|
||||
def get_exec_environ():
  """ Return a copy of the environment with depot_tools prepended to PATH.

  Reads the module-level |depot_tools_dir| set during startup.
  """
  # Copy os.environ instead of mutating it: the original code modified the
  # real process environment, so every call prepended depot_tools_dir to
  # PATH again and PATH grew without bound.
  env = os.environ.copy()
  env['PATH'] = depot_tools_dir + os.pathsep + env['PATH']
  return env
|
||||
|
||||
def run(args, **kwargs):
  '''Run a command and capture the output iteratively'''
  # Accepts either a pre-split argument list or a single command string.
  # Backslashes are doubled so shlex does not consume Windows separators.
  if isinstance(args, str):
    args = shlex.split(args.replace('\\', '\\\\'))
  # Supported keyword arguments: cwd (working directory), quiet (suppress
  # echo of child output), env (environment for the child).
  cwd = kwargs.get("cwd", os.getcwd())
  quiet = kwargs.get("quiet", False)
  print "-> Running '%s' in %s" % (" ".join(args), os.path.relpath(cwd))
  # stderr is merged into stdout so a single stream is captured/echoed.
  cmd = Popen(args, cwd=cwd, stdout=PIPE, stderr=STDOUT,
              env=kwargs.get("env", get_exec_environ()),
              shell=(sys.platform == 'win32'))
  output = ''
  # Read one byte at a time so output appears as the child produces it
  # rather than all at once after it exits.
  while True:
    out = cmd.stdout.read(1)
    if out == '' and cmd.poll() != None:
      break
    output += out
    if not quiet:
      sys.stdout.write(out)
  # Non-zero exit raises; the captured output is attached to the exception
  # so the caller can inspect what the command printed.
  if cmd.wait() != 0:
    raise Exception("Command failed: \"%s\"" % " ".join(args), output)
  return output
|
||||
|
||||
def get_current_branch(path):
  """ Return the name of the branch currently checked out at |path|. """
  # NOTE(review): 'run' returns the raw command output, which may include a
  # trailing newline — confirm callers tolerate it (the whitespace-splitting
  # call sites appear to).
  return run("git rev-parse --abbrev-ref HEAD", cwd=path, quiet=True)
|
||||
|
||||
def get_chromium_compat_rev(cef_url, path, cef_rev):
  """ Return the compatible Chromium SVN revision (as a string) for CEF
      revision |cef_rev|, read from CHROMIUM_BUILD_COMPATIBILITY.txt in the
      CEF repository at |path| (shallow-cloned from |cef_url| if absent). """
  if not os.path.isdir(path):
    # No local checkout yet: make a shallow throw-away clone in a temp
    # directory just to read the compatibility file.
    path = mktemp()
    run("git clone --depth 1 %s %s" % (cef_url, path), quiet = True)
  if cef_rev == "None":
    # No explicit revision was requested; use the tip of the current branch.
    cef_rev = get_git_rev(path, get_current_branch(path))
  compat_cmd = "git cat-file -p %s:CHROMIUM_BUILD_COMPATIBILITY.txt" % cef_rev
  compat_value = run(compat_cmd, cwd = path, quiet = True)
  # The file contains a Python dict literal; builtins are disabled to limit
  # what the evaluated expression can do.
  config = eval(compat_value, {'__builtins__': None}, None)
  if not 'chromium_revision' in config:
    raise Exception("Missing chromium_revision value")
  # int() round-trip validates that the revision is numeric.
  return str(int(config['chromium_revision']))
|
||||
|
||||
def get_svn_rev(path, branch):
|
||||
svn_rev = "None"
|
||||
cmd = ("git log --grep=^git-svn-id: -n 1 %s" % branch).split()
|
||||
try:
|
||||
process = Popen(cmd, cwd=path, stdout = PIPE, stderr = PIPE)
|
||||
for line in process.stdout:
|
||||
if line.find("git-svn-id") > 0:
|
||||
svn_rev = line.split("@")[1].split()[0]
|
||||
break
|
||||
except IOError, (errno, strerror):
|
||||
sys.stderr.write('Failed to read git log: ' + strerror + "\n")
|
||||
raise
|
||||
return svn_rev
|
||||
|
||||
def get_git_rev_for_svn_rvn(path, svn_rev):
  """ Return the abbreviated git commit hash corresponding to SVN revision
      |svn_rev| in the repository at |path|, or "None" if the log could not
      be read. (NOTE: 'rvn' in the name is a historical typo for 'rev';
      kept for compatibility with existing callers.) """
  git_rev = "None"
  # Search the log for the commit whose git-svn-id line mentions @svn_rev.
  cmd = ("git log --grep=^git-svn-id:.*@%s --oneline" % svn_rev).split()
  try:
    process = Popen(cmd, cwd=path, stdout = PIPE, stderr = PIPE)
    # --oneline output begins with the abbreviated commit hash.
    git_rev = process.communicate()[0].split()[0]
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to read git log: ' + strerror + "\n")
    raise
  return git_rev
|
||||
|
||||
def get_git_rev(path, branch):
  """ Return the 'git describe --always' identifier for |branch| in the
      repository at |path| (typically the abbreviated commit hash).
      Re-raises IOError if the command output cannot be read. """
  git_rev = "None"
  cmd = ("git describe --always %s" % branch).split()
  try:
    process = Popen(cmd, cwd=path, stdout = PIPE, stderr = PIPE)
    git_rev = process.communicate()[0].strip()
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to read git log: ' + strerror + "\n")
    raise
  return git_rev
|
||||
|
||||
def get_git_origin(path):
  """ Return the fetch URL of the 'origin' remote for the repository at
      |path|, or "None" if it could not be determined. """
  git_origin = "None"
  # -n avoids contacting the remote; the cached configuration is enough.
  get_origin_cmd = "git remote show origin -n".split()
  try:
    process = Popen(get_origin_cmd, cwd=path, stdout = PIPE, stderr = PIPE)
    for line in process.stdout:
      # The URL appears on a line of the form "  Fetch URL: <url>".
      if line.startswith(" Fetch URL: "):
        git_origin = line.replace(" Fetch URL: ", "").strip()
        break
  except IOError, (errno, strerror):
    sys.stderr.write('Failed to read git log: ' + strerror + "\n")
    raise
  return git_origin
|
||||
|
||||
def get_checkout_info(path, fetch_latest = True):
  """ Retrieves the origin URL, git HEAD revision and last SVN revision.

  Returns a dict with 'url', plus 'local' and 'origin' sub-dicts each
  holding 'svn-revision' and 'git-revision'. All values default to the
  string 'None' when |path| is not an existing checkout.
  """
  url = 'None'
  local_svn_rev = local_git_rev = 'None'
  origin_svn_rev = origin_git_rev = 'None'
  if os.path.isdir(path):
    if fetch_latest:
      # Refresh the remote tracking refs before inspecting origin/<branch>.
      run("git fetch", cwd = path, quiet = True)
    url = get_git_origin(path)
    branch = get_current_branch(path)
    remote_branch = "origin/%s" % branch
    origin_svn_rev = get_svn_rev(path, remote_branch)
    origin_git_rev = get_git_rev(path, remote_branch)
    local_svn_rev = get_svn_rev(path, branch)
    local_git_rev = get_git_rev(path, branch)
  return {
    'url' : url,
    'local' : {
      'svn-revision' : local_svn_rev,
      'git-revision' : local_git_rev
    },
    'origin' : {
      'svn-revision' : origin_svn_rev,
      'git-revision' : origin_git_rev
    }
  }
|
||||
|
||||
# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# parse command-line options
desc = """
This utility implements automation for the download, update, build and
distribution of CEF.
"""

parser = OptionParser(description=desc)
parser.add_option('--url', dest='url',
                  help='CEF source URL')
parser.add_option('--download-dir', dest='downloaddir', metavar='DIR',
                  help='download directory with no spaces [required]')
parser.add_option('--revision', dest='revision',
                  help='CEF source revision')
parser.add_option('--force-config',
                  action='store_true', dest='forceconfig', default=False,
                  help='force Chromium configuration')
parser.add_option('--force-clean',
                  action='store_true', dest='forceclean', default=False,
                  help='force revert of all Chromium changes, deletion of '+\
                       'all unversioned files including the CEF folder and '+\
                       'trigger the force-update, force-build and '+\
                       'force-distrib options')
parser.add_option('--force-update',
                  action='store_true', dest='forceupdate', default=False,
                  help='force Chromium and CEF update')
parser.add_option('--force-build',
                  action='store_true', dest='forcebuild', default=False,
                  help='force CEF debug and release builds')
parser.add_option('--force-distrib',
                  action='store_true', dest='forcedistrib', default=False,
                  help='force creation of CEF binary distribution')
parser.add_option('--no-debug-build',
                  action='store_true', dest='nodebugbuild', default=False,
                  help="don't perform the CEF debug build")
parser.add_option('--no-release-build',
                  action='store_true', dest='noreleasebuild', default=False,
                  help="don't perform the CEF release build")
parser.add_option('--no-distrib',
                  action='store_true', dest='nodistrib', default=False,
                  help="don't create the CEF binary distribution")
(options, args) = parser.parse_args()

# the downloaddir and url options are required
if options.downloaddir is None:
  print "ERROR: Download directory is required"
  parser.print_help(sys.stderr)
  sys.exit()
if options.url is None:
  print "ERROR: CEF URL is required"
  parser.print_help(sys.stderr)
  sys.exit()
|
||||
|
||||
cef_url = check_url(options.url)
download_dir = os.path.abspath(options.downloaddir)
if not os.path.exists(download_dir):
  # create the download directory
  os.makedirs(download_dir)

# set the expected script extension
if sys.platform == 'win32':
  script_ext = '.bat'
else:
  script_ext = '.sh'

# check if the "depot_tools" directory exists
depot_tools_dir = os.path.join(download_dir, 'depot_tools')
if not os.path.exists(depot_tools_dir):
  # checkout depot_tools
  run('svn checkout %s %s' % (depot_tools_url, depot_tools_dir),
      cwd = download_dir, quiet = True)

chromium_dir = os.path.join(download_dir, 'chromium')
if not os.path.exists(chromium_dir):
  # create the "chromium" directory
  os.makedirs(chromium_dir)

# expected layout: <download_dir>/chromium/src/cef/tools
chromium_src_dir = os.path.join(chromium_dir, 'src')
cef_src_dir = os.path.join(chromium_src_dir, 'cef')
cef_tools_dir = os.path.join(cef_src_dir, 'tools')

# retrieve the current CEF URL and revision
info = get_checkout_info(cef_src_dir)
# target revision defaults to the origin tip unless --revision was given
cef_rev = info['origin']['git-revision']
if not options.revision is None:
  cef_rev = str(options.revision)
current_cef_url = info['url']
current_cef_rev = info['local']['git-revision']

# retrieve the compatible Chromium revision
chromium_rev = get_chromium_compat_rev(cef_url, cef_src_dir, cef_rev)

# retrieve the current Chromium URL and revision
# (False: do not fetch — the local state is what we compare against)
info = get_checkout_info(chromium_src_dir, False)
current_chromium_url = info['url']
current_chromium_rev = info['local']['svn-revision']
|
||||
# test if the CEF URL changed
|
||||
cef_url_changed = current_cef_url != cef_url
|
||||
print "-- CEF URL: %s" % current_cef_url
|
||||
if cef_url_changed:
|
||||
print "\t-> CHANGED TO: %s" % cef_url
|
||||
|
||||
# test if the CEF revision changed
|
||||
cef_rev_changed = current_cef_rev != cef_rev
|
||||
print "-- CEF Revision: %s" % current_cef_rev
|
||||
if cef_url_changed:
|
||||
print "\t-> CHANGED TO: %s" % cef_rev
|
||||
|
||||
# test if the Chromium URL changed
|
||||
chromium_url_changed = current_chromium_url != chromium_url
|
||||
print "-- Chromium URL: %s" % current_chromium_url
|
||||
if cef_url_changed:
|
||||
print "\t-> CHANGED TO: %s" % chromium_url
|
||||
|
||||
# test if the Chromium revision changed
|
||||
chromium_rev_changed = current_chromium_rev != chromium_rev
|
||||
print "-- Chromium Revision: %s" % current_chromium_rev
|
||||
if cef_url_changed:
|
||||
print "\t-> CHANGED TO: %s" % chromium_rev
|
||||
|
||||
# true if anything changed
|
||||
any_changed = chromium_url_changed or chromium_rev_changed or \
|
||||
cef_url_changed or cef_rev_changed
|
||||
if not any_changed:
|
||||
print "*** NO CHANGE ***"
|
||||
|
||||
if chromium_url_changed or options.forceconfig:
  # run gclient config to create the .gclient file
  run('gclient config %s --git-deps' % chromium_url, cwd = chromium_dir)

  path = os.path.join(chromium_dir, '.gclient')
  if not os.path.exists(path):
    raise Exception('.gclient file was not created')

  # read the resulting .gclient file
  fp = open(path, 'r')
  data = fp.read()
  fp.close()

  # populate "custom_deps" section: exclude layout tests and reference
  # builds that CEF does not need
  data = data.replace('"custom_deps" : {', '"custom_deps" : {'+\
                      "\n "+'"src/third_party/WebKit/LayoutTests": None,'+\
                      "\n "+'"src/chrome_frame/tools/test/reference_build/chrome": None,'+\
                      "\n "+'"src/chrome/tools/test/reference_build/chrome_mac": None,'+\
                      "\n "+'"src/chrome/tools/test/reference_build/chrome_win": None,'+\
                      "\n "+'"src/chrome/tools/test/reference_build/chrome_linux": None,')

  # write the new .gclient file
  fp = open(path, 'w')
  fp.write(data)
  fp.close()

if options.forceclean:
  if os.path.exists(chromium_src_dir):
    # revert all Chromium changes and delete all unversioned files
    run('gclient revert -n', cwd = chromium_dir)

  # force update, build and distrib steps
  options.forceupdate = True
  options.forcebuild = True
  options.forcedistrib = True

if chromium_url_changed or chromium_rev_changed or options.forceupdate:
  # download/update the Chromium source code
  fetch_rev = "HEAD"
  if os.path.isdir(chromium_src_dir):
    # stay on the currently checked-out revision while fetching (-n skips
    # hooks) ...
    fetch_rev = get_git_rev_for_svn_rvn(
        chromium_src_dir, current_chromium_rev)
  run('gclient sync --jobs 8 -n --force --revision=src@%s' % fetch_rev,
      cwd = chromium_dir)
  # ... then check out the target revision with hooks enabled
  checkout_rev = get_git_rev_for_svn_rvn(chromium_src_dir, chromium_rev)
  run('gclient sync --jobs 8 --revision=src@%s' % checkout_rev,
      cwd = chromium_dir)

if not os.path.exists(cef_src_dir) or cef_url_changed:
  if cef_url_changed and os.path.exists(cef_src_dir):
    # delete the cef directory (it will be re-downloaded)
    shutil.rmtree(cef_src_dir)
  # download the CEF source code
  run("git clone %s %s" % (cef_url, cef_src_dir))
elif cef_rev_changed or options.forceupdate:
  # update the CEF source code: stash any local changes, hard-reset to the
  # target ref, then restore the stashed changes
  stashed = run("git stash", cwd = cef_src_dir).find(
      "No local changes to save") < 0
  ref = cef_rev
  if ref == "None":
    # no explicit revision: track the tip of the current branch
    ref = "origin/%s" % get_current_branch(cef_src_dir)
  run("git fetch origin", cwd = cef_src_dir)
  run("git reset --hard %s" % ref, cwd = cef_src_dir)
  if stashed:
    run("git stash pop", cwd = cef_src_dir)

if any_changed or options.forceupdate:
  # create CEF projects
  path = os.path.join(cef_src_dir, 'cef_create_projects' + script_ext)
  run(path, cwd = cef_src_dir, quiet = True)

if any_changed or options.forcebuild:
  # build the requested configurations
  path = os.path.join(cef_tools_dir, 'build_projects' + script_ext)
  if not options.nodebugbuild:
    run(path +' Debug', cwd = cef_tools_dir)
  if not options.noreleasebuild:
    run(path +' Release', cwd = cef_tools_dir)

if any_changed or options.forcedistrib:
  if not options.nodistrib:
    # make CEF binary distribution
    path = os.path.join(cef_tools_dir, 'make_distrib' + script_ext)
    run(path, cwd = cef_tools_dir)
|
99
tools/automate/automate.README.txt
Normal file
99
tools/automate/automate.README.txt
Normal file
@@ -0,0 +1,99 @@
|
||||
Chromium Embedded Framework (CEF) Automation Tool
|
||||
-------------------------------------------------------------------------------
|
||||
|
||||
Document Last Updated: October 10, 2011
|
||||
|
||||
|
||||
OVERVIEW
|
||||
--------
|
||||
|
||||
The CEF Automation Tool can perform the following actions in an automated manner:
|
||||
|
||||
1. Download/update the Chromium and CEF source code.
|
||||
2. Build the CEF Debug and Release targets.
|
||||
3. Create the CEF binary distribution package.
|
||||
|
||||
|
||||
SETUP
|
||||
-----
|
||||
|
||||
1. Install Subversion [1] and Python [2]. Make sure the bin directories for both
|
||||
programs are discoverable via your system PATH configuration. On Windows
|
||||
install CppDoc [3] in the default location (assumes a 64bit version of
|
||||
Windows 7).
|
||||
|
||||
2. Install build tools. On Windows install Visual Studio 2008 or newer and all
|
||||
required prerequisite software [4]. On Mac install Xcode 3.2 or newer.
|
||||
|
||||
3. Configure the GYP environment. On Windows set the GYP_MSVS_VERSION
|
||||
environment variable to "2008" or "2010" depending on which version of Visual
|
||||
Studio you're building with. On Mac Lion set the GYP_DEFINES environment
|
||||
variable to 'mac_sdk=10.6'.
|
||||
|
||||
4. Checkout the "automate" folder to a location on your hard drive. For the
|
||||
trunk version of CEF you can use the following command:
|
||||
|
||||
svn checkout http://chromiumembedded.googlecode.com/svn/trunk/cef3/tools/automate /path/to/automate
|
||||
|
||||
5. Run the automate.py script at whatever interval is appropriate (for each
|
||||
CEF commit, once per day, once per week, etc):
|
||||
|
||||
python /path/to/automate/automate.py --download-dir=/path/to/download
|
||||
|
||||
|
||||
HOW IT WORKS
|
||||
------------
|
||||
|
||||
The automate.py script performs the following actions in the download directory
|
||||
specified by the "--download-dir" flag. This path value must contain no spaces.
|
||||
|
||||
1. Retrieve the Chromium URL and revision associated with a CEF URL and
|
||||
revision. A specific CEF URL and revision can be specified using the "--url"
|
||||
and "--revision" flags. Otherwise, the current CEF trunk URL [7] and HEAD
|
||||
revision will be used. The Chromium URL and revision are retrieved by
|
||||
querying the SVN repository for a CHROMIUM_BUILD_COMPATIBILITY.txt file.
|
||||
|
||||
2. If a "depot_tools" folder does not already exist download depot_tools [5].
|
||||
The depot_tools folder will be added to the beginning of the PATH to support
|
||||
execution of the below steps.
|
||||
|
||||
3. If a "chromium" folder does not already exist create it and configure
|
||||
gclient [6] using the Chromium URL retrieved in step 1. To force execution of
|
||||
this step use the "--force-config" flag.
|
||||
|
||||
4. If the "--force-clean" flag is specified all Chromium changes will be
|
||||
reverted, all unversioned files including the CEF folder will be deleted and
|
||||
steps 6-10 will be triggered automatically.
|
||||
|
||||
5. If both Chromium and CEF are already at the correct URL and revision the
|
||||
script will exit at this point. To force execution of steps 6-8 use the
|
||||
"--force-update" flag. To force execution of step 9 use the "--force-build"
|
||||
flag. To force execution of step 10 use the "--force-distrib" flag.
|
||||
|
||||
6. Use gclient [6] to update the Chromium source code to the correct revision.
|
||||
|
||||
7. If a "chromium/src/cef" folder does not already exist download the CEF source
|
||||
code at the correct revision. Otherwise, update CEF source code to the
|
||||
correct revision.
|
||||
|
||||
8. Run the "chromium/src/cef/cef_create_projects.[sh|bat]" script to apply CEF
|
||||
patches and generate CEF project files.
|
||||
|
||||
9. Run the "chromium/src/cef/tools/build_projects.[sh|bat]" script twice; once
|
||||
to build the Debug target and once to build the Release target.
|
||||
|
||||
10. Run the "chromium/src/cef/tools/make_distrib.[sh|bat]" script to build the
|
||||
binary distribution package. The resulting package will be output in a
|
||||
"chromium/src/cef/binary_distrib/cef_binary_rXXX_[windows|macosx]" folder.
|
||||
|
||||
|
||||
REFERENCES
|
||||
----------
|
||||
|
||||
[1] http://subversion.apache.org/
|
||||
[2] http://www.python.org/
|
||||
[3] http://www.cppdoc.com/
|
||||
[4] http://dev.chromium.org/developers/how-tos/build-instructions-windows
|
||||
[5] http://dev.chromium.org/developers/how-tos/depottools
|
||||
[6] http://dev.chromium.org/developers/how-tos/get-the-code
|
||||
[7] http://chromiumembedded.googlecode.com/svn/trunk/cef3
|
347
tools/automate/automate.py
Normal file
347
tools/automate/automate.py
Normal file
@@ -0,0 +1,347 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from optparse import OptionParser
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib
|
||||
|
||||
# default URL values
|
||||
cef_url = 'http://chromiumembedded.googlecode.com/svn/trunk/cef3'
|
||||
depot_tools_url = 'http://src.chromium.org/svn/trunk/tools/depot_tools'
|
||||
|
||||
def run(command_line, working_dir, depot_tools_dir=None):
  """ Run |command_line| in |working_dir|, echoing the command first.

  If |depot_tools_dir| is given it is prepended to PATH for the child
  process. Raises subprocess.CalledProcessError on non-zero exit status;
  returns the check_call result (0) on success.
  """
  # Copy the environment instead of mutating os.environ: the original code
  # modified the real process environment, so repeated calls kept
  # prepending depot_tools_dir to PATH.
  env = os.environ.copy()
  if not depot_tools_dir is None:
    env['PATH'] = depot_tools_dir+os.pathsep+env['PATH']

  sys.stdout.write('-------- Running "'+command_line+'" in "'+\
                   working_dir+'"...'+"\n")
  # Double backslashes so shlex does not consume Windows path separators.
  args = shlex.split(command_line.replace('\\', '\\\\'))
  return subprocess.check_call(args, cwd=working_dir, env=env,
                               shell=(sys.platform == 'win32'))
|
||||
|
||||
def check_url(url):
  """ Check the URL and raise an exception if invalid.

  A URL is valid when its scheme is http or https and its remainder needs
  no percent-escaping. Returns |url| unchanged on success.
  """
  # Resolve quote() in a way that works on both Python 2 and Python 3
  # (urllib.quote moved to urllib.parse.quote in Python 3).
  try:
    from urllib import quote
  except ImportError:
    from urllib.parse import quote
  if ':' in url[:7]:
    parts = url.split(':', 1)
    # The remainder must equal its own quoted form, i.e. contain no
    # characters that would need percent-escaping.
    if (parts[0] == 'http' or parts[0] == 'https') and \
        parts[1] == quote(parts[1]):
      return url
  sys.stderr.write('Invalid URL: '+url+"\n")
  raise Exception('Invalid URL: '+url)
|
||||
|
||||
def get_svn_info(path):
  """ Retrieves the URL and revision from svn info. """
  # |path| may be a remote http(s) URL or a local working-copy directory.
  # Returns {'url': ..., 'revision': ...} with string 'None' defaults.
  url = 'None'
  rev = 'None'
  if path[0:4] == 'http' or os.path.exists(path):
    try:
      stream = os.popen('svn info '+path)
      for line in stream:
        if line[0:4] == "URL:":
          # strip the "URL: " prefix and the trailing newline
          url = check_url(line[5:-1])
        elif line[0:9] == "Revision:":
          # int() round-trip validates that the revision is numeric
          rev = str(int(line[10:-1]))
    except IOError, (errno, strerror):
      sys.stderr.write('Failed to read svn info: '+strerror+"\n")
      raise
  return {'url': url, 'revision': rev}
|
||||
|
||||
# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# parse command-line options
disc = """
This utility implements automation for the download, update, build and
distribution of CEF.
"""

parser = OptionParser(description=disc)
parser.add_option('--download-dir', dest='downloaddir', metavar='DIR',
                  help='download directory with no spaces [required]')
parser.add_option('--revision', dest='revision', type="int",
                  help='CEF source revision')
parser.add_option('--url', dest='url',
                  help='CEF source URL')
parser.add_option('--force-config',
                  action='store_true', dest='forceconfig', default=False,
                  help='force Chromium configuration')
parser.add_option('--force-clean',
                  action='store_true', dest='forceclean', default=False,
                  help='force revert of all Chromium changes, deletion of '+\
                       'all unversioned files including the CEF folder and '+\
                       'trigger the force-update, force-build and '+\
                       'force-distrib options')
parser.add_option('--force-update',
                  action='store_true', dest='forceupdate', default=False,
                  help='force Chromium and CEF update')
parser.add_option('--force-build',
                  action='store_true', dest='forcebuild', default=False,
                  help='force CEF debug and release builds')
parser.add_option('--force-distrib',
                  action='store_true', dest='forcedistrib', default=False,
                  help='force creation of CEF binary distribution')
parser.add_option('--no-debug-build',
                  action='store_true', dest='nodebugbuild', default=False,
                  help="don't perform the CEF debug build")
parser.add_option('--no-release-build',
                  action='store_true', dest='noreleasebuild', default=False,
                  help="don't perform the CEF release build")
parser.add_option('--no-distrib',
                  action='store_true', dest='nodistrib', default=False,
                  help="don't create the CEF binary distribution")
(options, args) = parser.parse_args()

# the downloaddir option is required
if options.downloaddir is None:
  parser.print_help(sys.stderr)
  sys.exit()
|
||||
|
||||
# script directory
|
||||
script_dir = os.path.dirname(__file__)
|
||||
|
||||
if not options.url is None:
|
||||
# set the CEF URL
|
||||
cef_url = check_url(options.url)
|
||||
|
||||
if not options.revision is None:
|
||||
# set the CEF revision
|
||||
cef_rev = str(options.revision)
|
||||
else:
|
||||
# retrieve the CEF revision from the remote repo
|
||||
info = get_svn_info(cef_url)
|
||||
cef_rev = info['revision']
|
||||
if cef_rev == 'None':
|
||||
sys.stderr.write('No SVN info for: '+cef_url+"\n")
|
||||
raise Exception('No SVN info for: '+cef_url)
|
||||
|
||||
# Retrieve the Chromium URL and revision from the CEF repo
|
||||
compat_url = cef_url + "/CHROMIUM_BUILD_COMPATIBILITY.txt?r="+cef_rev
|
||||
|
||||
release_url = None
|
||||
chromium_url = None
|
||||
chromium_rev = None
|
||||
|
||||
try:
|
||||
# Read the remote URL contents
|
||||
handle = urllib.urlopen(compat_url)
|
||||
compat_value = handle.read().strip()
|
||||
handle.close()
|
||||
|
||||
# Parse the contents
|
||||
config = eval(compat_value, {'__builtins__': None}, None)
|
||||
|
||||
if 'release_url' in config:
|
||||
# building from a release
|
||||
release_url = check_url(config['release_url'])
|
||||
else:
|
||||
# building from chromium src
|
||||
if not 'chromium_url' in config:
|
||||
raise Exception("Missing chromium_url value")
|
||||
if not 'chromium_revision' in config:
|
||||
raise Exception("Missing chromium_revision value")
|
||||
|
||||
chromium_url = check_url(config['chromium_url'])
|
||||
chromium_rev = str(int(config['chromium_revision']))
|
||||
except Exception, e:
|
||||
sys.stderr.write('Failed to read URL and revision information from '+ \
|
||||
compat_url+"\n")
|
||||
raise
|
||||
|
||||
download_dir = options.downloaddir
|
||||
if not os.path.exists(download_dir):
|
||||
# create the download directory
|
||||
os.makedirs(download_dir)
|
||||
|
||||
# set the expected script extension
|
||||
if sys.platform == 'win32':
|
||||
script_ext = '.bat'
|
||||
else:
|
||||
script_ext = '.sh'
|
||||
|
||||
# check if the "depot_tools" directory exists
|
||||
depot_tools_dir = os.path.join(download_dir, 'depot_tools')
|
||||
if not os.path.exists(depot_tools_dir):
|
||||
# checkout depot_tools
|
||||
run('svn checkout '+depot_tools_url+' '+depot_tools_dir, download_dir)
|
||||
|
||||
# check if the "chromium" directory exists
|
||||
chromium_dir = os.path.join(download_dir, 'chromium')
|
||||
if not os.path.exists(chromium_dir):
|
||||
# create the "chromium" directory
|
||||
os.makedirs(chromium_dir)
|
||||
|
||||
chromium_src_dir = os.path.join(chromium_dir, 'src')
|
||||
cef_src_dir = os.path.join(chromium_src_dir, 'cef')
|
||||
cef_tools_dir = os.path.join(cef_src_dir, 'tools')
|
||||
|
||||
# retrieve the current CEF URL and revision
|
||||
info = get_svn_info(cef_src_dir)
|
||||
current_cef_url = info['url']
|
||||
current_cef_rev = info['revision']
|
||||
|
||||
if release_url is None:
|
||||
# retrieve the current Chromium URL and revision
|
||||
info = get_svn_info(chromium_src_dir)
|
||||
current_chromium_url = info['url']
|
||||
current_chromium_rev = info['revision']
|
||||
|
||||
# test if the CEF URL changed
|
||||
cef_url_changed = current_cef_url != cef_url
|
||||
sys.stdout.write('CEF URL: '+current_cef_url+"\n")
|
||||
if cef_url_changed:
|
||||
sys.stdout.write(' -> CHANGED TO: '+cef_url+"\n")
|
||||
|
||||
# test if the CEF revision changed
|
||||
cef_rev_changed = current_cef_rev != cef_rev
|
||||
sys.stdout.write('CEF Revision: '+current_cef_rev+"\n")
|
||||
if cef_rev_changed:
|
||||
sys.stdout.write(' -> CHANGED TO: '+cef_rev+"\n")
|
||||
|
||||
release_url_changed = False
|
||||
chromium_url_changed = False
|
||||
chromium_rev_changed = False
|
||||
|
||||
if release_url is None:
|
||||
# test if the Chromium URL changed
|
||||
chromium_url_changed = current_chromium_url != chromium_url
|
||||
sys.stdout.write('Chromium URL: '+current_chromium_url+"\n")
|
||||
if chromium_url_changed:
|
||||
sys.stdout.write(' -> CHANGED TO: '+chromium_url+"\n")
|
||||
|
||||
# test if the Chromium revision changed
|
||||
chromium_rev_changed = current_chromium_rev != chromium_rev
|
||||
sys.stdout.write('Chromium Revision: '+current_chromium_rev+"\n")
|
||||
if chromium_rev_changed:
|
||||
sys.stdout.write(' -> CHANGED TO: '+chromium_rev+"\n")
|
||||
else:
|
||||
# test if the release URL changed
|
||||
current_release_url = 'None'
|
||||
|
||||
path = os.path.join(chromium_dir, '.gclient')
|
||||
if os.path.exists(path):
|
||||
# read the .gclient file
|
||||
fp = open(path, 'r')
|
||||
data = fp.read()
|
||||
fp.close()
|
||||
|
||||
# Parse the contents
|
||||
config_dict = {}
|
||||
try:
|
||||
exec(data, config_dict)
|
||||
current_release_url = config_dict['solutions'][0]['url']
|
||||
except Exception, e:
|
||||
sys.stderr.write('Failed to parse existing .glient file.\n')
|
||||
raise
|
||||
|
||||
release_url_changed = current_release_url != release_url
|
||||
sys.stdout.write('Release URL: '+current_release_url+"\n")
|
||||
if release_url_changed:
|
||||
sys.stdout.write(' -> CHANGED TO: '+release_url+"\n")
|
||||
|
||||
# true if anything changed
|
||||
any_changed = release_url_changed or chromium_url_changed or \
|
||||
chromium_rev_changed or cef_url_changed or cef_rev_changed
|
||||
if not any_changed:
|
||||
sys.stdout.write("No changes.\n")
|
||||
|
||||
if release_url_changed or chromium_url_changed or options.forceconfig:
|
||||
if release_url is None:
|
||||
url = chromium_url
|
||||
else:
|
||||
url = release_url
|
||||
|
||||
# run gclient config to create the .gclient file
|
||||
run('gclient config '+url, chromium_dir, depot_tools_dir)
|
||||
|
||||
path = os.path.join(chromium_dir, '.gclient')
|
||||
if not os.path.exists(path):
|
||||
sys.stderr.write(".gclient file was not created\n")
|
||||
raise Exception('.gclient file was not created')
|
||||
|
||||
# read the resulting .gclient file
|
||||
fp = open(path, 'r')
|
||||
data = fp.read()
|
||||
fp.close()
|
||||
|
||||
custom_deps = \
|
||||
"\n "+'"src/third_party/WebKit/LayoutTests": None,'+\
|
||||
"\n "+'"src/chrome_frame/tools/test/reference_build/chrome": None,'+\
|
||||
"\n "+'"src/chrome/tools/test/reference_build/chrome_mac": None,'+\
|
||||
"\n "+'"src/chrome/tools/test/reference_build/chrome_win": None,'+\
|
||||
"\n "+'"src/chrome/tools/test/reference_build/chrome_linux": None,'
|
||||
|
||||
if not release_url is None:
|
||||
# TODO: Read the DEPS file and exclude all non-src directories.
|
||||
custom_deps += \
|
||||
"\n "+'"chromeos": None,'+\
|
||||
"\n "+'"depot_tools": None,'
|
||||
|
||||
# populate "custom_deps" section
|
||||
data = data.replace('"custom_deps" : {', '"custom_deps" : {'+custom_deps)
|
||||
|
||||
# write the new .gclient file
|
||||
fp = open(path, 'w')
|
||||
fp.write(data)
|
||||
fp.close()
|
||||
|
||||
if options.forceclean:
|
||||
if os.path.exists(chromium_src_dir):
|
||||
# revert all Chromium changes and delete all unversioned files
|
||||
run('gclient revert -n', chromium_dir, depot_tools_dir)
|
||||
|
||||
# force update, build and distrib steps
|
||||
options.forceupdate = True
|
||||
options.forcebuild = True
|
||||
options.forcedistrib = True
|
||||
|
||||
if release_url is None:
|
||||
if chromium_url_changed or chromium_rev_changed or options.forceupdate:
|
||||
# download/update the Chromium source code
|
||||
run('gclient sync --revision src@'+chromium_rev+' --jobs 8 --force', \
|
||||
chromium_dir, depot_tools_dir)
|
||||
elif release_url_changed or options.forceupdate:
|
||||
# download/update the release source code
|
||||
run('gclient sync --jobs 8 --force', chromium_dir, depot_tools_dir)
|
||||
|
||||
if not os.path.exists(cef_src_dir) or cef_url_changed:
|
||||
if cef_url_changed and os.path.exists(cef_src_dir):
|
||||
# delete the cef directory (it will be re-downloaded)
|
||||
shutil.rmtree(cef_src_dir)
|
||||
|
||||
# download the CEF source code
|
||||
run('svn checkout '+cef_url+' -r '+cef_rev+' '+cef_src_dir, download_dir)
|
||||
elif cef_rev_changed or options.forceupdate:
|
||||
# update the CEF source code
|
||||
run('svn update -r '+cef_rev+' '+cef_src_dir, download_dir)
|
||||
|
||||
if any_changed or options.forceupdate:
|
||||
# create CEF projects
|
||||
path = os.path.join(cef_src_dir, 'cef_create_projects'+script_ext)
|
||||
run(path, cef_src_dir, depot_tools_dir)
|
||||
|
||||
if any_changed or options.forcebuild:
|
||||
path = os.path.join(cef_tools_dir, 'build_projects'+script_ext)
|
||||
|
||||
if not options.nodebugbuild:
|
||||
# make CEF Debug build
|
||||
run(path+' Debug', cef_tools_dir, depot_tools_dir)
|
||||
|
||||
if not options.noreleasebuild:
|
||||
# make CEF Release build
|
||||
run(path+' Release', cef_tools_dir, depot_tools_dir)
|
||||
|
||||
if any_changed or options.forcedistrib:
|
||||
if not options.nodistrib:
|
||||
# make CEF binary distribution
|
||||
path = os.path.join(cef_tools_dir, 'make_distrib'+script_ext)
|
||||
run(path, cef_tools_dir, depot_tools_dir)
|
45
tools/build_projects.bat
Normal file
45
tools/build_projects.bat
Normal file
@@ -0,0 +1,45 @@
|
||||
@echo off
|
||||
set RC=
|
||||
setlocal
|
||||
|
||||
if "%1" == "" (
|
||||
echo ERROR: Please specify a build target: Debug or Release
|
||||
set ERRORLEVEL=1
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%2" == "" (
|
||||
set PROJECT_NAME=cefclient
|
||||
) else (
|
||||
set PROJECT_NAME=%2
|
||||
)
|
||||
|
||||
echo Configuring Visual Studio environment...
|
||||
if "%GYP_MSVS_VERSION%" == "2008" (
|
||||
call "%VS90COMNTOOLS%vsvars32.bat"
|
||||
set PROJECT_EXT=.vcproj
|
||||
) else (
|
||||
call "%VS100COMNTOOLS%vsvars32.bat"
|
||||
set PROJECT_EXT=.vcxproj
|
||||
)
|
||||
|
||||
if exist "%DevEnvDir%\devenv.com" (
|
||||
echo Building %1 target for %PROJECT_NAME% project...
|
||||
"%DevEnvDir%\devenv.com" /build %1 ..\cef.sln /project %PROJECT_NAME%%PROJECT_EXT%
|
||||
) else if exist "%VCINSTALLDIR%\vcpackages\vcbuild.exe" (
|
||||
echo Building %1 target for all projects...
|
||||
"%VCINSTALLDIR%\vcpackages\vcbuild.exe" ..\cef.sln "%1|Win32"
|
||||
) else (
|
||||
echo ERROR: Cannot find Visual Studio builder
|
||||
set ERRORLEVEL=1
|
||||
)
|
||||
|
||||
:end
|
||||
endlocal & set RC=%ERRORLEVEL%
|
||||
goto omega
|
||||
|
||||
:returncode
|
||||
exit /B %RC%
|
||||
|
||||
:omega
|
||||
call :returncode %RC%
|
18
tools/build_projects.sh
Executable file
18
tools/build_projects.sh
Executable file
@@ -0,0 +1,18 @@
|
||||
#!/bin/bash
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
echo "ERROR: Please specify a build target: Debug or Release"
|
||||
else
|
||||
if [ -z "$2" ]; then
|
||||
PROJECT_NAME='cefclient'
|
||||
else
|
||||
PROJECT_NAME=$2
|
||||
fi
|
||||
if [ `uname` = "Linux" ]; then
|
||||
pushd ../../
|
||||
make BUILDTYPE=$1 -j 16
|
||||
popd
|
||||
else
|
||||
xcodebuild -project ../cef.xcodeproj -configuration $1 -target "$PROJECT_NAME"
|
||||
fi
|
||||
fi
|
1849
tools/cef_parser.py
Normal file
1849
tools/cef_parser.py
Normal file
File diff suppressed because it is too large
Load Diff
87
tools/check_revision.py
Normal file
87
tools/check_revision.py
Normal file
@@ -0,0 +1,87 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from file_util import *
|
||||
from optparse import OptionParser
|
||||
from svn_util import *
|
||||
import sys
|
||||
|
||||
# cannot be loaded as a module
|
||||
if __name__ != "__main__":
|
||||
sys.stderr.write('This file cannot be loaded as a module!')
|
||||
sys.exit()
|
||||
|
||||
|
||||
# parse command-line options
|
||||
disc = """
|
||||
This utility checks that the correct Chromium revision is being used.
|
||||
"""
|
||||
|
||||
parser = OptionParser(description=disc)
|
||||
parser.add_option('-q', '--quiet',
|
||||
action='store_true', dest='quiet', default=False,
|
||||
help='do not output detailed status information')
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
# The CEF root directory is the parent directory of _this_ script.
|
||||
cef_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
|
||||
|
||||
# Retrieve the CEF SVN info.
|
||||
cef_info = get_svn_info(cef_dir)
|
||||
if not options.quiet:
|
||||
sys.stdout.write('Using CEF revision '+cef_info['revision']+' @ '+\
|
||||
cef_info['url']+"\n")
|
||||
|
||||
# Retrieve the Chromium SVN info.
|
||||
src_dir = os.path.join(cef_dir, os.pardir)
|
||||
chromium_info = get_svn_info(src_dir)
|
||||
if not options.quiet:
|
||||
sys.stdout.write('Using Chromium revision '+chromium_info['revision']+' @ '+\
|
||||
chromium_info['url']+"\n")
|
||||
|
||||
# Parse the compatibility file contents.
|
||||
compat_file = os.path.join(cef_dir, 'CHROMIUM_BUILD_COMPATIBILITY.txt')
|
||||
config = eval(read_file(compat_file), {'__builtins__': None}, None)
|
||||
|
||||
error = False
|
||||
|
||||
if 'release_url' in config:
|
||||
current_release_url = None
|
||||
path = os.path.join(os.path.join(src_dir, os.pardir), '.gclient')
|
||||
if os.path.exists(path):
|
||||
# read the .gclient file
|
||||
fp = open(path, 'r')
|
||||
data = fp.read()
|
||||
fp.close()
|
||||
|
||||
# Parse the contents
|
||||
config_dict = {}
|
||||
try:
|
||||
exec(data, config_dict)
|
||||
current_release_url = config_dict['solutions'][0]['url']
|
||||
except Exception, e:
|
||||
sys.stderr.write('Failed to parse existing .glient file.\n')
|
||||
raise
|
||||
|
||||
if not options.quiet:
|
||||
sys.stdout.write('Using Chromium release '+current_release_url+"\n")
|
||||
|
||||
if current_release_url != config['release_url']:
|
||||
error = True
|
||||
sys.stderr.write("\nWARNING: Incorrect Chromium release URL; found "+\
|
||||
current_release_url+', expected '+config['release_url']+"\n")
|
||||
else:
|
||||
if chromium_info['url'] != config['chromium_url']:
|
||||
error = True
|
||||
sys.stderr.write("\nWARNING: Incorrect Chromium URL; found "+\
|
||||
chromium_info['url']+', expected '+config['chromium_url']+"\n")
|
||||
|
||||
if chromium_info['revision'] != config['chromium_revision']:
|
||||
error = True
|
||||
sys.stderr.write("\nWARNING: Incorrect Chromium revision; found "+\
|
||||
chromium_info['revision']+', expected '+config['chromium_revision']+"\n")
|
||||
|
||||
if error:
|
||||
sys.stderr.write("\nPlease see CHROMIUM_BUILD_COMPATIBILITY.txt for "\
|
||||
"instructions.\n")
|
2
tools/check_style.bat
Normal file
2
tools/check_style.bat
Normal file
@@ -0,0 +1,2 @@
|
||||
@echo off
|
||||
..\..\third_party\python_26\python.exe check_style.py %*
|
129
tools/check_style.py
Normal file
129
tools/check_style.py
Normal file
@@ -0,0 +1,129 @@
|
||||
# Copyright (c) 2012 The Chromium Embedded Framework Authors.
|
||||
# Portions copyright (c) 2011 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os, re, string, sys
|
||||
from file_util import *
|
||||
import git_util as git
|
||||
import svn_util as svn
|
||||
|
||||
# script directory
|
||||
script_dir = os.path.dirname(__file__)
|
||||
|
||||
# CEF root directory
|
||||
cef_dir = os.path.abspath(os.path.join(script_dir, os.pardir))
|
||||
|
||||
# Valid extensions for files we want to lint.
|
||||
DEFAULT_LINT_WHITELIST_REGEX = r"(.*\.cpp|.*\.cc|.*\.h)"
|
||||
DEFAULT_LINT_BLACKLIST_REGEX = r"$^"
|
||||
|
||||
try:
|
||||
# depot_tools may already be in the import path.
|
||||
import cpplint
|
||||
import cpplint_chromium
|
||||
except ImportError, e:
|
||||
# Search the PATH environment variable to find the depot_tools folder.
|
||||
depot_tools = None;
|
||||
paths = os.environ.get('PATH').split(os.pathsep)
|
||||
for path in paths:
|
||||
if os.path.exists(os.path.join(path, 'cpplint_chromium.py')):
|
||||
depot_tools = path
|
||||
break
|
||||
|
||||
if depot_tools is None:
|
||||
print >> sys.stderr, 'Error: could not find depot_tools in PATH.'
|
||||
sys.exit(2)
|
||||
|
||||
# Add depot_tools to import path.
|
||||
sys.path.append(depot_tools)
|
||||
import cpplint
|
||||
import cpplint_chromium
|
||||
|
||||
# The default implementation of FileInfo.RepositoryName looks for the top-most
|
||||
# directory that contains a .git or .svn folder. This is a problem for CEF
|
||||
# because the CEF root folder (which may have an arbitrary name) lives inside
|
||||
# the Chromium src folder. Reimplement in a dumb but sane way.
|
||||
def patch_RepositoryName(self):
|
||||
fullname = self.FullName()
|
||||
project_dir = os.path.dirname(fullname)
|
||||
if os.path.exists(fullname):
|
||||
root_dir = project_dir
|
||||
while os.path.basename(project_dir) != "src":
|
||||
project_dir = os.path.dirname(project_dir)
|
||||
prefix = os.path.commonprefix([root_dir, project_dir])
|
||||
components = fullname[len(prefix) + 1:].split('/')
|
||||
return string.join(["cef"] + components[1:], '/')
|
||||
return fullname
|
||||
|
||||
def check_style(args, white_list = None, black_list = None):
|
||||
""" Execute cpplint with the specified arguments. """
|
||||
|
||||
# Apply patches.
|
||||
cpplint.FileInfo.RepositoryName = patch_RepositoryName
|
||||
|
||||
# Process cpplint arguments.
|
||||
filenames = cpplint.ParseArguments(args)
|
||||
|
||||
if not white_list:
|
||||
white_list = DEFAULT_LINT_WHITELIST_REGEX
|
||||
white_regex = re.compile(white_list)
|
||||
if not black_list:
|
||||
black_list = DEFAULT_LINT_BLACKLIST_REGEX
|
||||
black_regex = re.compile(black_list)
|
||||
|
||||
extra_check_functions = [cpplint_chromium.CheckPointerDeclarationWhitespace]
|
||||
|
||||
for filename in filenames:
|
||||
if white_regex.match(filename):
|
||||
if black_regex.match(filename):
|
||||
print "Ignoring file %s" % filename
|
||||
else:
|
||||
cpplint.ProcessFile(filename, cpplint._cpplint_state.verbose_level,
|
||||
extra_check_functions)
|
||||
else:
|
||||
print "Skipping file %s" % filename
|
||||
|
||||
print "Total errors found: %d\n" % cpplint._cpplint_state.error_count
|
||||
return 1
|
||||
|
||||
def get_changed_files():
|
||||
""" Retrieve the list of changed files. """
|
||||
try:
|
||||
return svn.get_changed_files(cef_dir)
|
||||
except:
|
||||
return git.get_changed_files(cef_dir)
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Start with the default parameters.
|
||||
args = [
|
||||
# * Disable the 'build/class' test because it errors uselessly with C
|
||||
# structure pointers and template declarations.
|
||||
# * Disable the 'runtime/references' test because CEF allows non-const
|
||||
# arguments passed by reference.
|
||||
# * Disable the 'runtime/sizeof' test because it has a high number of
|
||||
# false positives and adds marginal value.
|
||||
'--filter=-build/class,-runtime/references,-runtime/sizeof',
|
||||
]
|
||||
|
||||
# Add anything passed on the command-line.
|
||||
args += sys.argv[1:]
|
||||
|
||||
# Pre-process the arguments before passing to the linter.
|
||||
new_args = []
|
||||
changed = []
|
||||
for arg in args:
|
||||
if arg == '--changed':
|
||||
# Add any changed files.
|
||||
changed = get_changed_files()
|
||||
elif arg[:2] == '--' or not os.path.isdir(arg):
|
||||
# Pass argument unchanged.
|
||||
new_args.append(arg)
|
||||
else:
|
||||
# Add all files in the directory.
|
||||
new_args += get_files(os.path.join(arg, '*'))
|
||||
|
||||
if len(changed) > 0:
|
||||
new_args += changed
|
||||
|
||||
check_style(new_args)
|
2
tools/check_style.sh
Executable file
2
tools/check_style.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
#!/bin/sh
|
||||
python check_style.py $@
|
13
tools/date_util.py
Normal file
13
tools/date_util.py
Normal file
@@ -0,0 +1,13 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
import datetime
|
||||
|
||||
def get_year():
|
||||
""" Returns the current year. """
|
||||
return str(datetime.datetime.now().year)
|
||||
|
||||
def get_date():
|
||||
""" Returns the current date. """
|
||||
return datetime.datetime.now().strftime('%B %d, %Y')
|
5
tools/distrib/README-TRANSFER.txt
Normal file
5
tools/distrib/README-TRANSFER.txt
Normal file
@@ -0,0 +1,5 @@
|
||||
Files in this directory have been copied from other locations in the Chromium
|
||||
source tree. They have been modified only to the extent necessary to work in
|
||||
the CEF Binary Distribution directory structure. Below is a listing of the
|
||||
original file locations.
|
||||
|
244
tools/distrib/cefclient.gyp
Normal file
244
tools/distrib/cefclient.gyp
Normal file
@@ -0,0 +1,244 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
{
|
||||
'variables': {
|
||||
'chromium_code': 1,
|
||||
'conditions': [
|
||||
[ 'OS=="mac"', {
|
||||
# Don't use clang with CEF binary releases due to Chromium tree structure dependency.
|
||||
'clang': 0,
|
||||
}]
|
||||
]
|
||||
},
|
||||
'includes': [
|
||||
# Bring in the source file lists for cefclient.
|
||||
'cef_paths2.gypi',
|
||||
],
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'cefclient',
|
||||
'type': 'executable',
|
||||
'mac_bundle': 1,
|
||||
'msvs_guid': '6617FED9-C5D4-4907-BF55-A90062A6683F',
|
||||
'dependencies': [
|
||||
'libcef_dll_wrapper',
|
||||
],
|
||||
'defines': [
|
||||
'USING_CEF_SHARED',
|
||||
],
|
||||
'include_dirs': [
|
||||
'.',
|
||||
],
|
||||
'sources': [
|
||||
'<@(includes_common)',
|
||||
'<@(includes_wrapper)',
|
||||
'<@(cefclient_sources_common)',
|
||||
],
|
||||
'mac_bundle_resources': [
|
||||
'<@(cefclient_bundle_resources_mac)',
|
||||
],
|
||||
'mac_bundle_resources!': [
|
||||
# TODO(mark): Come up with a fancier way to do this (mac_info_plist?)
|
||||
# that automatically sets the correct INFOPLIST_FILE setting and adds
|
||||
# the file to a source group.
|
||||
'cefclient/mac/Info.plist',
|
||||
],
|
||||
'xcode_settings': {
|
||||
'INFOPLIST_FILE': 'cefclient/mac/Info.plist',
|
||||
# Target build path.
|
||||
'SYMROOT': 'xcodebuild',
|
||||
},
|
||||
'conditions': [
|
||||
['OS=="win"', {
|
||||
'msvs_settings': {
|
||||
'VCLinkerTool': {
|
||||
# Set /SUBSYSTEM:WINDOWS.
|
||||
'SubSystem': '2',
|
||||
'EntryPointSymbol' : 'wWinMainCRTStartup',
|
||||
},
|
||||
},
|
||||
'link_settings': {
|
||||
'libraries': [
|
||||
'-lcomctl32.lib',
|
||||
'-lshlwapi.lib',
|
||||
'-lrpcrt4.lib',
|
||||
'-lopengl32.lib',
|
||||
'-lglu32.lib',
|
||||
'-llib/$(ConfigurationName)/libcef.lib'
|
||||
],
|
||||
},
|
||||
'sources': [
|
||||
'<@(includes_win)',
|
||||
'<@(cefclient_sources_win)',
|
||||
],
|
||||
}],
|
||||
[ 'OS=="mac"', {
|
||||
'product_name': 'cefclient',
|
||||
'dependencies': [
|
||||
'cefclient_helper_app',
|
||||
],
|
||||
'copies': [
|
||||
{
|
||||
# Add library dependencies to the bundle.
|
||||
'destination': '<(PRODUCT_DIR)/cefclient.app/Contents/Frameworks/Chromium Embedded Framework.framework/Libraries/',
|
||||
'files': [
|
||||
'$(CONFIGURATION)/libcef.dylib',
|
||||
'$(CONFIGURATION)/ffmpegsumo.so',
|
||||
],
|
||||
},
|
||||
{
|
||||
# Add other resources to the bundle.
|
||||
'destination': '<(PRODUCT_DIR)/cefclient.app/Contents/Frameworks/Chromium Embedded Framework.framework/',
|
||||
'files': [
|
||||
'Resources/',
|
||||
],
|
||||
},
|
||||
{
|
||||
# Add the helper app.
|
||||
'destination': '<(PRODUCT_DIR)/cefclient.app/Contents/Frameworks',
|
||||
'files': [
|
||||
'<(PRODUCT_DIR)/cefclient Helper.app',
|
||||
],
|
||||
},
|
||||
],
|
||||
'postbuilds': [
|
||||
{
|
||||
'postbuild_name': 'Fix Framework Link',
|
||||
'action': [
|
||||
'install_name_tool',
|
||||
'-change',
|
||||
'@executable_path/libcef.dylib',
|
||||
'@executable_path/../Frameworks/Chromium Embedded Framework.framework/Libraries/libcef.dylib',
|
||||
'${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
|
||||
],
|
||||
},
|
||||
{
|
||||
# This postbuid step is responsible for creating the following
|
||||
# helpers:
|
||||
#
|
||||
# cefclient Helper EH.app and cefclient Helper NP.app are created
|
||||
# from cefclient Helper.app.
|
||||
#
|
||||
# The EH helper is marked for an executable heap. The NP helper
|
||||
# is marked for no PIE (ASLR).
|
||||
'postbuild_name': 'Make More Helpers',
|
||||
'action': [
|
||||
'tools/make_more_helpers.sh',
|
||||
'Frameworks',
|
||||
'cefclient',
|
||||
],
|
||||
},
|
||||
],
|
||||
'link_settings': {
|
||||
'libraries': [
|
||||
'$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
|
||||
'$(CONFIGURATION)/libcef.dylib',
|
||||
],
|
||||
},
|
||||
'sources': [
|
||||
'<@(includes_mac)',
|
||||
'<@(cefclient_sources_mac)',
|
||||
],
|
||||
}],
|
||||
[ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
|
||||
'copies': [
|
||||
{
|
||||
'destination': '<(PRODUCT_DIR)/files',
|
||||
'files': [
|
||||
'<@(cefclient_bundle_resources_linux)',
|
||||
],
|
||||
},
|
||||
],
|
||||
'sources': [
|
||||
'<@(includes_linux)',
|
||||
'<@(cefclient_sources_linux)',
|
||||
],
|
||||
}],
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'libcef_dll_wrapper',
|
||||
'type': 'static_library',
|
||||
'msvs_guid': 'A9D6DC71-C0DC-4549-AEA0-3B15B44E86A9',
|
||||
'defines': [
|
||||
'USING_CEF_SHARED',
|
||||
],
|
||||
'include_dirs': [
|
||||
'.',
|
||||
],
|
||||
'sources': [
|
||||
'<@(includes_common)',
|
||||
'<@(includes_capi)',
|
||||
'<@(includes_wrapper)',
|
||||
'<@(libcef_dll_wrapper_sources_common)',
|
||||
],
|
||||
'xcode_settings': {
|
||||
# Target build path.
|
||||
'SYMROOT': 'xcodebuild',
|
||||
},
|
||||
},
|
||||
],
|
||||
'conditions': [
|
||||
['OS=="mac"', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'cefclient_helper_app',
|
||||
'type': 'executable',
|
||||
'variables': { 'enable_wexit_time_destructors': 1, },
|
||||
'product_name': 'cefclient Helper',
|
||||
'mac_bundle': 1,
|
||||
'dependencies': [
|
||||
'libcef_dll_wrapper',
|
||||
],
|
||||
'defines': [
|
||||
'USING_CEF_SHARED',
|
||||
],
|
||||
'include_dirs': [
|
||||
'.',
|
||||
],
|
||||
'link_settings': {
|
||||
'libraries': [
|
||||
'$(CONFIGURATION)/libcef.dylib',
|
||||
],
|
||||
},
|
||||
'sources': [
|
||||
'<@(cefclient_sources_mac_helper)',
|
||||
],
|
||||
# TODO(mark): Come up with a fancier way to do this. It should only
|
||||
# be necessary to list helper-Info.plist once, not the three times it
|
||||
# is listed here.
|
||||
'mac_bundle_resources!': [
|
||||
'cefclient/mac/helper-Info.plist',
|
||||
],
|
||||
# TODO(mark): For now, don't put any resources into this app. Its
|
||||
# resources directory will be a symbolic link to the browser app's
|
||||
# resources directory.
|
||||
'mac_bundle_resources/': [
|
||||
['exclude', '.*'],
|
||||
],
|
||||
'xcode_settings': {
|
||||
'INFOPLIST_FILE': 'cefclient/mac/helper-Info.plist',
|
||||
},
|
||||
'postbuilds': [
|
||||
{
|
||||
# The framework defines its load-time path
|
||||
# (DYLIB_INSTALL_NAME_BASE) relative to the main executable
|
||||
# (chrome). A different relative path needs to be used in
|
||||
# cefclient_helper_app.
|
||||
'postbuild_name': 'Fix Framework Link',
|
||||
'action': [
|
||||
'install_name_tool',
|
||||
'-change',
|
||||
'@executable_path/libcef.dylib',
|
||||
'@executable_path/../../../../Frameworks/Chromium Embedded Framework.framework/Libraries/libcef.dylib',
|
||||
'${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}'
|
||||
],
|
||||
},
|
||||
],
|
||||
}, # target cefclient_helper_app
|
||||
],
|
||||
}], # OS=="mac"
|
||||
],
|
||||
}
|
89
tools/distrib/linux/README.txt
Normal file
89
tools/distrib/linux/README.txt
Normal file
@@ -0,0 +1,89 @@
|
||||
Chromium Embedded Framework (CEF) Binary Distribution
|
||||
-------------------------------------------------------------------------------
|
||||
|
||||
Date: $DATE$
|
||||
|
||||
CEF Version: $CEF_VER$
|
||||
CEF URL: $CEF_URL$@$CEF_REV$
|
||||
|
||||
Chromium Verison: $CHROMIUM_VER$
|
||||
Chromium URL: $CHROMIUM_URL$@$CHROMIUM_REV$
|
||||
|
||||
|
||||
This distribution contains all components necessary to build and distribute an
|
||||
application using CEF. Please see the LICENSING section of this document for
|
||||
licensing terms and conditions.
|
||||
|
||||
|
||||
CONTENTS
|
||||
--------
|
||||
|
||||
cefclient Contains the cefclient sample application configured to build
|
||||
using the files in this distribution.
|
||||
|
||||
Debug Contains libcef.so and other components required to run the debug
|
||||
version of CEF-based applications.
|
||||
|
||||
docs Contains C++ API documentation generated from the CEF header files.
|
||||
|
||||
include Contains all required CEF header files.
|
||||
|
||||
libcef_dll Contains the source code for the libcef_dll_wrapper static library
|
||||
that all applications using the CEF C++ API must link against.
|
||||
|
||||
Release Contains libcef.so and other components required to run the
|
||||
release version of CEF-based applications.
|
||||
|
||||
|
||||
USAGE
|
||||
-----
|
||||
|
||||
Run 'make -j4 cefclient BUILDTYPE=Debug' to build the cefclient target in
|
||||
Debug mode.
|
||||
|
||||
Please visit the CEF Website for additional usage information.
|
||||
|
||||
http://code.google.com/p/chromiumembedded
|
||||
|
||||
|
||||
REDISTRIBUTION
|
||||
--------------
|
||||
|
||||
This binary distribution contains the below components. Components listed under
|
||||
the "required" section must be redistributed with all applications using CEF.
|
||||
Components listed under the "optional" section may be excluded if the related
|
||||
features will not be used.
|
||||
|
||||
Required components:
|
||||
|
||||
* CEF core library
|
||||
libcef.so
|
||||
|
||||
Optional components:
|
||||
|
||||
* Localized resources
|
||||
locales/
|
||||
Note: Contains localized strings for WebKit UI controls. A .pak file is loaded
|
||||
from this folder based on the value of environment variables which are read
|
||||
with the following precedence order: LANGUAGE, LC_ALL, LC_MESSAGES and LANG.
|
||||
Only configured locales need to be distributed. If no locale is configured the
|
||||
default locale of "en-US" will be used. Locale file loading can be disabled
|
||||
completely using CefSettings.pack_loading_disabled. The locales folder path
|
||||
can be customized using CefSettings.locales_dir_path.
|
||||
|
||||
* Other resources
|
||||
cef.pak
|
||||
Note: Contains WebKit image and inspector resources. Pack file loading can be
|
||||
disabled completely using CefSettings.pack_loading_disabled. The cef.pak file
|
||||
path can be customized using CefSettings.pack_file_path.
|
||||
|
||||
|
||||
LICENSING
|
||||
---------
|
||||
|
||||
The CEF project is BSD licensed. Please read the LICENSE.txt file included with
|
||||
this binary distribution for licensing terms and conditions. Other software
|
||||
included in this distribution is provided under other licenses. Please visit the
|
||||
below link for complete Chromium and third-party licensing information.
|
||||
|
||||
http://code.google.com/chromium/terms.html
|
103
tools/distrib/mac/README.txt
Normal file
103
tools/distrib/mac/README.txt
Normal file
@@ -0,0 +1,103 @@
|
||||
Chromium Embedded Framework (CEF) Binary Distribution
|
||||
-------------------------------------------------------------------------------
|
||||
|
||||
Date: $DATE$
|
||||
|
||||
CEF Version: $CEF_VER$
|
||||
CEF URL: $CEF_URL$@$CEF_REV$
|
||||
|
||||
Chromium Verison: $CHROMIUM_VER$
|
||||
Chromium URL: $CHROMIUM_URL$@$CHROMIUM_REV$
|
||||
|
||||
|
||||
This distribution contains all components necessary to build and distribute an
|
||||
application using CEF. Please see the LICENSING section of this document for
|
||||
licensing terms and conditions.
|
||||
|
||||
|
||||
CONTENTS
|
||||
--------
|
||||
|
||||
cefclient Contains the cefclient sample application configured to build
|
||||
using the files in this distribution.
|
||||
|
||||
Debug Contains libcef.dylib and other components required to run the debug
|
||||
version of CEF-based applications.
|
||||
|
||||
docs Contains C++ API documentation generated from the CEF header files.
|
||||
|
||||
include Contains all required CEF header files.
|
||||
|
||||
libcef_dll Contains the source code for the libcef_dll_wrapper static library
|
||||
that all applications using the CEF C++ API must link against.
|
||||
|
||||
Release Contains libcef.dylib and other components required to run the
|
||||
release version of CEF-based applications.
|
||||
|
||||
Resources Contains images and resources required by applications using CEF.
|
||||
The contents of this folder should be transferred to the
|
||||
Contents/Resources folder in the app bundle.
|
||||
|
||||
tools Scripts that perform post-processing on Mac release targets.
|
||||
|
||||
|
||||
USAGE
|
||||
-----
|
||||
|
||||
Xcode 3 and 4: Open the cefclient.xcodeproj project and build.
|
||||
|
||||
When using Xcode 4.2 or newer you will need to change the "Compiler for
|
||||
C/C++/Objective-C" setting to "LLVM GCC 4.2" under "Build Settings" for
|
||||
each target.
|
||||
|
||||
Please visit the CEF Website for additional usage information.
|
||||
|
||||
http://code.google.com/p/chromiumembedded
|
||||
|
||||
|
||||
REDISTRIBUTION
|
||||
--------------
|
||||
|
||||
This binary distribution contains the below components. Components listed under
|
||||
the "required" section must be redistributed with all applications using CEF.
|
||||
Components listed under the "optional" section may be excluded if the related
|
||||
features will not be used.
|
||||
|
||||
Required components:
|
||||
|
||||
* CEF core library
|
||||
libcef.dylib
|
||||
|
||||
* Cursor resources
|
||||
Resources/*.png
|
||||
Resources/*.tiff
|
||||
|
||||
Optional components:
|
||||
|
||||
* Localized resources
|
||||
Resources/*.lproj/
|
||||
Note: Contains localized strings for WebKit UI controls. A .pak file is loaded
|
||||
from this folder based on the CefSettings.locale value. Only configured
|
||||
locales need to be distributed. If no locale is configured the default locale
|
||||
of "en" will be used. Locale file loading can be disabled completely using
|
||||
CefSettings.pack_loading_disabled.
|
||||
|
||||
* Other resources
|
||||
Resources/chrome.pak
|
||||
Note: Contains WebKit image and inspector resources. Pack file loading can be
|
||||
disabled completely using CefSettings.pack_loading_disabled.
|
||||
|
||||
* FFmpeg audio and video support
|
||||
ffmpegsumo.so
|
||||
Note: Without this component HTML5 audio and video will not function.
|
||||
|
||||
|
||||
LICENSING
|
||||
---------
|
||||
|
||||
The CEF project is BSD licensed. Please read the LICENSE.txt file included with
|
||||
this binary distribution for licensing terms and conditions. Other software
|
||||
included in this distribution is provided under other licenses. Please visit the
|
||||
below link for complete Chromium and third-party licensing information.
|
||||
|
||||
http://code.google.com/chromium/terms.html
|
33
tools/distrib/mac/transfer.cfg
Normal file
33
tools/distrib/mac/transfer.cfg
Normal file
@@ -0,0 +1,33 @@
|
||||
# Additional handling of transfer files.
|
||||
# target: Target location relative to the target release directory. This
|
||||
# value is required.
|
||||
# source: Source location relative to the CEF root directory. This value
|
||||
# is optional. If specified the target will be copied to this location
|
||||
# and a TRANSFER-README.txt file will be created.
|
||||
# post-process: Post-processing operation to perform. This value is
|
||||
# optional and may be any one of the following:
|
||||
# 'normalize_headers': Replace fully-qualified project header paths with
|
||||
# the optionally specified 'new_header_path' value.
|
||||
|
||||
[
|
||||
{
|
||||
'source' : '../build/mac/change_mach_o_flags_from_xcode.sh',
|
||||
'target' : 'tools/change_mach_o_flags_from_xcode.sh',
|
||||
},
|
||||
{
|
||||
'source' : '../build/mac/change_mach_o_flags.py',
|
||||
'target' : 'tools/change_mach_o_flags.py',
|
||||
},
|
||||
{
|
||||
'source' : '../build/mac/strip_from_xcode',
|
||||
'target' : 'tools/strip_from_xcode',
|
||||
},
|
||||
{
|
||||
'source' : '../build/mac/strip_save_dsym',
|
||||
'target' : 'tools/strip_save_dsym',
|
||||
},
|
||||
{
|
||||
'source' : '../build/mac/make_more_helpers.sh',
|
||||
'target' : 'tools/make_more_helpers.sh',
|
||||
},
|
||||
]
|
13
tools/distrib/transfer.cfg
Normal file
13
tools/distrib/transfer.cfg
Normal file
@@ -0,0 +1,13 @@
|
||||
# Additional handling of transfer files.
|
||||
# target: Target location relative to the target release directory. This
|
||||
# value is required.
|
||||
# source: Source location relative to the CEF root directory. This value
|
||||
# is optional. If specified the target will be copied to this location
|
||||
# and a TRANSFER-README.txt file will be created.
|
||||
# post-process: Post-processing operation to perform. This value is
|
||||
# optional and may be any one of the following:
|
||||
# 'normalize_headers': Replace fully-qualified project header paths with
|
||||
# the optionally specified 'new_header_path' value.
|
||||
|
||||
[
|
||||
]
|
113
tools/distrib/win/README.txt
Normal file
113
tools/distrib/win/README.txt
Normal file
@@ -0,0 +1,113 @@
|
||||
Chromium Embedded Framework (CEF) Binary Distribution
|
||||
-------------------------------------------------------------------------------
|
||||
|
||||
Date: $DATE$
|
||||
|
||||
CEF Version: $CEF_VER$
|
||||
CEF URL: $CEF_URL$@$CEF_REV$
|
||||
|
||||
Chromium Verison: $CHROMIUM_VER$
|
||||
Chromium URL: $CHROMIUM_URL$@$CHROMIUM_REV$
|
||||
|
||||
|
||||
This distribution contains all components necessary to build and distribute an
|
||||
application using CEF. Please see the LICENSING section of this document for
|
||||
licensing terms and conditions.
|
||||
|
||||
|
||||
CONTENTS
|
||||
--------
|
||||
|
||||
cefclient Contains the cefclient sample application configured to build
|
||||
using the files in this distribution.
|
||||
|
||||
Debug Contains libcef.dll and other components required to run the debug
|
||||
version of CEF-based applications. Also acts as the build target for
|
||||
the Debug build of cefclient.
|
||||
|
||||
docs Contains C++ API documentation generated from the CEF header files.
|
||||
|
||||
include Contains all required CEF header files.
|
||||
|
||||
lib Contains Debug and Release versions of the libcef.lib library file
|
||||
that all CEF-based applications must link against.
|
||||
|
||||
libcef_dll Contains the source code for the libcef_dll_wrapper static library
|
||||
that all applications using the CEF C++ API must link against.
|
||||
|
||||
Release Contains libcef.dll and other components required to run the release
|
||||
version of CEF-based applications. Also acts as the build target for
|
||||
the Release build of cefclient.
|
||||
|
||||
|
||||
USAGE
|
||||
-----
|
||||
|
||||
Visual Studio 2010: Open the cefclient2010.sln solution and build.
|
||||
Visual Studio 2008: Open the cefclient2008.sln solution and build.
|
||||
* If using VS2008 Express Edition add atlthunk.lib to the cefclient
|
||||
Configuration Properties > Linker > Input > Additional Dependencies
|
||||
Visual Studio 2005: Open the cefclient2005.sln solution and build.
|
||||
|
||||
Please visit the CEF Website for additional usage information.
|
||||
|
||||
http://code.google.com/p/chromiumembedded
|
||||
|
||||
|
||||
REDISTRIBUTION
|
||||
--------------
|
||||
|
||||
This binary distribution contains the below components. Components listed under
|
||||
the "required" section must be redistributed with all applications using CEF.
|
||||
Components listed under the "optional" section may be excluded if the related
|
||||
features will not be used.
|
||||
|
||||
Required components:
|
||||
|
||||
* CEF core library
|
||||
libcef.dll
|
||||
|
||||
* Unicode support
|
||||
icudt.dll
|
||||
|
||||
Optional components:
|
||||
|
||||
* Localized resources
|
||||
locales/
|
||||
Note: Contains localized strings for WebKit UI controls. A .pak file is loaded
|
||||
from this folder based on the CefSettings.locale value. Only configured
|
||||
locales need to be distributed. If no locale is configured the default locale
|
||||
of "en-US" will be used. Locale file loading can be disabled completely using
|
||||
CefSettings.pack_loading_disabled. The locales folder path can be customized
|
||||
using CefSettings.locales_dir_path.
|
||||
|
||||
* Other resources
|
||||
cef.pak
|
||||
Note: Contains WebKit image and inspector resources. Pack file loading can be
|
||||
disabled completely using CefSettings.pack_loading_disabled. The cef.pak file
|
||||
path can be customized using CefSettings.pack_file_path.
|
||||
|
||||
* FFmpeg audio and video support
|
||||
avcodec-54.dll
|
||||
avformat-54.dll
|
||||
avutil-51.dll
|
||||
Note: Without these components HTML5 audio and video will not function.
|
||||
|
||||
* Angle and Direct3D support
|
||||
d3dcompiler_43.dll
|
||||
d3dx9_43.dll
|
||||
libEGL.dll
|
||||
libGLESv2.dll
|
||||
Note: Without these components HTML5 accelerated content like 2D canvas, 3D
|
||||
CSS and WebGL will not function.
|
||||
|
||||
|
||||
LICENSING
|
||||
---------
|
||||
|
||||
The CEF project is BSD licensed. Please read the LICENSE.txt file included with
this binary distribution for licensing terms and conditions. Other software
included in this distribution is provided under other licenses. Please visit
the link below for complete Chromium and third-party licensing information:

  http://code.google.com/chromium/terms.html
|
BIN
tools/distrib/win/d3dcompiler_43.dll
Normal file
BIN
tools/distrib/win/d3dcompiler_43.dll
Normal file
Binary file not shown.
BIN
tools/distrib/win/d3dx9_43.dll
Normal file
BIN
tools/distrib/win/d3dx9_43.dll
Normal file
Binary file not shown.
111
tools/file_util.py
Normal file
111
tools/file_util.py
Normal file
@@ -0,0 +1,111 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from glob import iglob
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import time
|
||||
|
||||
def read_file(name, normalize = True):
  """ Read the file |name| and return its contents as a string.

  Arguments:
    name: path of the file to read.
    normalize: if True, convert Windows line endings (\\r\\n) to \\n.

  Failures are logged to stderr and the IOError/OSError is re-raised.
  """
  try:
    # The original closed the file in a try/else clause that was unreachable
    # because of the return inside try, leaking the handle. |with| guarantees
    # the file is closed on every path.
    with open(name, 'r') as f:
      # read the data
      data = f.read()
    if normalize:
      # normalize line endings
      data = data.replace("\r\n", "\n")
    return data
  except (IOError, OSError) as e:
    sys.stderr.write('Failed to read file '+name+': '+str(e.strerror))
    raise
|
||||
|
||||
def write_file(name, data):
  """ Write |data| to the file |name|, replacing any existing contents.

  Failures are logged to stderr and the IOError/OSError is re-raised.
  """
  try:
    # |with| guarantees the handle is closed even when the write fails; the
    # original only closed the file on the success path.
    with open(name, 'w') as f:
      # write the data
      f.write(data)
  except (IOError, OSError) as e:
    sys.stderr.write('Failed to write file '+name+': '+str(e.strerror))
    raise
|
||||
|
||||
def path_exists(name):
  """ Return True if the file or directory |name| currently exists. """
  return os.path.exists(name)
|
||||
|
||||
def backup_file(name):
  """ Rename |name| out of the way by appending a timestamp suffix. """
  stamp = time.strftime('%Y-%m-%d-%H-%M-%S')
  move_file(name, '%s.%s' % (name, stamp))
|
||||
|
||||
def copy_file(src, dst, quiet = True):
  """ Copy the file |src| to |dst|.

  Arguments:
    src: source file path.
    dst: destination file or directory path.
    quiet: if False, log the transfer to stdout.

  Failures are logged to stderr and the exception is re-raised.
  """
  try:
    shutil.copy(src, dst)
    if not quiet:
      sys.stdout.write('Transferring '+src+' file.\n')
  except (IOError, OSError) as e:
    # OSError is included because shutil can raise it in addition to IOError;
    # the original IOError-only handler let those pass unlogged.
    sys.stderr.write('Failed to copy file from '+src+' to '+dst+': '+str(e.strerror))
    raise
|
||||
|
||||
def move_file(src, dst, quiet = True):
  """ Move the file |src| to |dst|.

  Arguments:
    src: source file path.
    dst: destination file or directory path.
    quiet: if False, log the move to stdout.

  Failures are logged to stderr and the exception is re-raised.
  """
  try:
    shutil.move(src, dst)
    if not quiet:
      sys.stdout.write('Moving '+src+' file.\n')
  except (IOError, OSError) as e:
    # OSError is included because shutil.move can raise it as well as IOError.
    sys.stderr.write('Failed to move file from '+src+' to '+dst+': '+str(e.strerror))
    raise
|
||||
|
||||
def copy_files(src_glob, dst_folder, quiet = True):
  """ Copy every file or directory matching |src_glob| into |dst_folder|. """
  for match in iglob(src_glob):
    target = os.path.join(dst_folder, os.path.basename(match))
    # Directories are copied recursively; everything else as a single file.
    if os.path.isdir(match):
      copy_dir(match, target, quiet)
    else:
      copy_file(match, target, quiet)
|
||||
|
||||
def copy_dir(src, dst, quiet = True):
  """ Recursively copy the directory |src| to |dst|.

  Any existing |dst| directory is removed first because shutil.copytree
  requires that the destination not already exist.

  Failures are logged to stderr and the exception is re-raised.
  """
  try:
    remove_dir(dst, quiet)
    shutil.copytree(src, dst)
    if not quiet:
      sys.stdout.write('Transferring '+src+' directory.\n')
  except (IOError, OSError) as e:
    # shutil.copytree/rmtree report failures via OSError as well as IOError;
    # the original IOError-only handler let those pass unlogged.
    sys.stderr.write('Failed to copy directory from '+src+' to '+dst+': '+str(e.strerror))
    raise
|
||||
|
||||
def remove_dir(name, quiet = True):
  """ Delete the directory |name| and all of its contents, if it exists.

  Failures are logged to stderr and the exception is re-raised.
  """
  try:
    if path_exists(name):
      shutil.rmtree(name)
      if not quiet:
        sys.stdout.write('Removing '+name+' directory.\n')
  except (IOError, OSError) as e:
    # shutil.rmtree raises OSError (not IOError), which the original handler
    # missed, so rmtree failures propagated without being logged.
    sys.stderr.write('Failed to remove directory '+name+': '+str(e.strerror))
    raise
|
||||
|
||||
def make_dir(name, quiet = True):
  """ Create the directory |name| (and any missing parents) if needed.

  Failures are logged to stderr and the exception is re-raised.
  """
  try:
    if not path_exists(name):
      if not quiet:
        sys.stdout.write('Creating '+name+' directory.\n')
      os.makedirs(name)
  except (IOError, OSError) as e:
    # os.makedirs raises OSError (not IOError), which the original handler
    # missed, so creation failures propagated without being logged.
    sys.stderr.write('Failed to create directory '+name+': '+str(e.strerror))
    raise
|
||||
|
||||
def get_files(search_glob):
  """ Return an iterator over all file paths matching |search_glob|. """
  return iglob(search_glob)
|
30
tools/gclient_hook.py
Normal file
30
tools/gclient_hook.py
Normal file
@@ -0,0 +1,30 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors.
|
||||
# Portions copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
from gclient_util import *
|
||||
import os, sys
|
||||
|
||||
# The CEF root directory is the parent directory of _this_ script.
|
||||
cef_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
|
||||
|
||||
print "\nChecking CEF and Chromium revisions..."
|
||||
gyper = [ 'python', 'tools/check_revision.py' ]
|
||||
RunAction(cef_dir, gyper)
|
||||
|
||||
print "\nGenerating CEF version header file..."
|
||||
gyper = [ 'python', 'tools/make_version_header.py',
|
||||
'--header', 'include/cef_version.h',
|
||||
'--version', '../chrome/VERSION' ]
|
||||
RunAction(cef_dir, gyper)
|
||||
|
||||
print "\nPatching build configuration and source files for CEF..."
|
||||
patcher = [ 'python', 'tools/patcher.py',
|
||||
'--patch-config', 'patch/patch.cfg' ];
|
||||
RunAction(cef_dir, patcher)
|
||||
|
||||
print "\nGenerating CEF project files..."
|
||||
os.environ['CEF_DIRECTORY'] = os.path.basename(cef_dir);
|
||||
gyper = [ 'python', 'tools/gyp_cef', 'cef.gyp', '-I', 'cef.gypi' ]
|
||||
RunAction(cef_dir, gyper)
|
45
tools/gclient_util.py
Normal file
45
tools/gclient_util.py
Normal file
@@ -0,0 +1,45 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors.
|
||||
# Portions copyright (c) 2011 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
import os, sys
|
||||
|
||||
try:
|
||||
# depot_tools may already be in the import path.
|
||||
import gclient_utils
|
||||
except ImportError, e:
|
||||
# Search the PATH environment variable to find the depot_tools folder.
|
||||
depot_tools = None;
|
||||
paths = os.environ.get('PATH').split(os.pathsep)
|
||||
for path in paths:
|
||||
if os.path.exists(os.path.join(path, 'gclient_utils.py')):
|
||||
depot_tools = path
|
||||
break
|
||||
|
||||
if depot_tools is None:
|
||||
print >> sys.stderr, 'Error: could not find depot_tools in PATH.'
|
||||
sys.exit(2)
|
||||
|
||||
# Add depot_tools to import path.
|
||||
sys.path.append(depot_tools)
|
||||
import gclient_utils
|
||||
|
||||
# Copied from gclient.py python code.
|
||||
def RunAction(dir, command):
|
||||
"""Runs the action."""
|
||||
if command[0] == 'python':
|
||||
# If the hook specified "python" as the first item, the action is a
|
||||
# Python script. Run it by starting a new copy of the same
|
||||
# interpreter.
|
||||
command[0] = sys.executable
|
||||
|
||||
try:
|
||||
gclient_utils.CheckCallAndFilterAndHeader(
|
||||
command, cwd=dir, always=True)
|
||||
except gclient_utils.Error, e:
|
||||
# Use a discrete exit status code of 2 to indicate that a hook action
|
||||
# failed. Users of this script may wish to treat hook action failures
|
||||
# differently from VC failures.
|
||||
print >> sys.stderr, 'Error: %s' % str(e)
|
||||
sys.exit(2)
|
24
tools/git_util.py
Normal file
24
tools/git_util.py
Normal file
@@ -0,0 +1,24 @@
|
||||
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file
|
||||
|
||||
from subprocess import Popen, PIPE
|
||||
|
||||
def get_svn_revision(path=".", branch="master"):
|
||||
svn_rev = "None"
|
||||
cmd = ("git log --grep=^git-svn-id: -n 1 %s" % branch).split()
|
||||
try:
|
||||
process = Popen(cmd, cwd=path, stdout = PIPE, stderr = PIPE)
|
||||
for line in process.stdout:
|
||||
if line.find("git-svn-id") > 0:
|
||||
svn_rev = line.split("@")[1].split()[0]
|
||||
break
|
||||
except IOError, (errno, strerror):
|
||||
sys.stderr.write('Failed to read git log: ' + strerror + "\n")
|
||||
raise
|
||||
return svn_rev
|
||||
|
||||
def get_changed_files(path="."):
  """ Retrieve the list of changed files in the working copy at |path|.

  Not yet implemented; always returns an empty list.
  """
  return []
|
155
tools/gyp_cef
Normal file
155
tools/gyp_cef
Normal file
@@ -0,0 +1,155 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors.
|
||||
# Portions copyright (c) 2011 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
# This script is wrapper for CEF/Chromium that adds some support for how GYP
|
||||
# is invoked by Chromium beyond what can be done in the gclient hooks.
|
||||
|
||||
import glob
import os
import shlex
import subprocess
import sys

# The CEF root directory is the parent directory of _this_ script.
cef_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))

# The Chromium source directory is the parent directory of CEF.
chrome_src = os.path.abspath(os.path.join(cef_dir, os.pardir))

# Make Chromium's bundled GYP importable.
sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
import gyp

# Add paths so that pymod_do_main(grit_info ...) can import files.
sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))


# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
# seconds. Conversely, memory usage of build/gyp_chromium with Psyco
# maxes out at about 158 MB vs. 132 MB without it.
#
# Psyco uses native libraries, so we need to load a different
# installation depending on which OS we are running under. It has not
# been tested whether using Psyco on our Mac and Linux builds is worth
# it (the GYP running time is a lot shorter, so the JIT startup cost
# may not be worth it).
psyco = None
if sys.platform == 'win32':
  try:
    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
    import psyco
  except ImportError:
    # Psyco is optional. The original used a bare `except:` which also
    # swallowed SystemExit/KeyboardInterrupt; only a missing module is
    # an expected failure here.
    psyco = None
|
||||
|
||||
def apply_gyp_environment(file_path=None):
  """
  Reads in a *.gyp_env file and applies the valid keys to os.environ.

  The file must contain a Python dict literal mapping supported GYP variable
  names to string values. Values already present in os.environ take
  precedence over values from the file.
  """
  if not file_path or not os.path.exists(file_path):
    return
  # |with| ensures the handle is closed (the original leaked it).
  with open(file_path) as f:
    file_contents = f.read()
  try:
    # SECURITY NOTE: this eval()s repository-local configuration. Builtins
    # are stripped, but the file contents are still evaluated as a Python
    # expression -- only trusted chromium.gyp_env files should be present.
    file_data = eval(file_contents, {'__builtins__': None}, None)
  except SyntaxError as e:
    e.filename = os.path.abspath(file_path)
    raise
  supported_vars = ( 'CHROMIUM_GYP_SYNTAX_CHECK',
                     'GYP_DEFINES',
                     'GYP_GENERATOR_FLAGS',
                     'GYP_GENERATOR_OUTPUT', )
  for var in supported_vars:
    val = file_data.get(var)
    if val:
      if var in os.environ:
        print('INFO: Environment value for "%s" overrides value in %s.' % (
            var, os.path.abspath(file_path)))
      else:
        os.environ[var] = val
|
||||
|
||||
def additional_include_files(args=[]):
  """
  Return a list of additional (.gypi) files to include, skipping any that
  were already specified on the command line.
  """
  # Collect the real paths of includes already given as -I<path>. This
  # doesn't cover all the different option formats you can use, but it's
  # mainly intended to avoid duplicating flags on the automatic makefile
  # regeneration which only uses this format.
  specified = set(os.path.realpath(a[2:]) for a in args
                  if a.startswith('-I') and len(a) > 2)

  extras = []
  def AddInclude(path):
    if os.path.realpath(path) not in specified:
      extras.append(path)

  # Always include common.gypi.
  AddInclude(os.path.join(chrome_src, 'build', 'common.gypi'))

  # Optionally add supplemental .gypi files if present.
  for supplement in glob.glob(os.path.join(chrome_src, '*',
                                           'supplement.gypi')):
    AddInclude(supplement)

  return extras
|
||||
|
||||
if __name__ == '__main__':
  args = sys.argv[1:]

  # Use the Psyco JIT if available.
  if psyco:
    psyco.profile()
    print("Enabled Psyco JIT.")

  # Fall back on hermetic python if we happen to get run under cygwin.
  # TODO(bradnelson): take this out once this issue is fixed:
  # http://code.google.com/p/gyp/issues/detail?id=177
  if sys.platform == 'cygwin':
    python_dir = os.path.join(chrome_src, 'third_party', 'python_26')
    env = os.environ.copy()
    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
    p = subprocess.Popen(
        [os.path.join(python_dir, 'python.exe')] + sys.argv,
        env=env, shell=False)
    p.communicate()
    sys.exit(p.returncode)

  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
    # Update the environment based on chromium.gyp_env
    gyp_env_path = os.path.join(os.path.dirname(chrome_src),
                                'chromium.gyp_env')
    apply_gyp_environment(gyp_env_path)

  args.extend(['-I' + i for i in additional_include_files(args)])

  # There shouldn't be a circular dependency relationship between .gyp files,
  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
  # currently exist. The check for circular dependencies is currently
  # bypassed on other platforms, but is left enabled on the Mac, where a
  # violation of the rule causes Xcode to misbehave badly.
  # TODO(mark): Find and kill remaining circular dependencies, and remove this
  # option. http://crbug.com/35878.
  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
  # list.
  if sys.platform not in ('darwin',):
    args.append('--no-circular-check')

  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
  # to enforce syntax checking.
  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
  if syntax_check and int(syntax_check):
    args.append('--check')

  print('Updating projects from gyp files...')
  sys.stdout.flush()

  # Off we go...
  sys.exit(gyp.main(args))
|
174
tools/make_capi_header.py
Normal file
174
tools/make_capi_header.py
Normal file
@@ -0,0 +1,174 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from cef_parser import *
|
||||
from date_util import *
|
||||
|
||||
def make_capi_global_funcs(funcs, defined_names, translate_map, indent):
  """ Emit CEF_EXPORT C API prototypes for the global functions |funcs|. """
  result = ''
  first = True
  for func in funcs:
    comment = func.get_comment()
    # Always emit a comment block before the first function; afterwards only
    # when the function actually carries one.
    if first or len(comment) > 0:
      result += '\n'+format_comment(comment, indent, translate_map)
    if func.get_retval().get_type().is_result_string():
      result += indent+'// The resulting string must be freed by calling cef_string_userfree_free().\n'
    result += wrap_code(indent+'CEF_EXPORT '+
                        func.get_capi_proto(defined_names)+';')
    first = False
  return result
|
||||
|
||||
def make_capi_member_funcs(funcs, defined_names, translate_map, indent):
  """ Emit CEF_CALLBACK member function pointer declarations for |funcs|. """
  result = ''
  first = True
  for func in funcs:
    comment = func.get_comment()
    if first or len(comment) > 0:
      result += '\n'+format_comment(comment, indent, translate_map)
    if func.get_retval().get_type().is_result_string():
      result += indent+'// The resulting string must be freed by calling cef_string_userfree_free().\n'
    parts = func.get_capi_parts()
    result += wrap_code(indent+parts['retval']+' (CEF_CALLBACK *'+
                        parts['name']+')('+
                        string.join(parts['args'], ', ')+');')
    first = False
  return result
|
||||
|
||||
def make_capi_header(header, filename):
  """ Generate the complete C API (.h) file contents for |filename|.

  Arguments:
    header: the obj_header describing the parsed C++ API.
    filename: name of the C++ header the C API is generated from.
  """
  # structure names that have already been defined
  defined_names = header.get_defined_structs()

  # map of strings that will be changed in C++ comments
  translate_map = header.get_capi_translations()

  # header string (the "should not be edited" wording fixes a typo that the
  # original emitted into every generated file: "should not edited")
  result = \
"""// Copyright (c) $YEAR$ Marshall A. Greenblatt. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// ---------------------------------------------------------------------------
//
// This file was generated by the CEF translator tool and should not be edited
// by hand. See the translator.README.txt file in the tools directory for
// more information.
//

#ifndef $GUARD$
#define $GUARD$
#pragma once

#ifdef __cplusplus
extern "C" {
#endif

#include "include/capi/cef_base_capi.h"

"""
  # output global functions
  funcs = header.get_funcs(filename)
  if len(funcs) > 0:
    result += make_capi_global_funcs(funcs, defined_names, translate_map, '')

  # output classes
  classes = header.get_classes(filename)
  for cls in classes:
    # virtual functions are inside the structure
    classname = cls.get_capi_name()
    result += '\n'+format_comment(cls.get_comment(), '', translate_map)
    result += 'typedef struct _'+classname+ \
              ' {\n ///\n // Base structure.\n ///\n cef_base_t base;\n'
    funcs = cls.get_virtual_funcs()
    result += make_capi_member_funcs(funcs, defined_names,
                                     translate_map, ' ')
    result += '} '+classname+';\n\n'

    defined_names.append(classname)

    # static functions become global
    funcs = cls.get_static_funcs()
    if len(funcs) > 0:
      result += make_capi_global_funcs(funcs, defined_names,
                                       translate_map, '')+'\n'

  # footer string
  result += \
"""
#ifdef __cplusplus
}
#endif

#endif // $GUARD$
"""

  # add the copyright year
  result = result.replace('$YEAR$', get_year())
  # add the guard string
  guard = 'CEF_INCLUDE_CAPI_'+string.upper(filename.replace('.', '_capi_'))+'_'
  result = result.replace('$GUARD$', guard)

  return result
|
||||
|
||||
|
||||
def write_capi_header(header, filepath, backup):
  """ Write the generated C API header for |filepath| if it changed.

  Returns True when the file on disk was updated, False when it was already
  current. When |backup| is True the previous version is preserved under a
  timestamped name.
  """
  capi_path = get_capi_file_name(filepath)
  oldcontents = read_file(capi_path) if path_exists(capi_path) else ''

  newcontents = make_capi_header(header, os.path.split(filepath)[1])
  if newcontents == oldcontents:
    return False

  if backup and oldcontents != '':
    backup_file(capi_path)
  write_file(capi_path, newcontents)
  return True
|
||||
|
||||
|
||||
# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 2:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # dump the generated C API header to stdout
  sys.stdout.write(make_capi_header(header, os.path.split(sys.argv[1])[1]))
|
18
tools/make_cppdocs.bat
Normal file
18
tools/make_cppdocs.bat
Normal file
@@ -0,0 +1,18 @@
|
||||
@echo off
rem Generate CEF3 C++ API documentation with the CppDoc tool.
rem Usage: make_cppdocs.bat [revision]
rem   No argument: use the interactive CppDoc.exe with revision label "XXX".
rem   With an argument: use the command-line cppdoc_cmd.exe and label the
rem   docs with the given revision number.
setlocal

if "%1"=="" (
set CPPDOC_EXE="C:\Program Files (x86)\richfeit\CppDoc\CppDoc.exe"
set CPPDOC_REV="XXX"
) else (
set CPPDOC_EXE="C:\Program Files (x86)\richfeit\CppDoc\cppdoc_cmd.exe"
set CPPDOC_REV="%1"
)

rem CppDoc is a third-party install; bail out with a hint when missing.
if not exist %CPPDOC_EXE% (
echo ERROR: Please install CppDoc from http://www.cppdoc.com/
) else (
rem Excludes cef_runnable.h, cef_tuple.h and the capi folder; output lands in ..\docs.
%CPPDOC_EXE% -overwrite -title="CEF3 C++ API Docs - Revision %CPPDOC_REV%" -footer="<center><a href="http://code.google.com/p/chromiumembedded" target="_top">Chromium Embedded Framework (CEF)</a> Copyright © 2012 Marshall A. Greenblatt</center>" -namespace-as-project -comment-format="///;//;///" -classdir=projects -module="cppdoc-standard" -extensions=h -languages="c=cpp,cc=cpp,cpp=cpp,cs=csharp,cxx=cpp,h=cpp,hpp=cpp,hxx=cpp,java=java" -D"OS_WIN" -D"USING_CEF_SHARED" -D"__cplusplus" -D"CEF_STRING_TYPE_UTF16" -enable-author=false -enable-deprecations=true -enable-since=true -enable-version=false -file-links-for-globals=false -generate-deprecations-list=false -generate-hierarchy=true -header-background-dark="#ccccff" -header-background-light="#eeeeff" -include-private=false -include-protected=true -index-file-base=index -overview-html=overview.html -reduce-summary-font=true -selected-text-background=navy -selected-text-foreground=white -separate-index-pages=false -show-cppdoc-version=false -show-timestamp=false -summary-html=project.html -suppress-details=false -suppress-frames-links=false -table-background=white -wrap-long-lines=false ..\include #cef_runnable.h #cef_tuple.h #capi "..\docs\index.html"
)

endlocal
|
106
tools/make_cpptoc_header.py
Normal file
106
tools/make_cpptoc_header.py
Normal file
@@ -0,0 +1,106 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from cef_parser import *
|
||||
|
||||
def make_cpptoc_header(header, clsname):
  """ Generate the CppToC wrapper header (.h) contents for class |clsname|.

  Raises an Exception when |clsname| is unknown to |header|.
  """
  cls = header.get_class(clsname)
  if cls is None:
    raise Exception('Class does not exist: '+clsname)

  dllside = cls.is_library_side()
  defname = string.upper(get_capi_name(clsname[3:], False))
  capiname = cls.get_capi_name()
  guard = 'CEF_LIBCEF_DLL_CPPTOC_'+defname+'_CPPTOC_H_'

  result = get_copyright()

  result += '#ifndef '+guard+'\n'+ \
            '#define '+guard+'\n' + \
            '#pragma once\n'

  # Warn when the header is included from the wrong side of the DLL boundary.
  if dllside:
    result += """
#ifndef BUILDING_CEF_SHARED
#pragma message("Warning: "__FILE__" may be accessed DLL-side only")
#else // BUILDING_CEF_SHARED
"""
  else:
    result += """
#ifndef USING_CEF_SHARED
#pragma message("Warning: "__FILE__" may be accessed wrapper-side only")
#else // USING_CEF_SHARED
"""

  # include the headers for this class
  result += '\n#include "include/'+cls.get_file_name()+'"\n' \
            '#include "include/capi/'+cls.get_capi_file_name()+'"\n'

  # include headers for any forward declared classes that are not in the same file
  for declare in cls.get_forward_declares():
    dcls = header.get_class(declare)
    if dcls.get_file_name() != cls.get_file_name():
      result += '#include "include/'+dcls.get_file_name()+'"\n' \
                '#include "include/capi/'+dcls.get_capi_file_name()+'"\n'

  result += """#include "libcef_dll/cpptoc/cpptoc.h"

// Wrap a C++ class with a C structure.
"""

  if dllside:
    result += '// This class may be instantiated and accessed DLL-side only.\n'
  else:
    result += '// This class may be instantiated and accessed wrapper-side only.\n'

  result += 'class '+clsname+'CppToC\n'+ \
            ' : public CefCppToC<'+clsname+'CppToC, '+clsname+', '+capiname+'> {\n'+ \
            ' public:\n'+ \
            ' explicit '+clsname+'CppToC('+clsname+'* cls);\n'+ \
            ' virtual ~'+clsname+'CppToC() {}\n'+ \
            '};\n\n'

  if dllside:
    result += '#endif // BUILDING_CEF_SHARED\n'
  else:
    result += '#endif // USING_CEF_SHARED\n'

  result += '#endif // '+guard+'\n'

  return wrap_code(result)
|
||||
|
||||
|
||||
def write_cpptoc_header(header, clsname, dir, backup):
  """ Write the CppToC header for |clsname| into directory |dir| if changed.

  Returns True when the file was (re)written, False when already current.
  When |backup| is True the previous version is preserved under a
  timestamped name.
  """
  # Local renamed from |file| to avoid shadowing the builtin.
  target = dir+os.sep+get_capi_name(clsname[3:], False)+'_cpptoc.h'

  oldcontents = read_file(target) if path_exists(target) else ''

  newcontents = make_cpptoc_header(header, clsname)
  if newcontents == oldcontents:
    return False

  if backup and oldcontents != '':
    backup_file(target)
  write_file(target, newcontents)
  return True
|
||||
|
||||
|
||||
# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 3:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname>')
    sys.exit()

  # create the header object and dump the generated wrapper header to stdout
  header = obj_header()
  header.add_file(sys.argv[1])
  sys.stdout.write(make_cpptoc_header(header, sys.argv[2]))
|
563
tools/make_cpptoc_impl.py
Normal file
563
tools/make_cpptoc_impl.py
Normal file
@@ -0,0 +1,563 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from cef_parser import *
|
||||
|
||||
def make_cpptoc_impl_proto(name, func, parts):
  """ Build the C function prototype string for |name| from |parts|.

  Virtual member functions use the CEF_CALLBACK calling convention; global
  and static functions are CEF_EXPORTed.
  """
  if isinstance(func, obj_function_virtual):
    proto = parts['retval']+' CEF_CALLBACK'
  else:
    proto = 'CEF_EXPORT '+parts['retval']
  return proto+' '+name+'('+string.join(parts['args'], ', ')+')'
|
||||
|
||||
def make_cpptoc_function_impl_existing(name, func, impl, defined_names):
  """ Re-emit a manually-edited function implementation |impl| under the
  current translated prototype, warning when the prototype changed. """
  notify(name+' has manual edits')

  # retrieve the C API prototype parts
  parts = func.get_capi_parts(defined_names)

  changes = format_translation_changes(impl, parts)
  if len(changes) > 0:
    notify(name+' prototype changed')

  # NOTE: the original ended with an unreachable 'return result' statement
  # that referenced an undefined name; that dead code has been removed.
  return wrap_code(make_cpptoc_impl_proto(name, func, parts))+'{'+ \
         changes+impl['body']+'\n}\n'
|
||||
|
||||
def make_cpptoc_function_impl_new(name, func, defined_names):
  """Auto-generate the C (cpptoc) implementation for function |name|.

  Builds the function body in four ordered phases: parameter verification,
  C-to-C++ parameter translation, execution of the wrapped C++ method, and
  post-call parameter restoration / return-value translation. The
  |result_len| checkpoints separate the phases with blank lines only when a
  phase actually emitted code. Returns the wrapped source text.

  NOTE(review): the exact whitespace inside the emitted C string literals
  (two-space indent) was reconstructed from convention — confirm against
  the canonical source.
  """
  # retrieve the C API prototype parts
  parts = func.get_capi_parts(defined_names)
  result = make_cpptoc_impl_proto(name, func, parts)+' {'

  invalid = []

  # retrieve the function arguments
  args = func.get_arguments()

  # determine the argument types; anything 'invalid' blocks autogeneration
  for arg in args:
    if arg.get_arg_type() == 'invalid':
      invalid.append(arg.get_name())

  # retrieve the function return value
  retval = func.get_retval()
  retval_type = retval.get_retval_type()
  if retval_type == 'invalid':
    invalid.append('(return value)')
    retval_default = ''
  else:
    retval_default = retval.get_retval_default(True)
    if len(retval_default) > 0:
      retval_default = ' '+retval_default;

  if len(invalid) > 0:
    notify(name+' could not be autogenerated')
    # code could not be auto-generated; emit a stub that warns at compile
    # time and must be completed by hand
    result += '\n  // BEGIN DELETE BEFORE MODIFYING'
    result += '\n  // AUTO-GENERATED CONTENT'
    result += '\n  // COULD NOT IMPLEMENT DUE TO: '+string.join(invalid, ', ')
    result += '\n  #pragma message("Warning: "__FILE__": '+name+' is not implemented")'
    result += '\n  // END DELETE BEFORE MODIFYING'
    result += '\n}\n\n'
    return wrap_code(result)

  result += '\n  // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING\n'

  # checkpoint used to detect whether a phase emitted any code
  result_len = len(result)

  optional = []

  # parameter verification
  if isinstance(func, obj_function_virtual):
    result += '\n  DCHECK(self);'\
              '\n  if (!self)'\
              '\n    return'+retval_default+';'

  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    # skip optional params
    optional_params = arg.parent.get_attrib_list('optional_param')
    if not optional_params is None and arg_name in optional_params:
      optional.append(arg_name)
      continue

    comment = '\n  // Verify param: '+arg_name+'; type: '+arg_type

    # pointer-like params must be non-NULL
    if arg_type == 'simple_byref' or arg_type == 'simple_byref_const' or \
       arg_type == 'simple_byaddr' or arg_type == 'bool_byref' or arg_type == 'bool_byaddr' or \
       arg_type == 'struct_byref_const' or arg_type == 'struct_byref' or \
       arg_type == 'string_byref_const' or arg_type == 'string_byref' or \
       arg_type == 'refptr_same' or arg_type == 'refptr_same_byref' or \
       arg_type == 'refptr_diff' or arg_type == 'refptr_diff_byref' or \
       arg_type == 'string_vec_byref' or arg_type == 'string_vec_byref_const' or \
       arg_type == 'string_map_single_byref' or arg_type == 'string_map_single_byref_const' or \
       arg_type == 'string_map_multi_byref' or arg_type == 'string_map_multi_byref_const':
      result += comment+\
                '\n  DCHECK('+arg_name+');'\
                '\n  if (!'+arg_name+')'\
                '\n    return'+retval_default+';'
    # writable vectors: the count pointer must exist and agree with the buffer
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
         arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      result += comment+\
                '\n  DCHECK('+arg_name+'Count && (*'+arg_name+'Count == 0 || '+arg_name+'));'\
                '\n  if (!'+arg_name+'Count || (*'+arg_name+'Count > 0 && !'+arg_name+'))'\
                '\n    return'+retval_default+';'
    # read-only vectors: a non-zero count requires a buffer
    elif arg_type == 'simple_vec_byref_const' or arg_type == 'bool_vec_byref_const' or \
         arg_type == 'refptr_vec_same_byref_const' or arg_type == 'refptr_vec_diff_byref_const':
      result += comment+\
                '\n  DCHECK('+arg_name+'Count == 0 || '+arg_name+');'\
                '\n  if ('+arg_name+'Count > 0 && !'+arg_name+')'\
                '\n    return'+retval_default+';'

    # check index params
    index_params = arg.parent.get_attrib_list('index_param')
    if not index_params is None and arg_name in index_params:
      result += comment+\
                '\n  DCHECK_GE('+arg_name+', 0);'\
                '\n  if ('+arg_name+' < 0)'\
                '\n    return'+retval_default+';'

  if len(optional) > 0:
    result += '\n  // Unverified params: '+string.join(optional,', ')

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # parameter translation
  params = []

  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    comment = '\n  // Translate param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byval' or arg_type == 'simple_byaddr':
      params.append(arg_name)
    elif arg_type == 'simple_byref' or arg_type == 'simple_byref_const':
      data_type = arg.get_type().get_type()
      default = arg.get_type().get_result_simple_default()
      result += comment+\
                '\n  '+data_type+' '+arg_name+'Val = '+arg_name+'?*'+arg_name+':'+default+';'
      params.append(arg_name+'Val')
    elif arg_type == 'bool_byval':
      params.append(arg_name+'?true:false')
    elif arg_type == 'bool_byref' or arg_type == 'bool_byaddr':
      result += comment+\
                '\n  bool '+arg_name+'Bool = ('+arg_name+' && *'+arg_name+')?true:false;'
      if arg_type == 'bool_byref':
        params.append(arg_name+'Bool')
      else:
        params.append('&'+arg_name+'Bool')
    elif arg_type == 'struct_byref_const':
      struct_type = arg.get_type().get_type()
      result += comment+\
                '\n  '+struct_type+' '+arg_name+'Obj;'\
                '\n  if ('+arg_name+')'\
                '\n    '+arg_name+'Obj.Set(*'+arg_name+', false);'
      params.append(arg_name+'Obj')
    elif arg_type == 'struct_byref':
      struct_type = arg.get_type().get_type()
      result += comment+\
                '\n  '+struct_type+' '+arg_name+'Obj;'\
                '\n  if ('+arg_name+')'\
                '\n    '+arg_name+'Obj.AttachTo(*'+arg_name+');'
      params.append(arg_name+'Obj')
    elif arg_type == 'string_byref_const':
      params.append('CefString('+arg_name+')')
    elif arg_type == 'string_byref':
      result += comment+\
                '\n  CefString '+arg_name+'Str('+arg_name+');'
      params.append(arg_name+'Str')
    elif arg_type == 'refptr_same' or arg_type == 'refptr_diff':
      refptr_class = arg.get_type().get_refptr_type()
      # same-side objects are unwrapped; cross-side objects are wrapped
      if arg_type == 'refptr_same':
        params.append(refptr_class+'CppToC::Unwrap('+arg_name+')')
      else:
        params.append(refptr_class+'CToCpp::Wrap('+arg_name+')')
    elif arg_type == 'refptr_same_byref' or arg_type == 'refptr_diff_byref':
      refptr_class = arg.get_type().get_refptr_type()
      if arg_type == 'refptr_same_byref':
        assign = refptr_class+'CppToC::Unwrap(*'+arg_name+')'
      else:
        assign = refptr_class+'CToCpp::Wrap(*'+arg_name+')'
      # keep the original pointer so restoration can detect replacement
      result += comment+\
                '\n  CefRefPtr<'+refptr_class+'> '+arg_name+'Ptr;'\
                '\n  if ('+arg_name+' && *'+arg_name+')'\
                '\n    '+arg_name+'Ptr = '+assign+';'\
                '\n  '+refptr_class+'* '+arg_name+'Orig = '+arg_name+'Ptr.get();'
      params.append(arg_name+'Ptr')
    elif arg_type == 'string_vec_byref' or arg_type == 'string_vec_byref_const':
      result += comment+\
                '\n  std::vector<CefString> '+arg_name+'List;'\
                '\n  transfer_string_list_contents('+arg_name+', '+arg_name+'List);'
      params.append(arg_name+'List')
    elif arg_type == 'string_map_single_byref' or arg_type == 'string_map_single_byref_const':
      result += comment+\
                '\n  std::map<CefString, CefString> '+arg_name+'Map;'\
                '\n  transfer_string_map_contents('+arg_name+', '+arg_name+'Map);'
      params.append(arg_name+'Map')
    elif arg_type == 'string_map_multi_byref' or arg_type == 'string_map_multi_byref_const':
      result += comment+\
                '\n  std::multimap<CefString, CefString> '+arg_name+'Multimap;'\
                '\n  transfer_string_multimap_contents('+arg_name+', '+arg_name+'Multimap);'
      params.append(arg_name+'Multimap')
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
         arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      vec_type = arg.get_type().get_vector_type()
      if arg_type == 'simple_vec_byref':
        assign = arg_name+'[i]'
      elif arg_type == 'bool_vec_byref':
        assign = arg_name+'[i]?true:false'
      elif arg_type == 'refptr_vec_same_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Unwrap('+arg_name+'[i])'
      elif arg_type == 'refptr_vec_diff_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Wrap('+arg_name+'[i])'
      result += comment+\
                '\n  std::vector<'+vec_type+' > '+arg_name+'List;'\
                '\n  if ('+arg_name+'Count && *'+arg_name+'Count > 0 && '+arg_name+') {'\
                '\n    for (size_t i = 0; i < *'+arg_name+'Count; ++i) {'\
                '\n      '+arg_name+'List.push_back('+assign+');'\
                '\n    }'\
                '\n  }'
      params.append(arg_name+'List')
    elif arg_type == 'simple_vec_byref_const' or arg_type == 'bool_vec_byref_const' or \
         arg_type == 'refptr_vec_same_byref_const' or arg_type == 'refptr_vec_diff_byref_const':
      vec_type = arg.get_type().get_vector_type()
      if arg_type == 'simple_vec_byref_const':
        assign = arg_name+'[i]'
      elif arg_type == 'bool_vec_byref_const':
        assign = arg_name+'[i]?true:false'
      elif arg_type == 'refptr_vec_same_byref_const':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Unwrap('+arg_name+'[i])'
      elif arg_type == 'refptr_vec_diff_byref_const':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Wrap('+arg_name+'[i])'
      result += comment+\
                '\n  std::vector<'+vec_type+' > '+arg_name+'List;'\
                '\n  if ('+arg_name+'Count > 0) {'\
                '\n    for (size_t i = 0; i < '+arg_name+'Count; ++i) {'\
                '\n      '+arg_name+'List.push_back('+assign+');'\
                '\n    }'\
                '\n  }'
      params.append(arg_name+'List')

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # execution
  result += '\n  // Execute\n  '

  if retval_type != 'none':
    # has a return value
    if retval_type == 'simple':
      result += retval.get_type().get_result_simple_type()
    else:
      result += retval.get_type().get_type()
    result += ' _retval = '

  if isinstance(func.parent, obj_class):
    # virtual and static class methods
    if isinstance(func, obj_function_virtual):
      result += func.parent.get_name()+'CppToC::Get(self)->'
    else:
      result += func.parent.get_name()+'::'
  result += func.get_name()+'('

  if len(params) > 0:
    result += '\n      '+string.join(params,',\n      ')

  result += ');\n'

  result_len = len(result)

  # parameter restoration: copy values mutated by the C++ call back into
  # the caller-supplied C out-params
  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    comment = '\n  // Restore param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byref':
      result += comment+\
                '\n  if ('+arg_name+')'\
                '\n    *'+arg_name+' = '+arg_name+'Val;'
    elif arg_type == 'bool_byref' or arg_type == 'bool_byaddr':
      result += comment+\
                '\n  if ('+arg_name+')'\
                '\n    *'+arg_name+' = '+arg_name+'Bool?true:false;'
    elif arg_type == 'struct_byref':
      result += comment+\
                '\n  if ('+arg_name+')'\
                '\n    '+arg_name+'Obj.DetachTo(*'+arg_name+');'
    elif arg_type == 'refptr_same_byref' or arg_type == 'refptr_diff_byref':
      refptr_class = arg.get_type().get_refptr_type()
      if arg_type == 'refptr_same_byref':
        assign = refptr_class+'CppToC::Wrap('+arg_name+'Ptr)'
      else:
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+'Ptr)'
      # only rewrite the out-param if the callee replaced or cleared it
      result += comment+\
                '\n  if ('+arg_name+') {'\
                '\n    if ('+arg_name+'Ptr.get()) {'\
                '\n      if ('+arg_name+'Ptr.get() != '+arg_name+'Orig) {'\
                '\n        *'+arg_name+' = '+assign+';'\
                '\n      }'\
                '\n    } else {'\
                '\n      *'+arg_name+' = NULL;'\
                '\n    }'\
                '\n  }'
    elif arg_type == 'string_vec_byref':
      result += comment+\
                '\n  cef_string_list_clear('+arg_name+');'\
                '\n  transfer_string_list_contents('+arg_name+'List, '+arg_name+');'
    elif arg_type == 'string_map_single_byref':
      result += comment+\
                '\n  cef_string_map_clear('+arg_name+');'\
                '\n  transfer_string_map_contents('+arg_name+'Map, '+arg_name+');'
    elif arg_type == 'string_map_multi_byref':
      result += comment+\
                '\n  cef_string_multimap_clear('+arg_name+');'\
                '\n  transfer_string_multimap_contents('+arg_name+'Multimap, '+arg_name+');'
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
         arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      if arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref':
        assign = arg_name+'List[i]'
      elif arg_type == 'refptr_vec_same_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Wrap('+arg_name+'List[i])'
      elif arg_type == 'refptr_vec_diff_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+'List[i])'
      # clamp to the caller-provided buffer size before copying back
      result += comment+\
                '\n  if ('+arg_name+'Count && '+arg_name+') {'\
                '\n    *'+arg_name+'Count = std::min('+arg_name+'List.size(), *'+arg_name+'Count);'\
                '\n    if (*'+arg_name+'Count > 0) {'\
                '\n      for (size_t i = 0; i < *'+arg_name+'Count; ++i) {'\
                '\n        '+arg_name+'[i] = '+assign+';'\
                '\n      }'\
                '\n    }'\
                '\n  }'

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # special handling for the global cef_shutdown function: in debug builds,
  # verify that every wrapper object has been destroyed before shutdown
  if name == 'cef_shutdown' and isinstance(func.parent, obj_header):
    classes = func.parent.get_classes()

    names = []
    for cls in classes:
      if cls.has_attrib('no_debugct_check'):
        continue;

      if cls.is_library_side():
        names.append(cls.get_name()+'CppToC')
      else:
        names.append(cls.get_name()+'CToCpp')

    if len(names) > 0:
      names = sorted(names)
      result += '\n#ifndef NDEBUG'\
                '\n  // Check that all wrapper objects have been destroyed'
      for name in names:
        result += '\n  DCHECK_EQ('+name+'::DebugObjCt, 0);';
      result += '\n#endif  // !NDEBUG'

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # return translation
  if retval_type != 'none':
    # has a return value
    result += '\n  // Return type: '+retval_type
    if retval_type == 'simple' or retval_type == 'bool':
      result += '\n  return _retval;'
    elif retval_type == 'string':
      # transfer ownership of the string data to the C caller
      result += '\n  return _retval.DetachToUserFree();'
    elif retval_type == 'refptr_same':
      refptr_class = retval.get_type().get_refptr_type()
      result += '\n  return '+refptr_class+'CppToC::Wrap(_retval);'
    elif retval_type == 'refptr_diff':
      refptr_class = retval.get_type().get_refptr_type()
      result += '\n  return '+refptr_class+'CToCpp::Unwrap(_retval);'

  if len(result) != result_len:
    result += '\n'

  result += '}\n'
  return wrap_code(result)
|
||||
|
||||
def make_cpptoc_function_impl(funcs, existing, prefixname, defined_names):
  """Generate implementations for every function in |funcs|.

  A function whose existing body was hand-edited (i.e. does not carry the
  AUTO-GENERATED CONTENT marker) is preserved via
  make_cpptoc_function_impl_existing(); all others are regenerated with
  make_cpptoc_function_impl_new(). Returns the concatenated source text.
  """
  pieces = []
  for func in funcs:
    capi_name = func.get_capi_name()
    if prefixname is not None:
      name = prefixname+'_'+capi_name
    else:
      name = capi_name

    value = get_next_function_impl(existing, name)
    has_manual_body = (value is not None and
                       value['body'].find('// AUTO-GENERATED CONTENT') < 0)
    if has_manual_body:
      # an implementation exists that was not auto-generated
      pieces.append(
          make_cpptoc_function_impl_existing(name, func, value, defined_names))
    else:
      pieces.append(make_cpptoc_function_impl_new(name, func, defined_names))

  return ''.join(pieces)
|
||||
|
||||
def make_cpptoc_class_impl(header, clsname, impl):
  """Return the complete cpptoc .cc file content for class |clsname|.

  |impl| is the current on-disk file content; hand-edited function bodies
  found in it are preserved. Note the ordering is significant:
  |defined_names| is populated by get_class() and then extended with the
  current class's struct name before static functions are generated.
  """
  # structure names that have already been defined
  defined_names = header.get_defined_structs()

  # retrieve the class and populate the defined names
  cls = header.get_class(clsname, defined_names)
  if cls is None:
    raise Exception('Class does not exist: '+clsname)

  capiname = cls.get_capi_name()
  prefixname = get_capi_name(clsname[3:], False)

  # retrieve the existing virtual function implementations
  existing = get_function_impls(impl, 'CEF_CALLBACK')

  # generate virtual functions
  virtualimpl = make_cpptoc_function_impl(cls.get_virtual_funcs(), existing, prefixname, defined_names)
  if len(virtualimpl) > 0:
    virtualimpl = '\n// MEMBER FUNCTIONS - Body may be edited by hand.\n\n'+virtualimpl

  # the current class is already defined for static functions
  defined_names.append(cls.get_capi_name())

  # retrieve the existing static function implementations
  existing = get_function_impls(impl, 'CEF_EXPORT')

  # generate static functions
  staticimpl = make_cpptoc_function_impl(cls.get_static_funcs(), existing, None, defined_names)
  if len(staticimpl) > 0:
    staticimpl = '\n// GLOBAL FUNCTIONS - Body may be edited by hand.\n\n'+staticimpl

  resultingimpl = staticimpl + virtualimpl

  # determine what includes are required by identifying what translation
  # classes are being used
  includes = format_translation_includes(resultingimpl)

  # build the final output
  result = get_copyright()

  result += includes+'\n'+resultingimpl+'\n'

  # constructor that wires every virtual method into the C API struct
  const = '// CONSTRUCTOR - Do not edit by hand.\n\n'+ \
          clsname+'CppToC::'+clsname+'CppToC('+clsname+'* cls)\n'+ \
          '    : CefCppToC<'+clsname+'CppToC, '+clsname+', '+capiname+'>(cls) '+ \
          '{\n';

  funcs = cls.get_virtual_funcs()
  for func in funcs:
    name = func.get_capi_name()
    const += '  struct_.struct_.'+name+' = '+prefixname+'_'+name+';\n'

  # debug-only object counter used by cef_shutdown leak checks
  const += '}\n\n'+ \
           '#ifndef NDEBUG\n'+ \
           'template<> long CefCppToC<'+clsname+'CppToC, '+clsname+', '+capiname+'>::DebugObjCt = 0;\n'+ \
           '#endif\n'
  result += wrap_code(const)

  return result
|
||||
|
||||
def make_cpptoc_global_impl(header, impl):
  """Return the cpptoc .cc file content for the global (non-class) functions.

  |impl| is the current on-disk file content; hand-edited bodies found in
  it are preserved by make_cpptoc_function_impl().
  """
  # Names of structures that have already been defined.
  defined_names = header.get_defined_structs()

  # Existing (possibly hand-edited) global function implementations.
  existing = get_function_impls(impl, 'CEF_EXPORT')

  # Regenerate the global function implementations.
  body = make_cpptoc_function_impl(header.get_funcs(), existing, None,
                                   defined_names)
  if len(body) > 0:
    body = '\n// GLOBAL FUNCTIONS - Body may be edited by hand.\n\n'+body

  # Include the class and C API headers for each file that declares a
  # global function, skipping duplicates while preserving order.
  includes = ''
  seen_filenames = []
  for func in header.get_funcs():
    filename = func.get_file_name()
    if filename in seen_filenames:
      continue
    seen_filenames.append(filename)
    includes += '#include "include/'+func.get_file_name()+'"\n' \
                '#include "include/capi/'+func.get_capi_file_name()+'"\n'

  # Add whatever includes the translation classes used in the body require.
  includes += format_translation_includes(body)

  # Assemble the final file content.
  return get_copyright()+includes+'\n'+body
|
||||
|
||||
def write_cpptoc_impl(header, clsname, dir, backup):
  """Write the cpptoc implementation file if its content has changed.

  When |clsname| is None, |dir| is the full path of the global
  implementation file; otherwise the file name is derived from the class
  name inside |dir|. When |backup| is true an existing file is backed up
  before being overwritten. Returns True if the file was (re)written.
  """
  if clsname is None:
    # Global implementation file: |dir| is already the target path.
    target = dir
  else:
    # Class implementation file derived from the class name.
    target = dir+os.sep+get_capi_name(clsname[3:], False)+'_cpptoc.cc'

  oldcontents = read_file(target) if path_exists(target) else ''

  if clsname is None:
    newcontents = make_cpptoc_global_impl(header, oldcontents)
  else:
    newcontents = make_cpptoc_class_impl(header, clsname, oldcontents)

  if newcontents == oldcontents:
    return False

  if backup and oldcontents != '':
    backup_file(target)
  write_file(target, newcontents)
  return True
|
||||
|
||||
|
||||
# test the module: parse a header file and print the generated cpptoc
# class implementation to stdout, preserving hand edits found in the
# existing implementation file
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 4:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname> <existing_impl>')
    sys.exit()

  # create the header object
  header = obj_header()
  header.add_file(sys.argv[1])

  # read the existing implementation file into memory
  # (Python 2 exception syntax; the file handle is closed only on success)
  try:
    f = open(sys.argv[3], 'r')
    data = f.read()
  except IOError, (errno, strerror):
    raise Exception('Failed to read file '+sys.argv[3]+': '+strerror)
  else:
    f.close()

  # dump the result to stdout
  sys.stdout.write(make_cpptoc_class_impl(header, sys.argv[2], data))
|
122
tools/make_ctocpp_header.py
Normal file
122
tools/make_ctocpp_header.py
Normal file
@@ -0,0 +1,122 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from cef_parser import *
|
||||
|
||||
def make_ctocpp_header(header, clsname):
  """Return the complete ctocpp header (.h) file content for |clsname|.

  Client-side classes may only be accessed DLL-side; library-side classes
  only wrapper-side. The generated guard macros and warnings reflect that.
  NOTE(review): whitespace before the trailing '//' comments in the emitted
  C++ literals was reconstructed from convention — confirm against the
  canonical source.
  """
  cls = header.get_class(clsname)
  if cls is None:
    raise Exception('Class does not exist: '+clsname)

  clientside = cls.is_client_side()
  defname = string.upper(get_capi_name(clsname[3:], False))
  capiname = cls.get_capi_name()

  result = get_copyright()

  result += '#ifndef CEF_LIBCEF_DLL_CTOCPP_'+defname+'_CTOCPP_H_\n'+ \
            '#define CEF_LIBCEF_DLL_CTOCPP_'+defname+'_CTOCPP_H_\n' + \
            '#pragma once\n'

  # restrict access to the correct side of the DLL boundary
  if clientside:
    result += """
#ifndef BUILDING_CEF_SHARED
#pragma message("Warning: "__FILE__" may be accessed DLL-side only")
#else  // BUILDING_CEF_SHARED
"""
  else:
    result += """
#ifndef USING_CEF_SHARED
#pragma message("Warning: "__FILE__" may be accessed wrapper-side only")
#else  // USING_CEF_SHARED
"""

  # build the function body
  func_body = ''
  funcs = cls.get_virtual_funcs()
  for func in funcs:
    func_body += '  virtual '+func.get_cpp_proto()+' OVERRIDE;\n'

  # include standard headers
  # (find() > 0 is safe here because every generated line starts with
  # '  virtual ', so a match can never occur at index 0)
  if func_body.find('std::map') > 0 or func_body.find('std::multimap') > 0:
    result += '\n#include <map>'
  if func_body.find('std::vector') > 0:
    result += '\n#include <vector>'

  # include the headers for this class
  result += '\n#include "include/'+cls.get_file_name()+'"'+ \
            '\n#include "include/capi/'+cls.get_capi_file_name()+'"\n'

  # include headers for any forward declared classes that are not in the same file
  declares = cls.get_forward_declares()
  for declare in declares:
    dcls = header.get_class(declare)
    if dcls.get_file_name() != cls.get_file_name():
      result += '#include "include/'+dcls.get_file_name()+'"\n' \
                '#include "include/capi/'+dcls.get_capi_file_name()+'"\n'

  result += """#include "libcef_dll/ctocpp/ctocpp.h"

// Wrap a C structure with a C++ class.
"""

  if clientside:
    result += '// This class may be instantiated and accessed DLL-side only.\n'
  else:
    result += '// This class may be instantiated and accessed wrapper-side only.\n'

  # class declaration wrapping the C structure
  result += 'class '+clsname+'CToCpp\n'+ \
            '    : public CefCToCpp<'+clsname+'CToCpp, '+clsname+', '+capiname+'> {\n'+ \
            ' public:\n'+ \
            '  explicit '+clsname+'CToCpp('+capiname+'* str)\n'+ \
            '      : CefCToCpp<'+clsname+'CToCpp, '+clsname+', '+capiname+'>(str) {}\n'+ \
            '  virtual ~'+clsname+'CToCpp() {}\n\n'+ \
            '  // '+clsname+' methods\n';

  result += func_body
  result += '};\n\n'

  if clientside:
    result += '#endif  // BUILDING_CEF_SHARED\n'
  else:
    result += '#endif  // USING_CEF_SHARED\n'

  result += '#endif  // CEF_LIBCEF_DLL_CTOCPP_'+defname+'_CTOCPP_H_\n'

  return wrap_code(result)
|
||||
|
||||
|
||||
def write_ctocpp_header(header, clsname, dir, backup):
  """Write the ctocpp header file for |clsname| if its content has changed.

  When |backup| is true an existing file is backed up before being
  overwritten. Returns True if the file was (re)written.
  """
  target = dir+os.sep+get_capi_name(clsname[3:], False)+'_ctocpp.h'

  oldcontents = read_file(target) if path_exists(target) else ''

  newcontents = make_ctocpp_header(header, clsname)
  if newcontents == oldcontents:
    return False

  if backup and oldcontents != '':
    backup_file(target)
  write_file(target, newcontents)
  return True
|
||||
|
||||
|
||||
# Exercise the module from the command line: parse a header file and print
# the generated ctocpp header for the named class to stdout.
if __name__ == "__main__":
  import sys

  argv = sys.argv
  # Require both the input header file and the target class name.
  if len(argv) >= 3:
    # Parse the input file and emit the generated header.
    parsed_header = obj_header()
    parsed_header.add_file(argv[1])
    sys.stdout.write(make_ctocpp_header(parsed_header, argv[2]))
  else:
    sys.stderr.write('Usage: '+argv[0]+' <infile> <classname>')
    sys.exit()
|
576
tools/make_ctocpp_impl.py
Normal file
576
tools/make_ctocpp_impl.py
Normal file
@@ -0,0 +1,576 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from cef_parser import *
|
||||
|
||||
def make_ctocpp_impl_proto(clsname, name, func, parts):
  """Return the C++ function prototype string for |name|.

  Global functions (|clsname| is None) are prefixed with CEF_GLOBAL.
  Member functions are qualified with the class name; virtual methods are
  implemented on the CToCpp wrapper class and carry a trailing 'const'
  qualifier when the wrapped method is const.
  """
  const = ''

  if clsname is None:
    proto = 'CEF_GLOBAL '+parts['retval']+' '
  else:
    proto = parts['retval']+' '+clsname
    # only virtual methods live on the CToCpp wrapper class
    if isinstance(func, obj_function_virtual):
      proto += 'CToCpp'
      if func.is_const():
        const = ' const'

    proto += '::'

  proto += name+'('+string.join(parts['args'], ', ')+')'+const
  return proto
|
||||
|
||||
def make_ctocpp_function_impl_existing(clsname, name, func, impl):
  """Return the implementation for |name| keeping its hand-edited body."""
  notify(name+' has manual edits')

  # Retrieve the C++ prototype parts.
  parts = func.get_cpp_parts(True)

  # Warn when the regenerated prototype no longer matches the one the
  # manual body was written against.
  changes = format_translation_changes(impl, parts)
  if len(changes) > 0:
    notify(name+' prototype changed')

  proto = wrap_code(make_ctocpp_impl_proto(clsname, name, func, parts))
  return proto+'{'+changes+impl['body']+'\n}\n'
|
||||
|
||||
def make_ctocpp_function_impl_new(clsname, name, func):
  """ Generate a brand-new CToCpp implementation body for one function.

  Emits C++ source that: verifies parameters, translates C++ arguments to
  their C API equivalents, invokes the C API function, restores by-reference
  parameters, and translates the return value. clsname is None for global
  functions. The result is passed through wrap_code() before being returned.
  """
  # build the C++ prototype
  parts = func.get_cpp_parts(True)
  result = make_ctocpp_impl_proto(clsname, name, func, parts)+' {'

  # names of arguments (or '(return value)') whose type could not be mapped
  invalid = []

  # retrieve the function arguments
  args = func.get_arguments()

  # determine the argument types
  for arg in args:
    if arg.get_arg_type() == 'invalid':
      invalid.append(arg.get_name())

  # retrieve the function return value
  retval = func.get_retval()
  retval_type = retval.get_retval_type()
  if retval_type == 'invalid':
    invalid.append('(return value)')
    retval_default = ''
  else:
    retval_default = retval.get_retval_default(False)
    # prefix with a space so it can be appended directly after 'return'
    if len(retval_default) > 0:
      retval_default = ' '+retval_default;

  # add revision check
  if func.has_attrib('revision_check'):
    result += '\n int build_revision = cef_build_revision();'\
              '\n if (build_revision != CEF_REVISION) {'\
              '\n // The libcef build revision does not match the CEF API revision.'\
              '\n DCHECK(false);'\
              '\n return'+retval_default+';'\
              '\n }\n'

  if isinstance(func, obj_function_virtual):
    # add the structure size check
    result += '\n if (CEF_MEMBER_MISSING(struct_, '+func.get_capi_name()+'))'
    result += '\n return'+retval_default+';\n'

  if len(invalid) > 0:
    notify(name+' could not be autogenerated')
    # code could not be auto-generated; emit a stub with a compile-time warning
    result += '\n // BEGIN DELETE BEFORE MODIFYING'
    result += '\n // AUTO-GENERATED CONTENT'
    result += '\n // COULD NOT IMPLEMENT DUE TO: '+string.join(invalid, ', ')
    result += '\n #pragma message("Warning: "__FILE__": '+name+' is not implemented")'
    result += '\n // END DELETE BEFORE MODIFYING'
    result += '\n}\n\n'
    return wrap_code(result)

  result += '\n // AUTO-GENERATED CONTENT - DELETE THIS COMMENT BEFORE MODIFYING\n'

  # result_len snapshots the length before each section; a trailing blank
  # line is appended only when the section actually emitted something.
  result_len = len(result)

  optional = []

  # parameter verification
  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    # skip optional params
    optional_params = arg.parent.get_attrib_list('optional_param')
    if not optional_params is None and arg_name in optional_params:
      optional.append(arg_name)
      continue

    comment = '\n // Verify param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byaddr' or arg_type == 'bool_byaddr':
      result += comment+\
                '\n DCHECK('+arg_name+');'\
                '\n if (!'+arg_name+')'\
                '\n return'+retval_default+';'
    elif arg_type == 'refptr_same' or arg_type == 'refptr_diff':
      result += comment+\
                '\n DCHECK('+arg_name+'.get());'\
                '\n if (!'+arg_name+'.get())'\
                '\n return'+retval_default+';'
    elif arg_type == 'string_byref_const':
      result += comment+\
                '\n DCHECK(!'+arg_name+'.empty());'\
                '\n if ('+arg_name+'.empty())'\
                '\n return'+retval_default+';'

    # check index params
    index_params = arg.parent.get_attrib_list('index_param')
    if not index_params is None and arg_name in index_params:
      result += comment+\
                '\n DCHECK_GE('+arg_name+', 0);'\
                '\n if ('+arg_name+' < 0)'\
                '\n return'+retval_default+';'

  if len(optional) > 0:
    result += '\n // Unverified params: '+string.join(optional,', ')

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # parameter translation: C API argument expressions, in call order
  params = []
  if isinstance(func, obj_function_virtual):
    params.append('struct_')

  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    comment = '\n // Translate param: '+arg_name+'; type: '+arg_type

    if arg_type == 'simple_byval' or arg_type == 'simple_byaddr' or \
        arg_type == 'bool_byval':
      params.append(arg_name)
    elif arg_type == 'simple_byref' or arg_type == 'simple_byref_const' or \
        arg_type == 'struct_byref_const' or arg_type == 'struct_byref':
      params.append('&'+arg_name)
    elif arg_type == 'bool_byref':
      result += comment+\
                '\n int '+arg_name+'Int = '+arg_name+';'
      params.append('&'+arg_name+'Int')
    elif arg_type == 'bool_byaddr':
      result += comment+\
                '\n int '+arg_name+'Int = '+arg_name+'?*'+arg_name+':0;'
      params.append('&'+arg_name+'Int')
    elif arg_type == 'string_byref_const':
      params.append(arg_name+'.GetStruct()')
    elif arg_type == 'string_byref':
      params.append(arg_name+'.GetWritableStruct()')
    elif arg_type == 'refptr_same':
      refptr_class = arg.get_type().get_refptr_type()
      params.append(refptr_class+'CToCpp::Unwrap('+arg_name+')')
    elif arg_type == 'refptr_diff':
      refptr_class = arg.get_type().get_refptr_type()
      params.append(refptr_class+'CppToC::Wrap('+arg_name+')')
    elif arg_type == 'refptr_same_byref' or arg_type == 'refptr_diff_byref':
      refptr_class = arg.get_type().get_refptr_type()
      refptr_struct = arg.get_type().get_result_refptr_type_root()
      if arg_type == 'refptr_same_byref':
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+')'
      else:
        assign = refptr_class+'CppToC::Wrap('+arg_name+')'
      # 'Orig' keeps the original pointer so restoration can detect changes
      result += comment+\
                '\n '+refptr_struct+'* '+arg_name+'Struct = NULL;'\
                '\n if ('+arg_name+'.get())'\
                '\n '+arg_name+'Struct = '+assign+';'\
                '\n '+refptr_struct+'* '+arg_name+'Orig = '+arg_name+'Struct;'
      params.append('&'+arg_name+'Struct')
    elif arg_type == 'string_vec_byref' or arg_type == 'string_vec_byref_const':
      result += comment+\
                '\n cef_string_list_t '+arg_name+'List = cef_string_list_alloc();'\
                '\n DCHECK('+arg_name+'List);'\
                '\n if ('+arg_name+'List)'\
                '\n transfer_string_list_contents('+arg_name+', '+arg_name+'List);'
      params.append(arg_name+'List')
    elif arg_type == 'string_map_single_byref' or arg_type == 'string_map_single_byref_const':
      result += comment+\
                '\n cef_string_map_t '+arg_name+'Map = cef_string_map_alloc();'\
                '\n DCHECK('+arg_name+'Map);'\
                '\n if ('+arg_name+'Map)'\
                '\n transfer_string_map_contents('+arg_name+', '+arg_name+'Map);'
      params.append(arg_name+'Map')
    elif arg_type == 'string_map_multi_byref' or arg_type == 'string_map_multi_byref_const':
      result += comment+\
                '\n cef_string_multimap_t '+arg_name+'Multimap = cef_string_multimap_alloc();'\
                '\n DCHECK('+arg_name+'Multimap);'\
                '\n if ('+arg_name+'Multimap)'\
                '\n transfer_string_multimap_contents('+arg_name+', '+arg_name+'Multimap);'
      params.append(arg_name+'Multimap')
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
        arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      count_func = arg.get_attrib_count_func()
      vec_type = arg.get_type().get_result_vector_type_root()
      if arg_type == 'refptr_vec_same_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+'[i])'
      elif arg_type == 'refptr_vec_diff_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Wrap('+arg_name+'[i])'
      else:
        assign = arg_name+'[i]'
      # allocate an output buffer sized to max(count hint, current size)
      result += comment+\
                '\n size_t '+arg_name+'Size = '+arg_name+'.size();'\
                '\n size_t '+arg_name+'Count = std::max('+count_func+'(), '+arg_name+'Size);'\
                '\n '+vec_type+'* '+arg_name+'List = NULL;'\
                '\n if ('+arg_name+'Count > 0) {'\
                '\n '+arg_name+'List = new '+vec_type+'['+arg_name+'Count];'\
                '\n DCHECK('+arg_name+'List);'\
                '\n if ('+arg_name+'List) {'\
                '\n memset('+arg_name+'List, 0, sizeof('+vec_type+')*'+arg_name+'Count);'\
                '\n }'\
                '\n if ('+arg_name+'List && '+arg_name+'Size > 0) {'\
                '\n for (size_t i = 0; i < '+arg_name+'Size; ++i) {'\
                '\n '+arg_name+'List[i] = '+assign+';'\
                '\n }'\
                '\n }'\
                '\n }'
      params.append('&'+arg_name+'Count')
      params.append(arg_name+'List')
    elif arg_type == 'simple_vec_byref_const' or arg_type == 'bool_vec_byref_const' or \
        arg_type == 'refptr_vec_same_byref_const' or arg_type == 'refptr_vec_diff_byref_const':
      count_func = arg.get_attrib_count_func()
      vec_type = arg.get_type().get_result_vector_type_root()
      if arg_type == 'refptr_vec_same_byref_const':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Unwrap('+arg_name+'[i])'
      elif arg_type == 'refptr_vec_diff_byref_const':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Wrap('+arg_name+'[i])'
      else:
        assign = arg_name+'[i]'
      result += comment+\
                '\n const size_t '+arg_name+'Count = '+arg_name+'.size();'\
                '\n '+vec_type+'* '+arg_name+'List = NULL;'\
                '\n if ('+arg_name+'Count > 0) {'\
                '\n '+arg_name+'List = new '+vec_type+'['+arg_name+'Count];'\
                '\n DCHECK('+arg_name+'List);'\
                '\n if ('+arg_name+'List) {'\
                '\n for (size_t i = 0; i < '+arg_name+'Count; ++i) {'\
                '\n '+arg_name+'List[i] = '+assign+';'\
                '\n }'\
                '\n }'\
                '\n }'
      params.append(arg_name+'Count')
      params.append(arg_name+'List')

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # execution
  result += '\n // Execute\n '

  if retval_type != 'none':
    # has a return value
    if retval_type == 'simple' or retval_type == 'bool':
      result += retval.get_type().get_result_simple_type_root()
    elif retval_type == 'string':
      result += 'cef_string_userfree_t'
    elif retval_type == 'refptr_same' or retval_type == 'refptr_diff':
      refptr_struct = retval.get_type().get_result_refptr_type_root()
      result += refptr_struct+'*'

    result += ' _retval = '

  if isinstance(func, obj_function_virtual):
    result += 'struct_->'
  result += func.get_capi_name()+'('

  if len(params) > 0:
    if not isinstance(func, obj_function_virtual):
      result += '\n '
    result += string.join(params,',\n ')

  result += ');\n'

  result_len = len(result)

  # parameter restoration: copy by-reference results back into C++ objects
  # and free any temporary C API containers allocated above
  for arg in args:
    arg_type = arg.get_arg_type()
    arg_name = arg.get_type().get_name()

    comment = '\n // Restore param:'+arg_name+'; type: '+arg_type

    if arg_type == 'bool_byref':
      result += comment+\
                '\n '+arg_name+' = '+arg_name+'Int?true:false;'
    elif arg_type == 'bool_byaddr':
      result += comment+\
                '\n if ('+arg_name+')'\
                '\n *'+arg_name+' = '+arg_name+'Int?true:false;'
    elif arg_type == 'refptr_same_byref' or arg_type == 'refptr_diff_byref':
      refptr_class = arg.get_type().get_refptr_type()
      refptr_struct = arg.get_type().get_result_refptr_type_root()
      if arg_type == 'refptr_same_byref':
        assign = refptr_class+'CToCpp::Wrap('+arg_name+'Struct)'
      else:
        assign = refptr_class+'CppToC::Unwrap('+arg_name+'Struct)'
      # only rewrap when the callee replaced the pointer (compare to 'Orig')
      result += comment+\
                '\n if ('+arg_name+'Struct) {'\
                '\n if ('+arg_name+'Struct != '+arg_name+'Orig) {'\
                '\n '+arg_name+' = '+assign+';'\
                '\n }'\
                '\n } else {'\
                '\n '+arg_name+' = NULL;'\
                '\n }'
    elif arg_type == 'string_vec_byref':
      result += comment+\
                '\n if ('+arg_name+'List) {'\
                '\n '+arg_name+'.clear();'\
                '\n transfer_string_list_contents('+arg_name+'List, '+arg_name+');'\
                '\n cef_string_list_free('+arg_name+'List);'\
                '\n }'
    elif arg_type == 'string_vec_byref_const':
      result += comment+\
                '\n if ('+arg_name+'List)'\
                '\n cef_string_list_free('+arg_name+'List);'
    elif arg_type == 'string_map_single_byref':
      result += comment+\
                '\n if ('+arg_name+'Map) {'\
                '\n '+arg_name+'.clear();'\
                '\n transfer_string_map_contents('+arg_name+'Map, '+arg_name+');'\
                '\n cef_string_map_free('+arg_name+'Map);'\
                '\n }'
    elif arg_type == 'string_map_single_byref_const':
      result += comment+\
                '\n if ('+arg_name+'Map)'\
                '\n cef_string_map_free('+arg_name+'Map);'
    elif arg_type == 'string_map_multi_byref':
      result += comment+\
                '\n if ('+arg_name+'Multimap) {'\
                '\n '+arg_name+'.clear();'\
                '\n transfer_string_multimap_contents('+arg_name+'Multimap, '+arg_name+');'\
                '\n cef_string_multimap_free('+arg_name+'Multimap);'\
                '\n }'
    elif arg_type == 'string_map_multi_byref_const':
      result += comment+\
                '\n if ('+arg_name+'Multimap)'\
                '\n cef_string_multimap_free('+arg_name+'Multimap);'
    elif arg_type == 'simple_vec_byref' or arg_type == 'bool_vec_byref' or \
        arg_type == 'refptr_vec_same_byref' or arg_type == 'refptr_vec_diff_byref':
      count_func = arg.get_attrib_count_func()
      vec_type = arg.get_type().get_result_vector_type_root()
      if arg_type == 'refptr_vec_same_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CToCpp::Wrap('+arg_name+'List[i])'
      elif arg_type == 'refptr_vec_diff_byref':
        refptr_class = arg.get_type().get_refptr_type()
        assign = refptr_class+'CppToC::Unwrap('+arg_name+'List[i])'
      elif arg_type == 'bool_vec_byref':
        assign = arg_name+'List[i]?true:false'
      else:
        assign = arg_name+'List[i]'
      result += comment+\
                '\n '+arg_name+'.clear();'\
                '\n if ('+arg_name+'Count > 0 && '+arg_name+'List) {'\
                '\n for (size_t i = 0; i < '+arg_name+'Count; ++i) {'\
                '\n '+arg_name+'.push_back('+assign+');'\
                '\n }'\
                '\n delete [] '+arg_name+'List;'\
                '\n }'
    elif arg_type == 'simple_vec_byref_const' or arg_type == 'bool_vec_byref_const' or \
        arg_type == 'refptr_vec_same_byref_const' or arg_type == 'refptr_vec_diff_byref_const':
      result += comment+\
                '\n if ('+arg_name+'List)'\
                '\n delete [] '+arg_name+'List;'

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # special handling for the global CefShutdown function
  if name == 'CefShutdown' and isinstance(func.parent, obj_header):
    classes = func.parent.get_classes()

    names = []
    for cls in classes:
      if cls.has_attrib('no_debugct_check'):
        continue;

      if cls.is_library_side():
        names.append(cls.get_name()+'CToCpp')
      else:
        names.append(cls.get_name()+'CppToC')

    if len(names) > 0:
      names = sorted(names)
      result += '\n#ifndef NDEBUG'\
                '\n // Check that all wrapper objects have been destroyed'
      # NOTE(review): this loop rebinds the 'name' parameter; harmless today
      # because 'name' is not read again after this point.
      for name in names:
        result += '\n DCHECK_EQ('+name+'::DebugObjCt, 0);';
      result += '\n#endif // !NDEBUG'

  if len(result) != result_len:
    result += '\n'
  result_len = len(result)

  # return translation
  if retval_type != 'none':
    # has a return value
    result += '\n // Return type: '+retval_type
    if retval_type == 'simple':
      result += '\n return _retval;'
    elif retval_type == 'bool':
      result += '\n return _retval?true:false;'
    elif retval_type == 'string':
      result += '\n CefString _retvalStr;'\
                '\n _retvalStr.AttachToUserFree(_retval);'\
                '\n return _retvalStr;'
    elif retval_type == 'refptr_same':
      refptr_class = retval.get_type().get_refptr_type()
      result += '\n return '+refptr_class+'CToCpp::Wrap(_retval);'
    elif retval_type == 'refptr_diff':
      refptr_class = retval.get_type().get_refptr_type()
      result += '\n return '+refptr_class+'CppToC::Unwrap(_retval);'

  if len(result) != result_len:
    result += '\n'

  result += '}\n'
  return wrap_code(result)
|
||||
|
||||
def make_ctocpp_function_impl(clsname, funcs, existing):
  """ Generate implementations for a list of functions, carrying over any
      hand-written (non-auto-generated) bodies found in the existing file. """
  result = ''
  for func in funcs:
    name = func.get_name()
    value = get_next_function_impl(existing, name)
    # a body without the auto-generated marker was edited by hand; keep it
    if value is not None and \
        '// AUTO-GENERATED CONTENT' not in value['body']:
      result += make_ctocpp_function_impl_existing(clsname, name, func, value)
    else:
      result += make_ctocpp_function_impl_new(clsname, name, func)
  return result
|
||||
|
||||
def make_ctocpp_class_impl(header, clsname, impl):
  """ Generate the complete CToCpp implementation file for one class. """
  cls = header.get_class(clsname)
  if cls is None:
    raise Exception('Class does not exist: '+clsname)

  capiname = cls.get_capi_name()

  # virtual methods: existing bodies are keyed by the 'ClsCToCpp::' prefix
  virtualimpl = make_ctocpp_function_impl(
      clsname, cls.get_virtual_funcs(),
      get_function_impls(impl, clsname+'CToCpp::'))
  if len(virtualimpl) > 0:
    virtualimpl = '\n// VIRTUAL METHODS - Body may be edited by hand.\n\n'+virtualimpl

  # static methods: existing bodies are keyed by the plain 'Cls::' prefix
  staticimpl = make_ctocpp_function_impl(
      clsname, cls.get_static_funcs(),
      get_function_impls(impl, clsname+'::'))
  if len(staticimpl) > 0:
    staticimpl = '\n// STATIC METHODS - Body may be edited by hand.\n\n'+staticimpl

  body = staticimpl + virtualimpl

  # required includes follow from the translation classes used in the body
  includes = format_translation_includes(body)

  pieces = [get_copyright(), includes, '\n', body, '\n']
  pieces.append(wrap_code('#ifndef NDEBUG\n'+ \
      'template<> long CefCToCpp<'+clsname+'CToCpp, '+clsname+', '+capiname+'>::DebugObjCt = 0;\n'+ \
      '#endif\n'))
  return ''.join(pieces)
|
||||
|
||||
def make_ctocpp_global_impl(header, impl):
  """ Generate the CToCpp implementation file for global functions. """
  # existing hand-edited bodies are keyed by the CEF_GLOBAL prefix
  existing = get_function_impls(impl, 'CEF_GLOBAL')

  # generate global function implementations (avoid rebinding the 'impl'
  # parameter, which the original shadowed after this point)
  funcimpl = make_ctocpp_function_impl(None, header.get_funcs(), existing)
  if len(funcimpl) > 0:
    funcimpl = '\n// GLOBAL METHODS - Body may be edited by hand.\n\n'+funcimpl

  # pull in the C++ and C API headers for every file declaring a global,
  # emitting each pair only once
  includes = ''
  seen = []
  for func in header.get_funcs():
    filename = func.get_file_name()
    if filename not in seen:
      includes += '#include "include/'+func.get_file_name()+'"\n' \
                  '#include "include/capi/'+func.get_capi_file_name()+'"\n'
      seen.append(filename)

  # plus whatever translation classes the generated bodies reference
  includes += format_translation_includes(funcimpl)

  return get_copyright()+includes+ \
      '\n// Define used to facilitate parsing.\n#define CEF_GLOBAL\n\n'+funcimpl
|
||||
|
||||
def write_ctocpp_impl(header, clsname, dir, backup):
  """ Write the CToCpp implementation file for a class (or the global file
      when clsname is None). Returns True if the file content changed. """
  if clsname is None:
    # global file: the caller passes the full target path in 'dir'
    target = dir
  else:
    # class file: derive the file name from the class name
    target = dir+os.sep+get_capi_name(clsname[3:], False)+'_ctocpp.cc'

  oldcontents = read_file(target) if path_exists(target) else ''

  if clsname is None:
    newcontents = make_ctocpp_global_impl(header, oldcontents)
  else:
    newcontents = make_ctocpp_class_impl(header, clsname, oldcontents)

  if newcontents == oldcontents:
    return False

  # preserve any pre-existing file before overwriting it
  if backup and oldcontents != '':
    backup_file(target)
  write_file(target, newcontents)
  return True
|
||||
|
||||
|
||||
# test the module
if __name__ == "__main__":
  import sys

  # verify that the correct number of command-line arguments are provided
  if len(sys.argv) < 4:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile> <classname> <existing_impl>')
    sys.exit()

  # create the header object and parse the C API header file
  header = obj_header()
  header.add_file(sys.argv[1])

  # read the existing implementation file into memory so hand edits survive
  try:
    f = open(sys.argv[3], 'r')
    data = f.read()
  except IOError, (errno, strerror):
    raise Exception('Failed to read file '+sys.argv[3]+': '+strerror)
  else:
    # only reached when the read succeeded; close the handle
    f.close()

  # dump the regenerated implementation to stdout
  sys.stdout.write(make_ctocpp_class_impl(header, sys.argv[2], data))
|
2
tools/make_distrib.bat
Normal file
2
tools/make_distrib.bat
Normal file
@@ -0,0 +1,2 @@
|
||||
@echo off
REM Build the CEF binary distribution using the in-tree Python 2.6,
REM forwarding any additional command-line arguments to make_distrib.py.
..\..\third_party\python_26\python.exe make_distrib.py --output-dir ..\binary_distrib\ %*
|
468
tools/make_distrib.py
Normal file
468
tools/make_distrib.py
Normal file
@@ -0,0 +1,468 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from date_util import *
|
||||
from file_util import *
|
||||
from gclient_util import *
|
||||
from optparse import OptionParser
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
import subprocess
|
||||
from svn_util import *
|
||||
import sys
|
||||
import zipfile
|
||||
|
||||
def create_archive(input_dir, zip_file):
  """ Creates a zip archive of the specified input directory. """
  archive = zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED)
  # archive names are computed relative to the parent of input_dir so the
  # top-level directory name itself is preserved inside the zip
  base = os.path.join(input_dir, os.pardir)

  def _add_tree(path):
    # depth-first walk, recursing into subdirectories as they are found
    for entry in os.listdir(path):
      full_path = os.path.join(path, entry)
      if os.path.isdir(full_path):
        _add_tree(full_path)
      else:
        archive.write(full_path, os.path.relpath(full_path, base))

  _add_tree(input_dir)
  archive.close()
|
||||
|
||||
def create_readme(src, output_dir, cef_url, cef_rev, cef_ver, chromium_url, \
                  chromium_rev, chromium_ver, date):
  """ Creates the README.TXT file by expanding template placeholders. """
  replacements = (
    ('$CEF_URL$', cef_url),
    ('$CEF_REV$', cef_rev),
    ('$CEF_VER$', cef_ver),
    ('$CHROMIUM_URL$', chromium_url),
    ('$CHROMIUM_REV$', chromium_rev),
    ('$CHROMIUM_VER$', chromium_ver),
    ('$DATE$', date),
  )
  data = read_file(src)
  for placeholder, value in replacements:
    data = data.replace(placeholder, value)
  write_file(os.path.join(output_dir, 'README.txt'), data)
  if not options.quiet:
    sys.stdout.write('Creating README.TXT file.\n')
|
||||
|
||||
def eval_file(src):
  """ Loads and evaluates the contents of the specified file.

  The file must contain a single Python literal/expression. Builtins are
  disabled in the eval environment, which limits but does not eliminate the
  risk of eval -- only use this on trusted, repository-controlled files.
  """
  return eval(read_file(src), {'__builtins__': None}, None)
|
||||
|
||||
def transfer_gypi_files(src_dir, gypi_paths, gypi_path_prefix, dst_dir, quiet):
  """ Transfer files from one location to another. """
  for path in gypi_paths:
    # gyp include directives are not real files; skip them
    if path.startswith('<@'):
      continue
    src = os.path.join(src_dir, path)
    # strip the gypi prefix so paths are rooted at the destination directory
    dst = os.path.join(dst_dir, path.replace(gypi_path_prefix, ''))
    make_dir(os.path.dirname(dst), quiet)
    copy_file(src, dst, quiet)
|
||||
|
||||
def normalize_headers(file, new_path = ''):
  """ Normalize headers post-processing. Remove the path component from any
      project include directives. """
  # matches project includes (anything not already under include/) and
  # captures just the file name
  pattern = r'''#include \"(?!include\/)[a-zA-Z0-9_\/]+\/+([a-zA-Z0-9_\.]+)\"'''
  replacement = ("// Include path modified for CEF Binary Distribution.\n"
                 "#include \""+new_path+"\\1\"")
  write_file(file, re.sub(pattern, replacement, read_file(file)))
|
||||
|
||||
def transfer_files(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
  """ Transfer files based on the specified configuration.

  transfer_cfg is a Python-literal file evaluating to a list of dicts with
  keys 'source', 'target' and optionally 'post-process'/'new_header_path'.
  A missing config file is silently ignored.
  """
  if not path_exists(transfer_cfg):
    return

  configs = eval_file(transfer_cfg)
  for cfg in configs:
    dst = os.path.join(output_dir, cfg['target'])

    # perform a copy if source is specified
    if not cfg['source'] is None:
      src = os.path.join(cef_dir, cfg['source'])
      dst_path = os.path.dirname(dst)
      make_dir(dst_path, quiet)
      copy_file(src, dst, quiet)

      # place a readme file in the destination directory
      readme = os.path.join(dst_path, 'README-TRANSFER.txt')
      if not path_exists(readme):
        copy_file(os.path.join(script_dir, 'distrib/README-TRANSFER.txt'), readme)
      # record the transferred source path; use 'with' so the append handle
      # is closed promptly (the original leaked it)
      with open(readme, 'ab') as fp:
        fp.write(cfg['source']+"\n")

    # perform any required post-processing
    if 'post-process' in cfg:
      post = cfg['post-process']
      if post == 'normalize_headers':
        new_path = ''
        # 'in' replaces dict.has_key(), which is deprecated and removed in
        # Python 3; behavior is identical
        if 'new_header_path' in cfg:
          new_path = cfg['new_header_path']
        normalize_headers(dst, new_path)
|
||||
|
||||
def generate_msvs_projects(version):
  """ Generate MSVS projects for the specified version. """
  sys.stdout.write('Generating '+version+' project files...')
  # gyp reads the target MSVS version from the environment
  os.environ['GYP_MSVS_VERSION'] = version
  gyp_cmd = ['python', 'tools/gyp_cef',
             os.path.relpath(os.path.join(output_dir, 'cefclient.gyp'), cef_dir)]
  RunAction(cef_dir, gyp_cmd)
  # gyp always writes cefclient.sln; rename it to a per-version file name
  generated = os.path.relpath(os.path.join(output_dir, 'cefclient.sln'))
  move_file(generated,
            os.path.relpath(os.path.join(output_dir, 'cefclient'+version+'.sln')))
|
||||
|
||||
def fix_msvs_projects():
  """ Fix the output directory path in all .vcproj and .vcxproj files. """
  for pattern in ('*.vcproj', '*.vcxproj'):
    for proj in get_files(os.path.join(output_dir, pattern)):
      # drop the source-tree build prefix so projects work in the distrib
      contents = read_file(proj)
      write_file(proj, contents.replace('../../..\\build\\', ''))
|
||||
|
||||
def run(command_line, working_dir):
  """ Run a command in |working_dir| and raise CalledProcessError on a
  non-zero exit code. Returns 0 on success. """
  sys.stdout.write('-------- Running "'+command_line+'" in "'+
                   working_dir+'"...'+"\n")
  # Double the backslashes so shlex does not treat them as escape
  # characters (Windows paths would otherwise be mangled).
  args = shlex.split(command_line.replace('\\', '\\\\'))
  use_shell = sys.platform == 'win32'
  return subprocess.check_call(args, cwd=working_dir, env=os.environ,
                               shell=use_shell)
|
||||
|
||||
# This file is a command-line tool; refuse to be imported as a module.
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# Description shown by --help.
disc = """
This utility builds the CEF Binary Distribution.
"""

# Command-line option parsing.
parser = OptionParser(description=disc)
parser.add_option('--output-dir', dest='outputdir', metavar='DIR',
                  help='output directory [required]')
parser.add_option('--allow-partial',
                  action='store_true', dest='allowpartial', default=False,
                  help='allow creation of partial distributions')
parser.add_option('--no-symbols',
                  action='store_true', dest='nosymbols', default=False,
                  help='do not create symbol files')
parser.add_option('-q', '--quiet',
                  action='store_true', dest='quiet', default=False,
                  help='do not output detailed status information')
(options, args) = parser.parse_args()

# --output-dir is required; print usage and bail when it is missing.
if options.outputdir is None:
  parser.print_help(sys.stdout)
  sys.exit()
|
||||
|
||||
# Directory containing this script.
script_dir = os.path.dirname(__file__)

# Root CEF directory (parent of the tools directory).
cef_dir = os.path.abspath(os.path.join(script_dir, os.pardir))

# Chromium src directory (parent of the CEF directory).
src_dir = os.path.abspath(os.path.join(cef_dir, os.pardir))

# Retrieve URL and revision information for the CEF and Chromium checkouts.
cef_info = get_svn_info(cef_dir)
cef_url = cef_info['url']
cef_rev = cef_info['revision']
chromium_info = get_svn_info(os.path.join(cef_dir, os.pardir))
chromium_url = chromium_info['url']
chromium_rev = chromium_info['revision']
date = get_date()

# Read and parse the Chrome version file (key=value pairs, one per line).
chrome = {}
for line in read_file(os.path.join(cef_dir, '../chrome/VERSION')).split("\n"):
  parts = line.split('=', 1)
  if len(parts) == 2:
    chrome[parts[0]] = parts[1]

# CEF version is 3.<chrome build>.<cef revision>.
cef_ver = '3.'+chrome['BUILD']+'.'+cef_rev
chromium_ver = chrome['MAJOR']+'.'+chrome['MINOR']+'.'+chrome['BUILD']+'.'+chrome['PATCH']

# Map sys.platform onto the distribution platform name.
if sys.platform == 'win32':
  platform = 'windows'
elif sys.platform == 'darwin':
  platform = 'macosx'
elif sys.platform.startswith('linux'):
  platform = 'linux'
else:
  platform = ''

# Recreate the versioned output directory from scratch.
output_dir = os.path.abspath(os.path.join(options.outputdir,
                                          'cef_binary_'+cef_ver+'_'+platform))
remove_dir(output_dir, options.quiet)
make_dir(output_dir, options.quiet)
|
||||
|
||||
if not options.nosymbols:
  # Recreate the versioned symbol output directory from scratch.
  symbol_dir = os.path.abspath(os.path.join(options.outputdir,
                               'cef_binary_'+cef_ver+'_'+platform+'_symbols'))
  remove_dir(symbol_dir, options.quiet)
  make_dir(symbol_dir, options.quiet)

# Transfer the LICENSE.txt file.
copy_file(os.path.join(cef_dir, 'LICENSE.txt'), output_dir, options.quiet)

# Variables list from the autogenerated cef_paths.gypi file.
cef_paths = eval_file(os.path.join(cef_dir, 'cef_paths.gypi'))['variables']

# Variables list from the manually edited cef_paths2.gypi file.
cef_paths2 = eval_file(os.path.join(cef_dir, 'cef_paths2.gypi'))['variables']

# Create the include, cefclient and libcef_dll_wrapper directories.
include_dir = os.path.join(output_dir, 'include')
make_dir(include_dir, options.quiet)
cefclient_dir = os.path.join(output_dir, 'cefclient')
make_dir(cefclient_dir, options.quiet)
wrapper_dir = os.path.join(output_dir, 'libcef_dll')
make_dir(wrapper_dir, options.quiet)

# Transfer the common include files (hand-written and autogenerated).
for include_paths in (cef_paths2['includes_common'],
                      cef_paths2['includes_capi'],
                      cef_paths2['includes_wrapper'],
                      cef_paths['autogen_cpp_includes'],
                      cef_paths['autogen_capi_includes']):
  transfer_gypi_files(cef_dir, include_paths, 'include/', include_dir,
                      options.quiet)

# Transfer the common cefclient files.
transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_common'],
                    'tests/cefclient/', cefclient_dir, options.quiet)

# Transfer the common libcef_dll_wrapper files.
transfer_gypi_files(cef_dir, cef_paths2['libcef_dll_wrapper_sources_common'],
                    'libcef_dll/', wrapper_dir, options.quiet)
transfer_gypi_files(cef_dir, cef_paths['autogen_client_side'],
                    'libcef_dll/', wrapper_dir, options.quiet)

# Transfer the gyp files, rewriting cefclient paths to match the flattened
# distribution layout.
copy_file(os.path.join(script_dir, 'distrib/cefclient.gyp'), output_dir, options.quiet)
paths_gypi = os.path.join(cef_dir, 'cef_paths2.gypi')
data = read_file(paths_gypi)
data = data.replace('tests/cefclient/', 'cefclient/')
write_file(os.path.join(output_dir, 'cef_paths2.gypi'), data)
copy_file(os.path.join(cef_dir, 'cef_paths.gypi'),
          os.path.join(output_dir, 'cef_paths.gypi'), options.quiet)

# Transfer additional files listed in transfer.cfg.
transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib/transfer.cfg'),
               output_dir, options.quiet)
|
||||
|
||||
if platform == 'windows':
  # Create the README.TXT file.
  create_readme(os.path.join(script_dir, 'distrib/win/README.txt'), output_dir,
                cef_url, cef_rev, cef_ver, chromium_url, chromium_rev,
                chromium_ver, date)

  # Transfer Windows-specific include files.
  transfer_gypi_files(cef_dir, cef_paths2['includes_win'],
                      'include/', include_dir, options.quiet)

  # Transfer Windows-specific cefclient files.
  transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_win'],
                      'tests/cefclient/', cefclient_dir, options.quiet)

  # Transfer build/Debug files. Test the directory the files are actually
  # copied from (the previous check of cef_dir/Debug did not match the
  # build output location, unlike the mac and linux branches).
  binary_dir = os.path.join(src_dir, 'build/Debug')
  if not options.allowpartial or path_exists(binary_dir):
    dst_dir = os.path.join(output_dir, 'Debug')
    make_dir(dst_dir, options.quiet)
    copy_files(os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
    copy_files(os.path.join(binary_dir, '*.dll'), dst_dir, options.quiet)
    copy_file(os.path.join(binary_dir, 'cefclient.exe'), dst_dir, options.quiet)
    copy_file(os.path.join(binary_dir, 'cef.pak'), dst_dir, options.quiet)
    copy_dir(os.path.join(binary_dir, 'locales'), os.path.join(dst_dir, 'locales'),
             options.quiet)

    # Transfer lib/Debug files.
    dst_dir = os.path.join(output_dir, 'lib/Debug')
    make_dir(dst_dir, options.quiet)
    copy_file(os.path.join(binary_dir, 'lib/libcef.lib'), dst_dir, options.quiet)
  else:
    sys.stderr.write("No Debug build files.\n")

  # Transfer build/Release files.
  binary_dir = os.path.join(src_dir, 'build/Release')
  if not options.allowpartial or path_exists(binary_dir):
    dst_dir = os.path.join(output_dir, 'Release')
    make_dir(dst_dir, options.quiet)
    copy_files(os.path.join(script_dir, 'distrib/win/*.dll'), dst_dir, options.quiet)
    copy_files(os.path.join(binary_dir, '*.dll'), dst_dir, options.quiet)
    copy_file(os.path.join(binary_dir, 'cefclient.exe'), dst_dir, options.quiet)
    copy_file(os.path.join(binary_dir, 'cef.pak'), dst_dir, options.quiet)
    copy_dir(os.path.join(binary_dir, 'locales'), os.path.join(dst_dir, 'locales'),
             options.quiet)

    # Transfer lib/Release files.
    dst_dir = os.path.join(output_dir, 'lib/Release')
    make_dir(dst_dir, options.quiet)
    copy_file(os.path.join(binary_dir, 'lib/libcef.lib'), dst_dir, options.quiet)

    if not options.nosymbols:
      # Transfer symbol files.
      copy_file(os.path.join(binary_dir, 'libcef.pdb'), symbol_dir, options.quiet)
  else:
    sys.stderr.write("No Release build files.\n")

  # Generate doc files. os.popen() alone returns without waiting for the
  # child; closing the pipe blocks until the batch file has finished so the
  # docs exist before the transfer below.
  os.popen('make_cppdocs.bat '+cef_rev).close()

  # Transfer doc files. Use a dedicated variable so the global src_dir
  # (the Chromium src checkout path) is not clobbered.
  docs_src_dir = os.path.join(cef_dir, 'docs')
  if path_exists(docs_src_dir):
    copy_dir(docs_src_dir, os.path.join(output_dir, 'docs'), options.quiet)

  # Transfer additional files, if any.
  transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib/win/transfer.cfg'),
                 output_dir, options.quiet)

  # Generate project files for each supported MSVS version.
  generate_msvs_projects('2005')
  generate_msvs_projects('2008')
  generate_msvs_projects('2010')
  fix_msvs_projects()

elif platform == 'macosx':
  # Create the README.TXT file.
  create_readme(os.path.join(script_dir, 'distrib/mac/README.txt'), output_dir,
                cef_url, cef_rev, cef_ver, chromium_url, chromium_rev,
                chromium_ver, date)

  # Transfer Mac-specific include files.
  transfer_gypi_files(cef_dir, cef_paths2['includes_mac'],
                      'include/', include_dir, options.quiet)

  # Transfer Mac-specific cefclient files.
  transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_mac'],
                      'tests/cefclient/', cefclient_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_mac_helper'],
                      'tests/cefclient/', cefclient_dir, options.quiet)

  # Transfer cefclient/mac files.
  copy_dir(os.path.join(cef_dir, 'tests/cefclient/mac/'),
           os.path.join(output_dir, 'cefclient/mac/'), options.quiet)

  # Transfer xcodebuild/Debug files.
  build_dir = os.path.join(src_dir, 'xcodebuild/Debug')
  if not options.allowpartial or path_exists(build_dir):
    dst_dir = os.path.join(output_dir, 'Debug')
    make_dir(dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'ffmpegsumo.so'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'libcef.dylib'), dst_dir, options.quiet)
  else:
    build_dir = None

  # Transfer xcodebuild/Release files.
  build_dir = os.path.join(src_dir, 'xcodebuild/Release')
  if not options.allowpartial or path_exists(build_dir):
    dst_dir = os.path.join(output_dir, 'Release')
    make_dir(dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'ffmpegsumo.so'), dst_dir, options.quiet)
    copy_file(os.path.join(build_dir, 'libcef.dylib'), dst_dir, options.quiet)

    if not options.nosymbols:
      # Create the real dSYM file from the "fake" dSYM file.
      sys.stdout.write("Creating the real dSYM file...\n")
      src_path = os.path.join(
          build_dir, 'libcef.dylib.dSYM/Contents/Resources/DWARF/libcef.dylib')
      dst_path = os.path.join(symbol_dir, 'libcef.dylib.dSYM')
      run('dsymutil '+src_path+' -o '+dst_path, cef_dir)
  else:
    build_dir = None

  if build_dir is not None:
    # Transfer resource files from the Release app bundle when available.
    dst_dir = os.path.join(output_dir, 'Resources')
    make_dir(dst_dir, options.quiet)
    copy_files(os.path.join(
        build_dir,
        'cefclient.app/Contents/Frameworks/Chromium Embedded Framework.framework/Resources/*.*'),
        dst_dir, options.quiet)

  # Transfer additional files, if any.
  transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib/mac/transfer.cfg'),
                 output_dir, options.quiet)

  # Generate Xcode project files.
  sys.stdout.write('Generating Xcode project files...')
  gyper = ['python', 'tools/gyp_cef',
           os.path.relpath(os.path.join(output_dir, 'cefclient.gyp'), cef_dir)]
  RunAction(cef_dir, gyper)

  # Post-process the Xcode project to fix file paths for the distribution
  # layout.
  src_file = os.path.join(output_dir, 'cefclient.xcodeproj/project.pbxproj')
  data = read_file(src_file)
  data = data.replace('../../../build/mac/', 'tools/')
  data = data.replace('../../../build', 'build')
  data = data.replace('../../../xcodebuild', 'xcodebuild')
  write_file(src_file, data)

elif platform == 'linux':
  linux_build_dir = os.path.join(cef_dir, os.pardir, 'out')

  # Create the README.TXT file.
  create_readme(os.path.join(script_dir, 'distrib/linux/README.txt'), output_dir,
                cef_url, cef_rev, cef_ver, chromium_url, chromium_rev,
                chromium_ver, date)

  # Transfer out/Debug files.
  if not options.allowpartial or path_exists(os.path.join(linux_build_dir, 'Debug')):
    dst_dir = os.path.join(output_dir, 'Debug')
    make_dir(dst_dir, options.quiet)
    copy_dir(os.path.join(linux_build_dir, 'Debug/lib.target'),
             os.path.join(dst_dir, 'lib.target'), options.quiet)
    copy_file(os.path.join(linux_build_dir, 'Debug/cefclient'), dst_dir, options.quiet)
    copy_file(os.path.join(linux_build_dir, 'Debug/cef.pak'), dst_dir, options.quiet)
    copy_dir(os.path.join(linux_build_dir, 'Debug/locales'),
             os.path.join(dst_dir, 'locales'), options.quiet)
  else:
    sys.stderr.write("No Debug build files.\n")

  # Transfer out/Release files.
  if not options.allowpartial or path_exists(os.path.join(linux_build_dir, 'Release')):
    dst_dir = os.path.join(output_dir, 'Release')
    make_dir(dst_dir, options.quiet)
    copy_dir(os.path.join(linux_build_dir, 'Release/lib.target'),
             os.path.join(dst_dir, 'lib.target'), options.quiet)
    copy_file(os.path.join(linux_build_dir, 'Release/cefclient'), dst_dir, options.quiet)
    copy_file(os.path.join(linux_build_dir, 'Release/cef.pak'), dst_dir, options.quiet)
    copy_dir(os.path.join(linux_build_dir, 'Release/locales'),
             os.path.join(dst_dir, 'locales'), options.quiet)
  else:
    sys.stderr.write("No Release build files.\n")

  # Transfer Linux-specific include files.
  transfer_gypi_files(cef_dir, cef_paths2['includes_linux'],
                      'include/', include_dir, options.quiet)

  # Transfer Linux-specific cefclient files.
  transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_linux'],
                      'tests/cefclient/', cefclient_dir, options.quiet)

  # Transfer additional files, if any.
  transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib/linux/transfer.cfg'),
                 output_dir, options.quiet)
|
||||
|
||||
# Package the binary distribution directory as <dirname>.zip next to it.
zip_name = os.path.split(output_dir)[1] + '.zip'
if not options.quiet:
  sys.stdout.write('Creating '+zip_name+"...\n")
create_archive(output_dir, os.path.join(output_dir, os.pardir, zip_name))

if not options.nosymbols:
  # Package the symbol directory the same way.
  zip_name = os.path.split(symbol_dir)[1] + '.zip'
  if not options.quiet:
    sys.stdout.write('Creating '+zip_name+"...\n")
  create_archive(symbol_dir, os.path.join(symbol_dir, os.pardir, zip_name))
|
2
tools/make_distrib.sh
Executable file
2
tools/make_distrib.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
#!/bin/sh
# Build the CEF binary distribution into ../binary_distrib/, forwarding any
# extra arguments to make_distrib.py.
python make_distrib.py --output-dir ../binary_distrib/ $@
|
108
tools/make_gypi_file.py
Normal file
108
tools/make_gypi_file.py
Normal file
@@ -0,0 +1,108 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from cef_parser import *
|
||||
|
||||
def make_gypi_file(header):
  """ Return the contents of the autogenerated gypi file describing the
  translated include and wrapper source files for |header|. """
  # Static header section ($YEAR$ is substituted below).
  result = \
"""# Copyright (c) $YEAR$ The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.
#
# ---------------------------------------------------------------------------
#
# This file was generated by the CEF translator tool and should not edited
# by hand. See the translator.README.txt file in the tools directory for
# more information.
#

{
  'variables': {
"""

  filenames = sorted(header.get_file_names())

  # C++ include files.
  result += "    'autogen_cpp_includes': [\n"
  for filename in filenames:
    result += "      'include/"+filename+"',\n"
  result += "    ],\n"

  # C API include files.
  result += "    'autogen_capi_includes': [\n"
  for filename in filenames:
    result += "      'include/capi/"+get_capi_file_name(filename)+"',\n"
  result += "    ],\n"

  classes = sorted(header.get_class_names())

  # Library-side wrapper sources: library-side classes are wrapped CppToC,
  # client-side classes CToCpp.
  result += "    'autogen_library_side': [\n"
  for clsname in classes:
    cls = header.get_class(clsname)
    filename = get_capi_name(clsname[3:], False)
    if cls.is_library_side():
      result += "      'libcef_dll/cpptoc/"+filename+"_cpptoc.cc',\n" \
                "      'libcef_dll/cpptoc/"+filename+"_cpptoc.h',\n"
    else:
      result += "      'libcef_dll/ctocpp/"+filename+"_ctocpp.cc',\n" \
                "      'libcef_dll/ctocpp/"+filename+"_ctocpp.h',\n"
  result += "    ],\n"

  # Client-side wrapper sources: the translation direction is reversed.
  result += "    'autogen_client_side': [\n"
  for clsname in classes:
    cls = header.get_class(clsname)
    filename = get_capi_name(clsname[3:], False)
    if cls.is_library_side():
      result += "      'libcef_dll/ctocpp/"+filename+"_ctocpp.cc',\n" \
                "      'libcef_dll/ctocpp/"+filename+"_ctocpp.h',\n"
    else:
      result += "      'libcef_dll/cpptoc/"+filename+"_cpptoc.cc',\n" \
                "      'libcef_dll/cpptoc/"+filename+"_cpptoc.h',\n"
  result += "    ],\n"

  # Static footer section.
  result += \
"""  },
}
"""

  # Substitute the copyright year.
  return result.replace('$YEAR$', get_year())
|
||||
|
||||
def write_gypi_file(header, file, backup):
  """ Write the generated gypi contents for |header| to |file|.

  When |backup| is True and the file already has contents, the old copy is
  backed up first. Returns True when the file on disk was modified.
  """
  if path_exists(file):
    oldcontents = read_file(file)
  else:
    oldcontents = ''

  newcontents = make_gypi_file(header)
  if newcontents == oldcontents:
    # Nothing changed; leave the existing file untouched.
    return False

  if backup and oldcontents != '':
    backup_file(file)
  write_file(file, newcontents)
  return True
|
||||
|
||||
|
||||
# Allow exercising the module from the command line.
if __name__ == "__main__":
  import sys

  # Require the input header file argument.
  if len(sys.argv) < 2:
    sys.stderr.write('Usage: '+sys.argv[0]+' <infile>')
    sys.exit()

  # Parse the input file and dump the generated gypi contents to stdout.
  header = obj_header()
  header.add_file(sys.argv[1])
  sys.stdout.write(make_gypi_file(header))
|
2
tools/make_version_header.bat
Normal file
2
tools/make_version_header.bat
Normal file
@@ -0,0 +1,2 @@
|
||||
@echo off
rem Generate include\cef_version.h using the bundled Python runtime.
..\third_party\python_26\python.exe tools\make_version_header.py --header include\cef_version.h --version ../chrome/VERSION
|
133
tools/make_version_header.py
Normal file
133
tools/make_version_header.py
Normal file
@@ -0,0 +1,133 @@
|
||||
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
from date_util import *
|
||||
from file_util import *
|
||||
from optparse import OptionParser
|
||||
import svn_util as svn
|
||||
import git_util as git
|
||||
import sys
|
||||
|
||||
# This file is a command-line tool; refuse to be imported as a module.
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# Description shown by --help.
disc = """
This utility creates the version header file.
"""

# Command-line option parsing.
parser = OptionParser(description=disc)
parser.add_option('--header', dest='header', metavar='FILE',
                  help='output version header file [required]')
parser.add_option('--version', dest='version', metavar='FILE',
                  help='input Chrome version config file [required]')
parser.add_option('-q', '--quiet',
                  action='store_true', dest='quiet', default=False,
                  help='do not output detailed status information')
(options, args) = parser.parse_args()

# Both --header and --version are required; print usage and bail otherwise.
if options.header is None or options.version is None:
  parser.print_help(sys.stdout)
  sys.exit()
|
||||
|
||||
def write_svn_header(header, version):
  """ Create the version header file from the current revision and Chrome
  version information. The file is only rewritten when the generated
  contents differ (or when it does not yet exist). Returns True when the
  file was written. """

  if not path_exists(version):
    raise Exception('Version file '+version+' does not exist.')

  # Read and parse the version file (key=value pairs, one per line).
  chrome = {}
  for line in read_file(version).split("\n"):
    parts = line.split('=', 1)
    if len(parts) == 2:
      chrome[parts[0]] = parts[1]

  # Load the existing header, if any, so an unchanged file is not rewritten
  # (avoids needless rebuilds of everything that includes it).
  if path_exists(header):
    oldcontents = read_file(header)
  else:
    oldcontents = ''

  year = get_year()

  # Prefer the SVN revision; fall back to git-svn for git checkouts.
  # NOTE(review): the bare except also swallows KeyboardInterrupt — kept
  # for behavior compatibility; consider narrowing to Exception.
  try:
    revision = svn.get_revision()
  except:
    revision = git.get_svn_revision()

  newcontents = '// Copyright (c) '+year+' Marshall A. Greenblatt. All rights reserved.\n'+\
                '//\n'+\
                '// Redistribution and use in source and binary forms, with or without\n'+\
                '// modification, are permitted provided that the following conditions are\n'+\
                '// met:\n'+\
                '//\n'+\
                '// * Redistributions of source code must retain the above copyright\n'+\
                '// notice, this list of conditions and the following disclaimer.\n'+\
                '// * Redistributions in binary form must reproduce the above\n'+\
                '// copyright notice, this list of conditions and the following disclaimer\n'+\
                '// in the documentation and/or other materials provided with the\n'+\
                '// distribution.\n'+\
                '// * Neither the name of Google Inc. nor the name Chromium Embedded\n'+\
                '// Framework nor the names of its contributors may be used to endorse\n'+\
                '// or promote products derived from this software without specific prior\n'+\
                '// written permission.\n'+\
                '//\n'+\
                '// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n'+\
                '// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n'+\
                '// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n'+\
                '// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n'+\
                '// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n'+\
                '// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n'+\
                '// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n'+\
                '// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n'+\
                '// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n'+\
                '// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n'+\
                '// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n'+\
                '//\n'+\
                '// ---------------------------------------------------------------------------\n'+\
                '//\n'+\
                '// This file is generated by the make_version_header.py tool.\n'+\
                '//\n\n'+\
                '#ifndef CEF_INCLUDE_CEF_VERSION_H_\n'+\
                '#define CEF_INCLUDE_CEF_VERSION_H_\n\n'+\
                '#define CEF_REVISION ' + revision + '\n'+\
                '#define COPYRIGHT_YEAR ' + year + '\n\n'+\
                '#define CHROME_VERSION_MAJOR ' + chrome['MAJOR'] + '\n'+\
                '#define CHROME_VERSION_MINOR ' + chrome['MINOR'] + '\n'+\
                '#define CHROME_VERSION_BUILD ' + chrome['BUILD'] + '\n'+\
                '#define CHROME_VERSION_PATCH ' + chrome['PATCH'] + '\n\n'+\
                '#define DO_MAKE_STRING(p) #p\n'+\
                '#define MAKE_STRING(p) DO_MAKE_STRING(p)\n\n'+\
                '#ifndef APSTUDIO_HIDDEN_SYMBOLS\n\n'\
                '#ifdef __cplusplus\n'+\
                'extern "C" {\n'+\
                '#endif\n\n'+\
                '#include "internal/cef_export.h"\n\n'+\
                '///\n'+\
                '// Returns the CEF build revision of the libcef library.\n'+\
                '///\n'+\
                'CEF_EXPORT int cef_build_revision();\n\n'+\
                '#ifdef __cplusplus\n'+\
                '}\n'+\
                '#endif\n\n'+\
                '#endif // APSTUDIO_HIDDEN_SYMBOLS\n\n'+\
                '#endif // CEF_INCLUDE_CEF_VERSION_H_\n'

  if newcontents == oldcontents:
    return False

  write_file(header, newcontents)
  return True
|
||||
|
||||
# Generate the header and, unless quiet, report whether anything changed.
written = write_svn_header(options.header, options.version)
if not options.quiet:
  if written:
    status = 'updated'
  else:
    status = 'is already up to date'
  sys.stdout.write('File '+options.header+' '+status+'.\n')
|
2
tools/patch.bat
Normal file
2
tools/patch.bat
Normal file
@@ -0,0 +1,2 @@
|
||||
@echo off
rem Apply the patches listed in patch/patch.cfg using the bundled Python runtime.
..\third_party\python_26\python.exe tools\patcher.py --patch-config patch/patch.cfg
|
2
tools/patch.sh
Executable file
2
tools/patch.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
#!/bin/sh
# Apply the patches listed in patch/patch.cfg.
python tools/patcher.py --patch-config patch/patch.cfg
|
560
tools/patch_util.py
Normal file
560
tools/patch_util.py
Normal file
@@ -0,0 +1,560 @@
|
||||
""" Patch utility to apply unified diffs """
|
||||
""" Brute-force line-by-line parsing
|
||||
|
||||
Project home: http://code.google.com/p/python-patch/
|
||||
|
||||
This file is subject to the MIT license available here:
|
||||
http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
CEF Changes
|
||||
-----------
|
||||
|
||||
2009/07/22
|
||||
- Add a 'root_directory' argument to PatchInfo::apply
|
||||
- Fix a Python 2.4 compile error in PatchInfo::parse_stream
|
||||
|
||||
"""
|
||||
|
||||
__author__ = "techtonik.rainforce.org"
|
||||
__version__ = "8.12-1"
|
||||
|
||||
import copy
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from stat import *
|
||||
# cStringIO doesn't support unicode in 2.5
|
||||
from StringIO import StringIO
|
||||
from logging import debug, info, warning
|
||||
|
||||
from os.path import exists, isfile
|
||||
from os import unlink
|
||||
|
||||
debugmode = False
|
||||
|
||||
|
||||
def from_file(filename):
  """ read and parse patch file
      return PatchInfo() object

      The file handle is closed even when parsing raises (previously it
      leaked if PatchInfo() failed mid-parse).
  """
  info("reading patch from file %s" % filename)
  fp = open(filename, "rb")
  try:
    return PatchInfo(fp)
  finally:
    fp.close()
|
||||
|
||||
|
||||
def from_string(s):
  """ parse text string and return PatchInfo() object """
  # The module does 'from StringIO import StringIO', so the name StringIO
  # is already the class; the previous 'StringIO.StringIO(s)' raised
  # AttributeError on every call.
  return PatchInfo(StringIO(s))
|
||||
|
||||
|
||||
class HunkInfo(object):
  """ Parsed hunk data (a hunk starts with @@ -R +R @@). """

  def __init__(self):
    # Source (original file) start line and line count.
    self.startsrc = None
    self.linessrc = None
    # Target (patched file) start line and line count.
    self.starttgt = None
    self.linestgt = None
    # Set to True when the hunk fails validation during parsing.
    self.invalid = False
    # Raw hunk body lines, including the leading +/-/space markers.
    self.text = []

  def copy(self):
    """ Return a shallow copy of this hunk. """
    return copy.copy(self)
|
||||
|
||||
|
||||
|
||||
|
||||
class PatchInfo(object):
  """ Patch information container.

  Holds, for every file in a unified diff: the source filename, the
  target filename, the parsed hunks and the line-ending statistics.
  """

  def __init__(self, stream=None):
    """ Optionally parse a unified diff from the given line stream. """
    # define PatchInfo data members
    # table with a row for every source file

    #: list of source filenames
    self.source = None
    #: list of target filenames
    self.target = None
    #: list of lists of hunks
    self.hunks = None
    #: file endings statistics for every hunk
    self.hunkends = None

    if stream:
      self.parse_stream(stream)

  def copy(self):
    """ Return a shallow copy of this patch container. """
    return copy.copy(self)

  def parse_stream(self, stream):
    """ Parse a unified diff from an iterable of lines.

    Populates self.source, self.target, self.hunks and self.hunkends.
    Invalid hunks are recorded with their `invalid` flag set and a
    warning is logged.
    """
    self.source = []
    self.target = []
    self.hunks = []
    self.hunkends = []

    # define possible file regions that will direct the parser flow
    header = False    # comments before the patch body
    filenames = False # lines starting with --- and +++

    hunkhead = False  # @@ -R +R @@ sequence
    hunkbody = False  #
    hunkskip = False  # skipping invalid hunk mode

    header = True
    lineends = dict(lf=0, crlf=0, cr=0)
    nextfileno = 0
    nexthunkno = 0    #: even if index starts with 0 user messages number hunks from 1

    # hunkinfo holds parsed values, hunkactual - calculated
    hunkinfo = HunkInfo()
    hunkactual = dict(linessrc=None, linestgt=None)

    fe = enumerate(stream)
    for lineno, line in fe:

      # analyze state
      if header and line.startswith("--- "):
        header = False
        # switch to filenames state
        filenames = True
      #: skip hunkskip and hunkbody code until you read definition of hunkhead
      if hunkbody:
        # process line first
        if re.match(r"^[- \+\\]", line):
          # gather stats about line endings
          if line.endswith("\r\n"):
            self.hunkends[nextfileno-1]["crlf"] += 1
          elif line.endswith("\n"):
            self.hunkends[nextfileno-1]["lf"] += 1
          elif line.endswith("\r"):
            self.hunkends[nextfileno-1]["cr"] += 1

          if line.startswith("-"):
            hunkactual["linessrc"] += 1
          elif line.startswith("+"):
            hunkactual["linestgt"] += 1
          elif not line.startswith("\\"):
            hunkactual["linessrc"] += 1
            hunkactual["linestgt"] += 1
          hunkinfo.text.append(line)
          # todo: handle \ No newline cases
        else:
          warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
          # add hunk status node
          self.hunks[nextfileno-1].append(hunkinfo.copy())
          # BUGFIX: HunkInfo is an object, not a dict; the original
          # item assignment (["invalid"] = True) raised TypeError.
          self.hunks[nextfileno-1][nexthunkno-1].invalid = True
          # switch to hunkskip state
          hunkbody = False
          hunkskip = True

        # check exit conditions
        if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
          warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
          # add hunk status node
          self.hunks[nextfileno-1].append(hunkinfo.copy())
          # BUGFIX: attribute assignment instead of item assignment.
          self.hunks[nextfileno-1][nexthunkno-1].invalid = True
          # switch to hunkskip state
          hunkbody = False
          hunkskip = True
        elif hunkinfo.linessrc == hunkactual["linessrc"] and hunkinfo.linestgt == hunkactual["linestgt"]:
          self.hunks[nextfileno-1].append(hunkinfo.copy())
          # switch to hunkskip state
          hunkbody = False
          hunkskip = True

          # detect mixed window/unix line ends
          ends = self.hunkends[nextfileno-1]
          if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
            warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
          if debugmode:
            debuglines = dict(ends)
            debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
            debug("crlf: %(crlf)d lf: %(lf)d cr: %(cr)d\t - file: %(file)s hunk: %(hunk)d" % debuglines)

      if hunkskip:
        match = re.match(r"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
        if match:
          # switch to hunkhead state
          hunkskip = False
          hunkhead = True
        elif line.startswith("--- "):
          # switch to filenames state
          hunkskip = False
          filenames = True
          if debugmode and len(self.source) > 0:
            debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))

      if filenames:
        if line.startswith("--- "):
          if nextfileno in self.source:
            # NOTE(review): self.source is a list of filename strings, so
            # this int membership test can never be true as written --
            # confirm intent (upstream python-patch has the same check).
            warning("skipping invalid patch for %s" % self.source[nextfileno])
            del self.source[nextfileno]
            # double source filename line is encountered
            # attempt to restart from this second line
          re_filename = r"^--- ([^\t]+)"
          match = re.match(re_filename, line)
          if not match:
            warning("skipping invalid filename at line %d" % lineno)
            # switch back to header state
            filenames = False
            header = True
          else:
            self.source.append(match.group(1))
        elif not line.startswith("+++ "):
          if nextfileno in self.source:
            # NOTE(review): same int-vs-string membership oddity as above.
            warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
            del self.source[nextfileno]
          else:
            # this should be unreachable
            warning("skipping invalid target patch")
          filenames = False
          header = True
        else:
          if nextfileno in self.target:
            warning("skipping invalid patch - double target at line %d" % lineno)
            del self.source[nextfileno]
            del self.target[nextfileno]
            nextfileno -= 1
            # double target filename line is encountered
            # switch back to header state
            filenames = False
            header = True
          else:
            re_filename = r"^\+\+\+ ([^\t]+)"
            match = re.match(re_filename, line)
            if not match:
              warning("skipping invalid patch - no target filename at line %d" % lineno)
              # switch back to header state
              filenames = False
              header = True
            else:
              self.target.append(match.group(1))
              nextfileno += 1
              # switch to hunkhead state
              filenames = False
              hunkhead = True
              nexthunkno = 0
              self.hunks.append([])
              self.hunkends.append(lineends.copy())
              continue

      if hunkhead:
        match = re.match(r"^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
        if not match:
          if nextfileno-1 not in self.hunks:
            # NOTE(review): self.hunks is a list of hunk lists; this int
            # membership test mirrors the upstream script -- confirm.
            warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
            # switch to header state
            hunkhead = False
            header = True
            continue
          else:
            # switch to header state
            hunkhead = False
            header = True
        else:
          hunkinfo.startsrc = int(match.group(1))
          if match.group(3):
            hunkinfo.linessrc = int(match.group(3))
          else:
            hunkinfo.linessrc = 1
          hunkinfo.starttgt = int(match.group(4))
          if match.group(6):
            hunkinfo.linestgt = int(match.group(6))
          else:
            hunkinfo.linestgt = 1
          hunkinfo.invalid = False
          hunkinfo.text = []

          hunkactual["linessrc"] = hunkactual["linestgt"] = 0

          # switch to hunkbody state
          hunkhead = False
          hunkbody = True
          nexthunkno += 1
          continue
    else:
      if not hunkskip:
        # BUGFIX: the original message referenced an undefined local
        # `filename`, raising NameError on this code path.
        warning("patch file incomplete")
        # sys.exit(?)
      else:
        # duplicated message when an eof is reached
        if debugmode and len(self.source) > 0:
          debug("- %2d hunks for %s" % (len(self.hunks[nextfileno-1]), self.source[nextfileno-1]))

    info("total files: %d total hunks: %d" % (len(self.source), sum(len(hset) for hset in self.hunks)))

  def apply(self, root_directory = None):
    """ Apply the parsed patch to files on disk.

    Arguments:
      root_directory: optional prefix prepended to each source filename.

    For each file: validates every hunk against the current contents,
    then writes the patched result (the original is kept as *.orig until
    patching succeeds).  Problems are reported via warnings; nothing is
    raised for per-file failures.
    """
    total = len(self.source)
    for fileno, filename in enumerate(self.source):

      # resolve which on-disk file to patch (source, else target)
      f2patch = filename
      if root_directory is not None:
        f2patch = root_directory + f2patch
      if not exists(f2patch):
        f2patch = self.target[fileno]
        if not exists(f2patch):
          warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
          continue
      if not isfile(f2patch):
        warning("not a file - %s" % f2patch)
        continue
      filename = f2patch

      info("processing %d/%d:\t %s" % (fileno+1, total, filename))

      # validate before patching
      f2fp = open(filename)
      hunkno = 0
      hunk = self.hunks[fileno][hunkno]
      hunkfind = []
      hunkreplace = []
      validhunks = 0
      canpatch = False
      for lineno, line in enumerate(f2fp):
        if lineno+1 < hunk.startsrc:
          continue
        elif lineno+1 == hunk.startsrc:
          hunkfind = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " -"]
          hunkreplace = [x[1:].rstrip("\r\n") for x in hunk.text if x[0] in " +"]
          #pprint(hunkreplace)
          hunklineno = 0

          # todo: \ No newline at end of file

        # check hunks in source file
        if lineno+1 < hunk.startsrc+len(hunkfind)-1:
          if line.rstrip("\r\n") == hunkfind[hunklineno]:
            hunklineno += 1
          else:
            debug("hunk no.%d doesn't match source file %s" % (hunkno+1, filename))
            # file may be already patched, but we will check other hunks anyway
            hunkno += 1
            if hunkno < len(self.hunks[fileno]):
              hunk = self.hunks[fileno][hunkno]
              continue
            else:
              break

        # check if processed line is the last line
        if lineno+1 == hunk.startsrc+len(hunkfind)-1:
          debug("file %s hunk no.%d -- is ready to be patched" % (filename, hunkno+1))
          hunkno += 1
          validhunks += 1
          if hunkno < len(self.hunks[fileno]):
            hunk = self.hunks[fileno][hunkno]
          else:
            if validhunks == len(self.hunks[fileno]):
              # patch file
              canpatch = True
              break
      else:
        # for/else: the whole file was scanned without a break
        if hunkno < len(self.hunks[fileno]):
          warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))

      f2fp.close()

      if validhunks < len(self.hunks[fileno]):
        if check_patched(filename, self.hunks[fileno]):
          warning("already patched %s" % filename)
        else:
          warning("source file is different - %s" % filename)
      if canpatch:
        backupname = filename+".orig"
        if exists(backupname):
          warning("can't backup original file to %s - aborting" % backupname)
        else:
          import shutil
          shutil.move(filename, backupname)
          if patch_hunks(backupname, filename, self.hunks[fileno]):
            warning("successfully patched %s" % filename)
            unlink(backupname)
          else:
            warning("error patching file %s" % filename)
            shutil.copy(filename, filename+".invalid")
            # parenthesized for clarity; % binds tighter than + so the
            # original produced the same text by coincidence
            warning("invalid version is saved to %s" % (filename+".invalid"))
            # todo: proper rejects
            shutil.move(backupname, filename)

    # todo: check for premature eof
def check_patched(filename, hunks):
  """ Return True when the file already contains the target (post-patch)
  text of every hunk -- used to detect an already-applied patch. """
  matched = True
  stream = open(filename)

  class NoMatch(Exception):
    """ Local control-flow exception: file diverges from the hunks. """
    pass

  lineno = 1
  line = stream.readline()
  hunk_idx = None
  try:
    if not line:
      # an empty file can never match
      raise NoMatch
    for hunk_idx, hunk in enumerate(hunks):
      # skip to line just before hunk starts
      while lineno < hunk.starttgt - 1:
        line = stream.readline()
        lineno += 1
        if not line:
          raise NoMatch
      for hunk_line in hunk.text:
        # todo: \ No newline at the end of file
        if not hunk_line.startswith("-") and not hunk_line.startswith("\\"):
          line = stream.readline()
          lineno += 1
          if not line:
            raise NoMatch
          if line.rstrip("\r\n") != hunk_line[1:].rstrip("\r\n"):
            warning("file is not patched - failed hunk: %d" % (hunk_idx + 1))
            raise NoMatch
  except NoMatch:
    matched = False
    # todo: display failed hunk, i.e. expected/found

  stream.close()
  return matched
def patch_stream(instream, hunks):
  """ Generator: yield the lines of `instream` with `hunks` applied.

  Line endings of inserted hunk lines are converted to the single ending
  style autodetected from the input; when the input mixes styles the
  hunk text is passed through unchanged.
  """
  # todo: At the moment substituted lineends may not be the same
  # at the start and at the end of patching. Also issue a
  # warning/throw about mixed lineends (is it really needed?)

  hunks = iter(hunks)

  src_lineno = 1

  # counts of each line-ending style seen so far in the input
  ending_stats = {'\n': 0, '\r\n': 0, '\r': 0}

  def read_line():
    """ Read one source line, collecting line-end statistics. """
    line = instream.readline()
    # 'U' mode works only with text files
    if line.endswith("\r\n"):
      ending_stats["\r\n"] += 1
    elif line.endswith("\n"):
      ending_stats["\n"] += 1
    elif line.endswith("\r"):
      ending_stats["\r"] += 1
    return line

  for hunk_no, hunk in enumerate(hunks):
    debug("hunk %d" % (hunk_no + 1))
    # copy the unchanged region before the hunk straight through
    while src_lineno < hunk.startsrc:
      yield read_line()
      src_lineno += 1

    for hunk_line in hunk.text:
      # todo: check \ No newline at the end of file
      if hunk_line.startswith("-") or hunk_line.startswith("\\"):
        # removed line (or "\ No newline" marker): consume, don't emit
        read_line()
        src_lineno += 1
        continue
      if not hunk_line.startswith("+"):
        # context line: consume the matching source line as well
        read_line()
        src_lineno += 1
      emitted = hunk_line[1:]
      # normalize the ending only when the input uses exactly one style
      styles_seen = [end for end in ending_stats if ending_stats[end] != 0]
      if len(styles_seen) == 1:
        yield emitted.rstrip("\r\n") + styles_seen[0]
      else:
        # newlines are mixed -- emit hunk text verbatim
        yield emitted

  # trailing part of the source after the last hunk
  for line in instream:
    yield line
def patch_hunks(srcname, tgtname, hunks):
  """ Write `srcname` with `hunks` applied to `tgtname`.

  The target file receives the source file's permission bits.  Returns
  True; streaming errors surface as warnings from patch_stream.
  """
  # get the current file mode up front so it can be mirrored onto the
  # target (st_mode attribute avoids relying on a star-imported ST_MODE)
  mode = os.stat(srcname).st_mode

  # close both handles even if the streaming patch raises
  src = open(srcname, "rb")
  try:
    tgt = open(tgtname, "wb")
    try:
      debug("processing target file %s" % tgtname)
      tgt.writelines(patch_stream(src, hunks))
    finally:
      tgt.close()
  finally:
    src.close()

  # restore the file mode
  os.chmod(tgtname, mode)

  return True
from optparse import OptionParser
from os.path import exists
# BUGFIX?: logging.basicConfig is called below but the top of this file
# only imports names from logging (`from logging import debug, info,
# warning`) -- import the module explicitly.  TODO confirm the module is
# not already imported earlier in the file; a duplicate import is harmless.
import logging
import sys

if __name__ == "__main__":
  opt = OptionParser(usage="%prog [options] unipatch-file",
                     version="python-patch %s" % __version__)
  opt.add_option("-d", action="store_true", dest="debugmode", help="debug mode")
  (options, args) = opt.parse_args()

  if not args:
    # no patch file given: show version and usage, then exit
    opt.print_version()
    print("")
    opt.print_help()
    sys.exit()
  debugmode = options.debugmode
  patchfile = args[0]
  if not exists(patchfile) or not isfile(patchfile):
    sys.exit("patch file does not exist - %s" % patchfile)

  if debugmode:
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)8s %(message)s")
  else:
    logging.basicConfig(level=logging.INFO, format="%(message)s")

  patch = from_file(patchfile)
  #pprint(patch)
  patch.apply()

  # todo: document and test line ends handling logic - patch.py detects proper line-endings
  #       for inserted hunks and issues a warning if patched file has inconsistent line ends
32
tools/patcher.README.txt
Normal file
32
tools/patcher.README.txt
Normal file
@@ -0,0 +1,32 @@
|
||||
Chromium Embedded Framework (CEF) Patch Application Tool -- patcher.py
|
||||
-------------------------------------------------------------------------------
|
||||
|
||||
Document Last Updated: July 23, 2009
|
||||
|
||||
|
||||
OVERVIEW
|
||||
--------
|
||||
|
||||
The CEF patch application tool is used by the patch project to apply patches
|
||||
to the Chromium and WebKit code bases. Currently only unified diff format is
|
||||
supported. See the README.txt file in the patch directory for information on
|
||||
how the patch project uses this tool.
|
||||
|
||||
The 'patcher.bat' file can be used to run the patch application tool with
|
||||
command-line arguments that match the default CEF directory structure and
|
||||
output options. Run 'patcher.py -h' for a complete list of available command-
|
||||
line arguments.
|
||||
|
||||
|
||||
CREDITS
|
||||
-------
|
||||
|
||||
Thanks go to techtonik for developing the python-patch script. The
|
||||
patch_util.py file is a slightly modified version of the original script which
|
||||
can be found here: http://code.google.com/p/python-patch/
|
||||
|
||||
|
||||
WORK REMAINING
|
||||
--------------
|
||||
|
||||
o Add support for the GIT patch format.
|
98
tools/patcher.py
Normal file
98
tools/patcher.py
Normal file
@@ -0,0 +1,98 @@
|
||||
# Copyright (c) 2009 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import pickle
from optparse import OptionParser
import os
import sys
from file_util import *
from patch_util import *

# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# parse command-line options
disc = """
This utility applies patch files.
"""

parser = OptionParser(description=disc)
parser.add_option('--patch-config', dest='patchconfig', metavar='DIR',
                  help='patch configuration file')
(options, args) = parser.parse_args()

# the patchconfig option is required
if options.patchconfig is None:
  parser.print_help(sys.stdout)
  sys.exit()

# normalize the patch directory value
patchdir = os.path.dirname(os.path.abspath(options.patchconfig)).replace('\\', '/')
if patchdir[-1] != '/':
  patchdir += '/'

# check if the patching should be skipped
if os.path.isfile(patchdir + 'NOPATCH'):
  nopatch = True
  sys.stdout.write('NOPATCH exists -- files have not been patched.\n')
else:
  nopatch = False
  # locate the patch configuration file
  if not os.path.isfile(options.patchconfig):
    sys.stderr.write('File '+options.patchconfig+' does not exist.\n')
    sys.exit()

  scope = {}
  # NOTE: execfile is Python 2 only; this tool targets the bundled
  # Python 2 interpreter.
  execfile(options.patchconfig, scope)
  patches = scope["patches"]

  for patch in patches:
    # renamed from `file` to avoid shadowing the builtin
    patchfile = patchdir+'patches/'+patch['name']+'.patch'
    dopatch = True

    if 'condition' in patch:
      # Check that the environment variable is set.
      if patch['condition'] not in os.environ:
        sys.stderr.write('Skipping patch file '+patchfile+'\n')
        dopatch = False

    if dopatch:
      if not os.path.isfile(patchfile):
        sys.stderr.write('Patch file '+patchfile+' does not exist.\n')
      else:
        sys.stderr.write('Reading patch file '+patchfile+'\n')
        # renamed from `dir` to avoid shadowing the builtin
        targetdir = patch['path']
        patchObj = from_file(patchfile)
        patchObj.apply(targetdir)

# read the current include file, if any
incfile = patchdir + 'patch_state.h'
if nopatch:
  incnew = """// This file is generated by the patch tool and should not be edited manually.
#ifndef _PATCH_STATE_H
#define _PATCH_STATE_H
// No patches have been applied to the Chromium/WebKit source base.
#define CEF_PATCHES_APPLIED 0
#endif // _PATCH_STATE_H
"""
else:
  incnew = """// This file is generated by the patch tool and should not be edited manually.
#ifndef _PATCH_STATE_H
#define _PATCH_STATE_H
// Patches have been applied to the Chromium/WebKit source base.
#define CEF_PATCHES_APPLIED 1
#endif // _PATCH_STATE_H
"""

inccur = ''
if os.path.isfile(incfile):
  inccur = read_file(incfile)

# only rewrite the header when its contents actually change
if inccur != incnew:
  sys.stdout.write('Writing file '+incfile+'.\n')
  write_file(incfile, incnew)
180
tools/repack_locales.py
Normal file
180
tools/repack_locales.py
Normal file
@@ -0,0 +1,180 @@
|
||||
#!/usr/bin/env python
|
||||
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
|
||||
# Use of this source code is governed by a BSD-style license that can be
|
||||
# found in the LICENSE file.
|
||||
|
||||
"""Helper script to repack paks for a list of locales.
|
||||
|
||||
Gyp doesn't have any built-in looping capability, so this just provides a way to
|
||||
loop over a list of locales when repacking pak files, thus avoiding a
|
||||
proliferation of mostly duplicate, cut-n-paste gyp actions.
|
||||
"""
|
||||
|
||||
import getopt
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..',
|
||||
'tools', 'grit'))
|
||||
from grit.format import data_pack
|
||||
|
||||
# Some build paths defined by gyp.
|
||||
GRIT_DIR = None
|
||||
SHARE_INT_DIR = None
|
||||
INT_DIR = None
|
||||
|
||||
|
||||
class Usage(Exception):
  """ Raised with a usage/help message for command-line errors. """

  def __init__(self, msg):
    # BUGFIX: forward to Exception so str(exc) and exc.args carry the
    # message (previously str(exc) was empty).
    Exception.__init__(self, msg)
    self.msg = msg
def calc_output(locale):
  """Determine the file that will be generated for the given locale."""
  #e.g. '<(INTERMEDIATE_DIR)/repack/da.pak',
  # For Fake Bidi, generate it at a fixed path so that tests can safely
  # reference it.
  if locale == 'fake-bidi':
    return '%s/%s.pak' % (INT_DIR, locale)
  if sys.platform in ('darwin',):
    # For Cocoa to find the locale at runtime, it needs to use '_' instead
    # of '-' (http://crbug.com/20441). Also, 'en-US' should be represented
    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
    mac_locale = 'en' if locale == 'en-US' else locale
    return '%s/repack/%s.lproj/locale.pak' % (INT_DIR, mac_locale.replace('-', '_'))
  return os.path.join(INT_DIR, 'repack', locale + '.pak')
def calc_inputs(locale):
  """Determine the files that need processing for the given locale."""
  # Several upstream Chromium inputs are intentionally disabled for CEF;
  # they are kept here as documentation of the full list:
  #e.g. '<(grit_out_dir)/generated_resources_da.pak'
  #inputs.append(os.path.join(GRIT_DIR, 'generated_resources_%s.pak' % locale))
  #e.g. '<(grit_out_dir)/locale_settings_da.pak'
  #inputs.append(os.path.join(GRIT_DIR, 'locale_settings_%s.pak' % locale))
  #e.g. '<(grit_out_dir)/platform_locale_settings_da.pak'
  #inputs.append(os.path.join(GRIT_DIR,
  #              'platform_locale_settings_%s.pak' % locale))

  return [
      #e.g. '<(SHARED_INTERMEDIATE_DIR)/webkit/webkit_strings_da.pak'
      os.path.join(SHARE_INT_DIR, 'webkit',
                   'webkit_strings_%s.pak' % locale),
      #e.g. '<(SHARED_INTERMEDIATE_DIR)/ui/ui_strings_da.pak',
      os.path.join(SHARE_INT_DIR, 'ui', 'ui_strings',
                   'ui_strings_%s.pak' % locale),
      #e.g. '<(SHARED_INTERMEDIATE_DIR)/ui/app_locale_settings_da.pak',
      os.path.join(SHARE_INT_DIR, 'ui', 'app_locale_settings',
                   'app_locale_settings_%s.pak' % locale),
  ]
def list_outputs(locales):
  """Returns the names of files that will be generated for the given locales.

  This is to provide gyp the list of output files, so build targets can
  properly track what needs to be built.
  """
  # Quote each element so filename spaces don't mess up gyp's attempt to
  # parse it into a list.
  return " ".join('"%s"' % calc_output(locale) for locale in locales)
def list_inputs(locales):
  """Returns the names of files that will be processed for the given locales.

  This is to provide gyp the list of input files, so build targets can
  properly track their prerequisites.
  """
  all_inputs = []
  for locale in locales:
    all_inputs.extend(calc_inputs(locale))
  # Quote each element so filename spaces don't mess up gyp's attempt to
  # parse it into a list.
  return " ".join('"%s"' % path for path in all_inputs)
||||
def repack_locales(locales):
  """ Loop over and repack the given locales."""
  for locale in locales:
    # gather this locale's inputs and repack them into its output pak
    data_pack.DataPack.RePack(calc_output(locale), calc_inputs(locale))
def DoMain(argv):
  """ Parse arguments and run the repack.

  Returns a string of quoted paths for -i/-o queries, otherwise repacks
  the given locales.  Raises Usage on bad or missing arguments.
  """
  global GRIT_DIR
  global SHARE_INT_DIR
  global INT_DIR

  short_options = 'iog:s:x:b:h'
  long_options = 'help'

  print_inputs = False
  print_outputs = False
  usage_msg = ''

  helpstr = """\
Usage: %s [-h] [-i | -o] -g <DIR> -x <DIR> -s <DIR> <locale> [...]
  -h, --help Print this help, then exit.
  -i Print the expected input file list, then exit.
  -o Print the expected output file list, then exit.
  -g DIR GRIT build files output directory.
  -x DIR Intermediate build files output directory.
  -s DIR Shared intermediate build files output directory.
  locale [...] One or more locales to repack.""" % (
      os.path.basename(__file__))

  try:
    # "except E as v" replaces the Python-2-only "except E, v" form
    # (valid from Python 2.6 on).
    opts, locales = getopt.getopt(argv, short_options, long_options)
  except getopt.GetoptError as msg:
    raise Usage(str(msg))

  if not locales:
    # BUGFIX: typo "specificy" -> "specify" in the user-facing message.
    usage_msg = 'Please specify at least one locale to process.\n'

  for o, a in opts:
    # direct equality instead of the fragile `o in ('-i')` string
    # membership test (('-i') is just the string '-i').
    if o == '-i':
      print_inputs = True
    elif o == '-o':
      print_outputs = True
    elif o == '-g':
      GRIT_DIR = a
    elif o == '-s':
      SHARE_INT_DIR = a
    elif o == '-x':
      INT_DIR = a
    elif o in ('-h', '--help'):
      raise Usage(helpstr)

  if not (GRIT_DIR and INT_DIR and SHARE_INT_DIR):
    usage_msg += 'Please specify all of "-g" and "-x" and "-s".\n'
  if print_inputs and print_outputs:
    usage_msg += 'Please specify only one of "-i" or "-o".\n'

  if usage_msg:
    raise Usage(usage_msg)

  if print_inputs:
    return list_inputs(locales)

  if print_outputs:
    return list_outputs(locales)

  return repack_locales(locales)
if __name__ == '__main__':
  results = DoMain(sys.argv[1:])
  if results:
    # parenthesized so the statement is valid under both Python 2 and 3
    print(results)
57
tools/svn_util.py
Normal file
57
tools/svn_util.py
Normal file
@@ -0,0 +1,57 @@
|
||||
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import urllib
|
||||
|
||||
def check_url(url):
  """ Check the URL and raise an exception if invalid. """
  # a scheme separator must appear within the first 7 characters
  if ':' in url[:7]:
    scheme, rest = url.split(':', 1)
    # accept only http/https/svn URLs whose remainder is already escaped
    if scheme in ('http', 'https', 'svn') and rest == urllib.quote(rest):
      return url
  sys.stderr.write('Invalid URL: '+url+"\n")
  raise Exception('Invalid URL: '+url)
def get_svn_info(path):
  """ Retrieves the URL and revision from svn info.

  Returns a dict {'url': ..., 'revision': ...}; both values are the
  string 'None' when `path` is neither a URL nor an existing local path
  or when the svn output contains no matching lines.
  """
  url = 'None'
  rev = 'None'
  if path[0:4] == 'http' or os.path.exists(path):
    try:
      stream = os.popen('svn info '+path)
      for line in stream:
        if line[0:4] == "URL:":
          url = check_url(line[5:-1])
        elif line[0:9] == "Revision:":
          rev = str(int(line[10:-1]))
    except IOError as e:
      # The old "except IOError, (errno, strerror)" form is Python 2
      # only; unpack the args tuple explicitly for 2.6+/3 compatibility.
      (err_num, strerror) = e.args
      sys.stderr.write('Failed to read svn info: '+strerror+"\n")
      raise
  return {'url': url, 'revision': rev}
def get_revision(path = '.'):
  """ Retrieves the revision from svn info. """
  rev = get_svn_info(path)['revision']
  if rev == 'None':
    raise Exception('Unable to retrieve SVN revision for "'+path+'"')
  return rev
def get_changed_files(path = '.'):
  """ Retrieves the list of changed files from svn status.

  Returns the paths whose status is add, modify or switch; an empty list
  when `path` does not exist.
  """
  files = []
  if os.path.exists(path):
    try:
      stream = os.popen('svn status '+path)
      for line in stream:
        status = line[0]
        # Return paths with add, modify and switch status.
        if status in ('A', 'M', 'S'):
          files.append(line[8:].strip())
    except IOError as e:
      # The old "except IOError, (errno, strerror)" form is Python 2
      # only; unpack the args tuple explicitly for 2.6+/3 compatibility.
      (err_num, strerror) = e.args
      sys.stderr.write('Failed to read svn status: '+strerror+"\n")
      raise
  return files
1697
tools/translator.README.txt
Normal file
1697
tools/translator.README.txt
Normal file
File diff suppressed because it is too large
Load Diff
3
tools/translator.bat
Normal file
3
tools/translator.bat
Normal file
@@ -0,0 +1,3 @@
|
||||
@echo off
rem Run the CEF translator with the default in-tree directory layout
rem (expects to be run from the tools directory with the bundled
rem third_party Python 2.6 interpreter).
..\..\third_party\python_26\python.exe translator.py --cpp-header-dir ..\include --capi-header-dir ..\include\capi --cpptoc-global-impl ..\libcef_dll\libcef_dll.cc --ctocpp-global-impl ..\libcef_dll\wrapper\libcef_dll_wrapper.cc --cpptoc-dir ..\libcef_dll\cpptoc --ctocpp-dir ..\libcef_dll\ctocpp --gypi-file ..\cef_paths.gypi
rem Keep the console window open so output stays visible when the batch
rem file is double-clicked.
pause
|
163
tools/translator.py
Normal file
163
tools/translator.py
Normal file
@@ -0,0 +1,163 @@
|
||||
# Copyright (c) 2009 The Chromium Embedded Framework Authors. All rights
|
||||
# reserved. Use of this source code is governed by a BSD-style license that
|
||||
# can be found in the LICENSE file.
|
||||
|
||||
import sys
|
||||
from cef_parser import *
|
||||
from make_capi_header import *
|
||||
from make_cpptoc_header import *
|
||||
from make_cpptoc_impl import *
|
||||
from make_ctocpp_header import *
|
||||
from make_ctocpp_impl import *
|
||||
from make_gypi_file import *
|
||||
from optparse import OptionParser
|
||||
|
||||
|
||||
# cannot be loaded as a module
|
||||
if __name__ != "__main__":
|
||||
sys.stderr.write('This file cannot be loaded as a module!')
|
||||
sys.exit()
|
||||
|
||||
|
||||
# parse command-line options
|
||||
disc = """
|
||||
This utility generates files for the CEF C++ to C API translation layer.
|
||||
"""
|
||||
|
||||
parser = OptionParser(description=disc)
|
||||
parser.add_option('--cpp-header-dir', dest='cppheaderdir', metavar='DIR',
|
||||
help='input directory for C++ header files [required]')
|
||||
parser.add_option('--capi-header-dir', dest='capiheaderdir', metavar='DIR',
|
||||
help='output directory for C API header files')
|
||||
parser.add_option('--cpptoc-global-impl', dest='cpptocglobalimpl', metavar='FILE',
|
||||
help='input/output file for CppToC global translations')
|
||||
parser.add_option('--ctocpp-global-impl', dest='ctocppglobalimpl', metavar='FILE',
|
||||
help='input/output file for CppToC global translations')
|
||||
parser.add_option('--cpptoc-dir', dest='cpptocdir', metavar='DIR',
|
||||
help='input/output directory for CppToC class translations')
|
||||
parser.add_option('--ctocpp-dir', dest='ctocppdir', metavar='DIR',
|
||||
help='input/output directory for CppToC class translations')
|
||||
parser.add_option('--gypi-file', dest='gypifile', metavar='FILE',
|
||||
help='output file for path information')
|
||||
parser.add_option('--no-cpptoc-header',
|
||||
action='store_true', dest='nocpptocheader', default=False,
|
||||
help='do not output the CppToC headers')
|
||||
parser.add_option('--no-cpptoc-impl',
|
||||
action='store_true', dest='nocpptocimpl', default=False,
|
||||
help='do not output the CppToC implementations')
|
||||
parser.add_option('--no-ctocpp-header',
|
||||
action='store_true', dest='noctocppheader', default=False,
|
||||
help='do not output the CToCpp headers')
|
||||
parser.add_option('--no-ctocpp-impl',
|
||||
action='store_true', dest='noctocppimpl', default=False,
|
||||
help='do not output the CToCpp implementations')
|
||||
parser.add_option('--no-backup',
|
||||
action='store_true', dest='nobackup', default=False,
|
||||
help='do not create a backup of modified files')
|
||||
parser.add_option('-c', '--classes', dest='classes', action='append',
|
||||
help='only translate the specified classes')
|
||||
parser.add_option('-q', '--quiet',
|
||||
action='store_true', dest='quiet', default=False,
|
||||
help='do not output detailed status information')
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
# the cppheader option is required
|
||||
if options.cppheaderdir is None:
|
||||
parser.print_help(sys.stdout)
|
||||
sys.exit()
|
||||
|
||||
# make sure the header exists
|
||||
if not path_exists(options.cppheaderdir):
|
||||
sys.stderr.write('File '+options.cppheaderdir+' does not exist.')
|
||||
sys.exit()
|
||||
|
||||
# create the header object
|
||||
if not options.quiet:
|
||||
sys.stdout.write('Parsing C++ headers from '+options.cppheaderdir+'...\n')
|
||||
header = obj_header()
|
||||
header.add_directory(options.cppheaderdir)
|
||||
|
||||
writect = 0
|
||||
|
||||
if options.capiheaderdir is not None:
    # Output one C API header per parsed C++ header file.
    if not options.quiet:
        sys.stdout.write('In C API header directory ' +
                         options.capiheaderdir+'...\n')
    # Sort for deterministic generation order.
    filenames = sorted(header.get_file_names())
    for filename in filenames:
        if not options.quiet:
            sys.stdout.write('Generating '+filename+' C API header...\n')
        writect += write_capi_header(
            header, os.path.join(options.capiheaderdir, filename),
            not options.nobackup)
|
||||
|
||||
# Build the list of classes to translate. When -c/--classes is given only
# those classes are processed; otherwise every parsed class is.
allclasses = header.get_class_names()
if options.classes is not None:
    for cls in options.classes:
        if cls not in allclasses:
            # Terminate the message with a newline and exit non-zero so
            # callers can detect the bad class name.
            sys.stderr.write('ERROR: Unknown class: '+cls+'\n')
            sys.exit(1)
    classes = options.classes
else:
    classes = allclasses

# Process in a stable order so generated output is deterministic.
classes = sorted(classes)
|
||||
|
||||
if options.cpptocglobalimpl is not None:
    # Output the single CppToC global (non-class) implementation file.
    if not options.quiet:
        sys.stdout.write('Generating CppToC global implementation...\n')
    writect += write_cpptoc_impl(header, None, options.cpptocglobalimpl,
                                 not options.nobackup)

if options.ctocppglobalimpl is not None:
    # Output the single CToCpp global (non-class) implementation file.
    if not options.quiet:
        sys.stdout.write('Generating CToCpp global implementation...\n')
    writect += write_ctocpp_impl(header, None, options.ctocppglobalimpl,
                                 not options.nobackup)
|
||||
|
||||
if options.cpptocdir is not None:
    # Output the CppToC class header/implementation files, honoring the
    # --no-cpptoc-header / --no-cpptoc-impl suppression switches.
    if not options.quiet:
        sys.stdout.write('In CppToC directory '+options.cpptocdir+'...\n')

    for cls in classes:
        if not options.nocpptocheader:
            if not options.quiet:
                sys.stdout.write('Generating '+cls+'CppToC class header...\n')
            writect += write_cpptoc_header(header, cls, options.cpptocdir,
                                           not options.nobackup)
        if not options.nocpptocimpl:
            if not options.quiet:
                sys.stdout.write('Generating '+cls +
                                 'CppToC class implementation...\n')
            writect += write_cpptoc_impl(header, cls, options.cpptocdir,
                                         not options.nobackup)
|
||||
|
||||
if options.ctocppdir is not None:
    # Output the CToCpp class header/implementation files.
    if not options.quiet:
        sys.stdout.write('In CToCpp directory '+options.ctocppdir+'...\n')
    for cls in classes:
        # BUG FIX: this section previously tested the CppToC suppression
        # flags (nocpptocheader/nocpptocimpl), so --no-ctocpp-header and
        # --no-ctocpp-impl had no effect. Test the CToCpp flags instead.
        if not options.noctocppheader:
            if not options.quiet:
                sys.stdout.write('Generating '+cls+'CToCpp class header...\n')
            writect += write_ctocpp_header(header, cls, options.ctocppdir,
                                           not options.nobackup)
        if not options.noctocppimpl:
            if not options.quiet:
                sys.stdout.write('Generating '+cls +
                                 'CToCpp class implementation...\n')
            writect += write_ctocpp_impl(header, cls, options.ctocppdir,
                                         not options.nobackup)
|
||||
|
||||
if options.gypifile is not None:
    # Output the gypi file listing the paths of all translated files so the
    # build can pick them up.
    if not options.quiet:
        sys.stdout.write('Generating '+options.gypifile+' file...\n')
    writect += write_gypi_file(header, options.gypifile, not options.nobackup)

if not options.quiet:
    sys.stdout.write('Done - Wrote '+str(writect)+' files.\n')
2
tools/translator.sh
Executable file
2
tools/translator.sh
Executable file
@@ -0,0 +1,2 @@
|
||||
#!/bin/sh
# Run the CEF translator over the standard project layout: parse the C++
# headers in ../include and (re)generate the C API headers under
# ../include/capi, the CppToC/CToCpp wrapper sources under ../libcef_dll,
# and the ../cef_paths.gypi path-listing file.
python translator.py --cpp-header-dir ../include --capi-header-dir ../include/capi --cpptoc-global-impl ../libcef_dll/libcef_dll.cc --ctocpp-global-impl ../libcef_dll/wrapper/libcef_dll_wrapper.cc --cpptoc-dir ../libcef_dll/cpptoc --ctocpp-dir ../libcef_dll/ctocpp --gypi-file ../cef_paths.gypi
|
Reference in New Issue
Block a user