2012-04-03 03:34:16 +02:00
|
|
|
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
|
|
|
|
# reserved. Use of this source code is governed by a BSD-style license that
|
|
|
|
# can be found in the LICENSE file.
|
|
|
|
|
|
|
|
from optparse import OptionParser
|
2013-07-19 22:44:46 +02:00
|
|
|
import httplib
|
2012-04-03 03:34:16 +02:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import shlex
|
|
|
|
import shutil
|
|
|
|
import subprocess
|
|
|
|
import sys
|
2013-07-19 22:44:46 +02:00
|
|
|
import tempfile
|
2012-04-03 03:34:16 +02:00
|
|
|
import urllib
|
2013-08-22 19:57:26 +02:00
|
|
|
import xml.etree.ElementTree as ET
|
2013-07-19 22:44:46 +02:00
|
|
|
import zipfile
|
2012-04-03 03:34:16 +02:00
|
|
|
|
|
|
|
# default URL values
# cef_url may be overridden with --url; depot_tools is checked out from
# depot_tools_url when no --depot-tools/--depot-tools-archive is given.
cef_url = 'http://chromiumembedded.googlecode.com/svn/trunk/cef3'
depot_tools_url = 'http://src.chromium.org/svn/trunk/tools/depot_tools'
|
|
|
|
|
2013-09-10 22:02:09 +02:00
|
|
|
def run(command_line, working_dir, depot_tools_dir=None, output_file=None):
  """ Runs |command_line| in |working_dir| and returns the child exit status.

  If |depot_tools_dir| is given it is prepended to PATH for the child
  process. If |output_file| is given, the child's stdout and stderr are
  redirected to that file. With --dry-run the command is printed but not
  executed (returns None in that case).
  """
  # Use a copy of the environment so repeated calls don't permanently
  # mutate this process's environment or grow PATH without bound.
  env = os.environ.copy()
  if depot_tools_dir is not None:
    # add depot_tools to the path
    env['PATH'] = depot_tools_dir+os.pathsep+env['PATH']

  sys.stdout.write('-------- Running "'+command_line+'" in "'+\
                   working_dir+'"...'+"\n")
  if not options.dryrun:
    # Double backslashes so shlex doesn't eat them as escape characters
    # (matters for Windows paths embedded in the command line).
    args = shlex.split(command_line.replace('\\', '\\\\'))

    if not output_file:
      return subprocess.check_call(args, cwd=working_dir, env=env,
                                   shell=(sys.platform == 'win32'))
    # Log both streams to the requested file.
    with open(output_file, "w") as f:
      return subprocess.check_call(args, cwd=working_dir, env=env,
                                   shell=(sys.platform == 'win32'),
                                   stderr=subprocess.STDOUT, stdout=f)
|
2012-04-03 03:34:16 +02:00
|
|
|
|
|
|
|
def check_url(url):
  """ Check the URL and raise an exception if invalid. """
  # A valid URL starts with an http/https scheme within the first few
  # characters and contains no characters that would need quoting.
  if ':' in url[:7]:
    scheme, remainder = url.split(':', 1)
    scheme_ok = scheme in ('http', 'https')
    if scheme_ok and remainder == urllib.quote(remainder):
      return url
  sys.stderr.write('Invalid URL: '+url+"\n")
  raise Exception('Invalid URL: '+url)
|
2013-08-22 19:57:26 +02:00
|
|
|
|
2012-04-03 03:34:16 +02:00
|
|
|
def get_svn_info(path):
|
|
|
|
""" Retrieves the URL and revision from svn info. """
|
|
|
|
url = 'None'
|
|
|
|
rev = 'None'
|
2013-10-29 22:30:43 +01:00
|
|
|
sys.stdout.write("-------- Running \"%s info --xml %s\"...\n" % (svn_exe, path))
|
2012-04-03 03:34:16 +02:00
|
|
|
if path[0:4] == 'http' or os.path.exists(path):
|
|
|
|
try:
|
2013-08-22 22:14:11 +02:00
|
|
|
p = subprocess.Popen([svn_exe, 'info', '--xml', path], \
|
|
|
|
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
|
|
|
out, err = p.communicate()
|
2013-08-22 21:13:26 +02:00
|
|
|
if err == '':
|
2013-08-22 22:14:11 +02:00
|
|
|
tree = ET.ElementTree(ET.fromstring(out))
|
2013-08-22 21:13:26 +02:00
|
|
|
entry = tree.getroot().find('entry')
|
|
|
|
url = entry.find('url').text
|
|
|
|
rev = entry.attrib['revision']
|
|
|
|
else:
|
|
|
|
raise Exception("Failed to execute svn info:\n"+err+"\n")
|
2012-04-03 03:34:16 +02:00
|
|
|
except IOError, (errno, strerror):
|
|
|
|
sys.stderr.write('Failed to read svn info: '+strerror+"\n")
|
|
|
|
raise
|
2013-08-22 21:13:26 +02:00
|
|
|
except:
|
|
|
|
raise
|
2012-04-03 03:34:16 +02:00
|
|
|
return {'url': url, 'revision': rev}
|
2013-01-17 19:15:42 +01:00
|
|
|
|
2013-07-19 22:44:46 +02:00
|
|
|
def download_and_extract(src, target, contents_prefix):
  """ Extracts the contents of src, which may be a URL or local file, to the
      target directory.

  Only archive members whose names start with |contents_prefix| are
  extracted, with that prefix stripped. On any extraction failure the
  partially-created |target| directory is removed and the exception is
  re-raised. A downloaded (temporary) archive is deleted afterwards; a
  local |src| archive is left in place.
  """
  sys.stdout.write('Extracting %s to %s.\n' % (src, target))
  temporary = False

  if src[:4] == 'http':
    # Attempt to download a URL.
    opener = urllib.FancyURLopener({})
    response = opener.open(src)

    # Spool the download to a temporary .zip so zipfile can seek in it.
    temporary = True
    handle, archive_path = tempfile.mkstemp(suffix = '.zip')
    os.write(handle, response.read())
    os.close(handle)
  elif os.path.exists(src):
    # Use a local file.
    archive_path = src
  else:
    raise Exception('Path type is unsupported or does not exist: ' + src)

  if not zipfile.is_zipfile(archive_path):
    raise Exception('Not a valid zip archive: ' + src)

  def remove_prefix(archive, prefix):
    # Yield only the members under |prefix|, renamed with it stripped.
    offset = len(prefix)
    for zipinfo in archive.infolist():
      name = zipinfo.filename
      if len(name) > offset and name[:offset] == prefix:
        zipinfo.filename = name[offset:]
        yield zipinfo

  # Attempt to extract the archive file.
  try:
    os.makedirs(target)
    zf = zipfile.ZipFile(archive_path, 'r')
    try:
      zf.extractall(target, remove_prefix(zf, contents_prefix))
    finally:
      # Always release the archive handle, even when extraction fails
      # (an open handle would otherwise leak and, on Windows, could block
      # removal of a temporary archive).
      zf.close()
  except:
    # Don't leave a half-populated target directory behind.
    shutil.rmtree(target, onerror=onerror)
    raise

  # Delete the archive file if temporary.
  if temporary and os.path.exists(archive_path):
    os.remove(archive_path)
|
|
|
|
|
2013-01-17 19:15:42 +01:00
|
|
|
def onerror(func, path, exc_info):
  """
  Error handler for ``shutil.rmtree``.

  When the failed path is not writable (typically a read-only file causing
  an access error) this handler grants the owner write permission and
  retries the operation that failed (``func``). Any other failure is
  re-raised unchanged.

  Usage : ``shutil.rmtree(path, onerror=onerror)``
  """
  import stat
  if os.access(path, os.W_OK):
    # Not a permission problem -- propagate the original exception.
    raise
  # Make the path owner-writable, then retry the failed operation.
  os.chmod(path, stat.S_IWUSR)
  func(path)
|
2013-09-10 22:02:09 +02:00
|
|
|
|
2012-04-03 03:34:16 +02:00
|
|
|
# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# parse command-line options
disc = """
This utility implements automation for the download, update, build and
distribution of CEF.
"""

parser = OptionParser(description=disc)
parser.add_option('--download-dir', dest='downloaddir', metavar='DIR',
                  help='download directory with no spaces [required]')
parser.add_option('--revision', dest='revision', type="int",
                  help='CEF source revision')
parser.add_option('--url', dest='url',
                  help='CEF source URL')
parser.add_option('--depot-tools', dest='depottools', metavar='DIR',
                  help='download directory for depot_tools', default='')
parser.add_option('--depot-tools-archive', dest='depottoolsarchive',
                  help='zip archive file that contains a single top-level '+\
                       'depot_tools directory', default='')
parser.add_option('--force-config',
                  action='store_true', dest='forceconfig', default=False,
                  help='force Chromium configuration')
parser.add_option('--force-clean',
                  action='store_true', dest='forceclean', default=False,
                  help='force revert of all Chromium changes, deletion of '+\
                       'all unversioned files including the CEF folder and '+\
                       'trigger the force-update, force-build and '+\
                       'force-distrib options')
parser.add_option('--force-update',
                  action='store_true', dest='forceupdate', default=False,
                  help='force Chromium and CEF update')
parser.add_option('--no-update',
                  action='store_true', dest='noupdate', default=False,
                  help='do not update Chromium and CEF.' +\
                       'Cannot be used along with --force[update|config|clean]')
parser.add_option('--force-build',
                  action='store_true', dest='forcebuild', default=False,
                  help='force CEF debug and release builds')
parser.add_option('--build-tests',
                  action='store_true', dest='buildtests', default=False,
                  help='build cef_unittests target besides cefclient')
parser.add_option('--force-distrib',
                  action='store_true', dest='forcedistrib', default=False,
                  help='force creation of CEF binary distribution')
parser.add_option('--no-debug-build',
                  action='store_true', dest='nodebugbuild', default=False,
                  help="don't perform the CEF debug build")
parser.add_option('--no-release-build',
                  action='store_true', dest='noreleasebuild', default=False,
                  help="don't perform the CEF release build")
parser.add_option('--no-distrib',
                  action='store_true', dest='nodistrib', default=False,
                  help="don't create any CEF binary distribution")
parser.add_option('--minimal-distrib',
                  action='store_true', dest='minimaldistrib', default=False,
                  help='create a minimal CEF binary distribution')
parser.add_option('--minimal-distrib-only',
                  action='store_true', dest='minimaldistribonly', default=False,
                  help='create a minimal CEF binary distribution only')
parser.add_option('--client-distrib',
                  action='store_true', dest='clientdistrib', default=False,
                  help='create a client CEF binary distribution')
parser.add_option('--client-distrib-only',
                  action='store_true', dest='clientdistribonly', default=False,
                  help='create a client CEF binary distribution only')
parser.add_option('--no-distrib-docs',
                  action='store_true', dest='nodistribdocs', default=False,
                  help="don't create CEF documentation")
parser.add_option('--no-distrib-archive',
                  action='store_true', dest='nodistribarchive', default=False,
                  help="don't create archives for output directories")
parser.add_option('--ninja-build',
                  action='store_true', dest='ninjabuild', default=False,
                  help="build using ninja")
parser.add_option('--verbose',
                  action='store_true', dest='verbose', default=False,
                  help='show all command lines while building')
parser.add_option('--build-log-file',
                  action='store_true', dest='buildlogfile', default=False,
                  help='write build logs to files')
parser.add_option('--x64-build',
                  action='store_true', dest='x64build', default=False,
                  help='build for 64-bit systems (Windows and Mac OS X only)')
parser.add_option('--clean-artifacts',
                  action='store_true', dest='cleanartifacts', default=False,
                  help='clean the artifacts output directory')
parser.add_option('--dry-run',
                  action='store_true', dest='dryrun', default=False,
                  help="output commands without executing them")
# 'options' is read as a module-level global by run() and throughout the
# script below.
(options, args) = parser.parse_args()
|
|
|
|
|
2013-08-23 18:22:11 +02:00
|
|
|
# Test the operating system.
# 'platform' remains '' on unrecognized systems; later platform-specific
# branches then simply do not apply.
platform = '';
if sys.platform == 'win32':
  platform = 'windows'
elif sys.platform == 'darwin':
  platform = 'macosx'
elif sys.platform.startswith('linux'):
  platform = 'linux'

# the downloaddir option is required
if options.downloaddir is None:
  parser.print_help(sys.stderr)
  sys.exit()
|
|
|
|
|
2013-04-12 01:07:43 +02:00
|
|
|
# Minimal/client distributions are built from the release build, so they
# conflict with --no-release-build; the two "-only" modes are mutually
# exclusive.
if (options.noreleasebuild and (options.minimaldistrib or options.minimaldistribonly or \
                                options.clientdistrib or options.clientdistribonly)) or \
   (options.minimaldistribonly and options.clientdistribonly):
  print 'Invalid combination of options'
  parser.print_help(sys.stderr)
  sys.exit()

# --no-update contradicts any of the force options.
if options.noupdate and (options.forceclean or options.forceupdate or options.forceconfig):
  print "Invalid combination of options."
  print "--no-update cannot be used along with --force-[update|config|clean]\n"
  sys.exit()

if options.x64build and platform != 'windows' and platform != 'macosx':
  print 'The x64 build option is only used on Windows and Mac OS X.'
  sys.exit()

if options.x64build and platform == 'windows' and not options.ninjabuild:
  print 'The x64 build option on Windows requires ninja.'
  sys.exit()
|
|
|
|
|
2012-04-03 03:34:16 +02:00
|
|
|
# script directory
script_dir = os.path.dirname(__file__)

download_dir = os.path.abspath(options.downloaddir)
if not options.dryrun and not os.path.exists(download_dir):
  # create the download directory
  os.makedirs(download_dir)

# set the expected script extension
if platform == 'windows':
  script_ext = '.bat'
else:
  script_ext = '.sh'

# check if the "depot_tools" directory exists
if options.depottools != '':
  depot_tools_dir = os.path.abspath(options.depottools)
else:
  depot_tools_dir = os.path.join(download_dir, 'depot_tools')
if not os.path.exists(depot_tools_dir):
  if options.depottoolsarchive != '':
    # extract depot_tools from an archive file
    download_and_extract(options.depottoolsarchive, depot_tools_dir,
        'depot_tools/')
  else:
    # checkout depot_tools
    # (system svn is used here; the bundled svn_exe is chosen further down)
    run('svn checkout '+depot_tools_url+' '+depot_tools_dir, download_dir)
|
|
|
|
|
2013-11-05 21:26:02 +01:00
|
|
|
if not options.noupdate:
  # Update depot_tools. It will download required scripts (svn, python, ...)
  if platform == 'windows':
    run('update_depot_tools.bat', depot_tools_dir, depot_tools_dir);
  else:
    run('update_depot_tools', depot_tools_dir, depot_tools_dir);

# Select the svn executable used by get_svn_info() and the checkout/update
# commands below.
if platform == 'windows':
  # Force use of the SVN version bundled with depot_tools.
  svn_exe = os.path.join(depot_tools_dir, 'svn.bat')
  if options.dryrun and not os.path.exists(svn_exe):
    sys.stdout.write("WARNING: --dry-run assumes that depot_tools" \
                     " is already in your PATH. If it isn't\nplease" \
                     " specify a --depot-tools value.\n")
    svn_exe = 'svn.bat'
else:
  svn_exe = 'svn'
|
|
|
|
|
2012-04-03 03:34:16 +02:00
|
|
|
if not options.url is None:
  # set the CEF URL
  cef_url = check_url(options.url)

if not options.revision is None:
  # set the CEF revision
  cef_rev = str(options.revision)
else:
  # retrieve the CEF revision from the remote repo
  info = get_svn_info(cef_url)
  cef_rev = info['revision']
  if cef_rev == 'None':
    sys.stderr.write('No SVN info for: '+cef_url+"\n")
    raise Exception('No SVN info for: '+cef_url)

# Retrieve the Chromium URL and revision from the CEF repo
compat_url = cef_url + "/CHROMIUM_BUILD_COMPATIBILITY.txt?r="+cef_rev

release_url = None
chromium_url = None
chromium_rev = None

try:
  # Read the remote URL contents
  handle = urllib.urlopen(compat_url)
  compat_value = handle.read().strip()
  handle.close()

  # Parse the contents
  # NOTE(review): eval of remote data; builtins are disabled but this still
  # trusts the CEF repository contents.
  config = eval(compat_value, {'__builtins__': None}, None)

  if 'release_url' in config:
    # building from a release
    release_url = check_url(config['release_url'])
  else:
    # building from chromium src
    if not 'chromium_url' in config:
      raise Exception("Missing chromium_url value")
    if not 'chromium_revision' in config:
      raise Exception("Missing chromium_revision value")

    chromium_url = check_url(config['chromium_url'])
    chromium_rev = str(int(config['chromium_revision']))
except Exception, e:
  sys.stderr.write('Failed to read URL and revision information from '+ \
                   compat_url+"\n")
  raise
|
|
|
|
|
|
|
|
# check if the "chromium" directory exists
chromium_dir = os.path.join(download_dir, 'chromium')
if not options.dryrun and not os.path.exists(chromium_dir):
  # create the "chromium" directory
  os.makedirs(chromium_dir)

chromium_src_dir = os.path.join(chromium_dir, 'src')
cef_src_dir = os.path.join(chromium_src_dir, 'cef')
cef_tools_dir = os.path.join(cef_src_dir, 'tools')

# retrieve the current CEF URL and revision
# (both come back as the string 'None' when nothing is checked out yet)
info = get_svn_info(cef_src_dir)
current_cef_url = info['url']
current_cef_rev = info['revision']

if release_url is None:
  # retrieve the current Chromium URL and revision
  info = get_svn_info(chromium_src_dir)
  current_chromium_url = info['url']
  current_chromium_rev = info['revision']

# With --no-update the messages below only report what is available
# instead of what will be changed.
changed_to_message = ' -> CHANGED TO: '
if options.noupdate:
  changed_to_message = ' -> AVAILABLE: '
|
|
|
|
|
2012-04-03 03:34:16 +02:00
|
|
|
# test if the CEF URL changed
cef_url_changed = current_cef_url != cef_url
sys.stdout.write('CEF URL: '+current_cef_url+"\n")
if cef_url_changed:
  sys.stdout.write(changed_to_message+cef_url+"\n")

# test if the CEF revision changed
cef_rev_changed = current_cef_rev != cef_rev
sys.stdout.write('CEF Revision: '+current_cef_rev+"\n")
if cef_rev_changed:
  sys.stdout.write(changed_to_message+cef_rev+"\n")

release_url_changed = False
chromium_url_changed = False
chromium_rev_changed = False

if release_url is None:
  # test if the Chromium URL changed
  chromium_url_changed = current_chromium_url != chromium_url
  sys.stdout.write('Chromium URL: '+current_chromium_url+"\n")
  if chromium_url_changed:
    sys.stdout.write(changed_to_message+chromium_url+"\n")

  # test if the Chromium revision changed
  chromium_rev_changed = current_chromium_rev != chromium_rev
  sys.stdout.write('Chromium Revision: '+current_chromium_rev+"\n")
  if chromium_rev_changed:
    sys.stdout.write(changed_to_message+chromium_rev+"\n")
else:
  # test if the release URL changed
  current_release_url = 'None'

  path = os.path.join(chromium_dir, '.gclient')
  if os.path.exists(path):
    # read the .gclient file
    fp = open(path, 'r')
    data = fp.read()
    fp.close()

    # Parse the contents
    # (.gclient is a Python fragment defining a 'solutions' list)
    config_dict = {}
    try:
      exec(data, config_dict)
      current_release_url = config_dict['solutions'][0]['url']
    except Exception, e:
      # NOTE(review): message typo ".glient" left as-is (runtime string).
      sys.stderr.write('Failed to parse existing .glient file.\n')
      raise

  release_url_changed = current_release_url != release_url
  sys.stdout.write('Release URL: '+current_release_url+"\n")
  if release_url_changed:
    sys.stdout.write(changed_to_message+release_url+"\n")

# true if anything changed
any_changed = release_url_changed or chromium_url_changed or \
              chromium_rev_changed or cef_url_changed or cef_rev_changed
if not any_changed:
  sys.stdout.write("No changes.\n")
elif options.noupdate:
  # Report the pending updates but reset all change flags so no update,
  # build or distribution step is triggered below.
  sys.stdout.write("You have updates. Remove --no-update flag to update source code\n")
  release_url_changed = False
  chromium_url_changed = False
  chromium_rev_changed = False
  cef_url_changed = False
  cef_rev_changed = False
  any_changed = False
|
|
|
|
|
|
|
|
|
2012-04-03 03:34:16 +02:00
|
|
|
if release_url_changed or chromium_url_changed or options.forceconfig:
  if release_url is None:
    url = chromium_url
  else:
    url = release_url

  # run gclient config to create the .gclient file
  run('gclient config '+url, chromium_dir, depot_tools_dir)

  if not options.dryrun:
    path = os.path.join(chromium_dir, '.gclient')
    if not os.path.exists(path):
      sys.stderr.write(".gclient file was not created\n")
      raise Exception('.gclient file was not created')

    # read the resulting .gclient file
    fp = open(path, 'r')
    data = fp.read()
    fp.close()

    # Exclude directories that are not needed for the CEF build to reduce
    # the checkout size.
    custom_deps = \
        "\n      "+'"src/third_party/WebKit/LayoutTests": None,'+\
        "\n      "+'"src/chrome_frame/tools/test/reference_build/chrome": None,'+\
        "\n      "+'"src/chrome/tools/test/reference_build/chrome_mac": None,'+\
        "\n      "+'"src/chrome/tools/test/reference_build/chrome_win": None,'+\
        "\n      "+'"src/chrome/tools/test/reference_build/chrome_linux": None,'

    if not release_url is None:
      # TODO: Read the DEPS file and exclude all non-src directories.
      custom_deps += \
          "\n      "+'"chromeos": None,'+\
          "\n      "+'"depot_tools": None,'

    # populate "custom_deps" section
    data = data.replace('"custom_deps" : {', '"custom_deps" : {'+custom_deps)

    # write the new .gclient file
    fp = open(path, 'w')
    fp.write(data)
    fp.close()
|
2012-04-03 03:34:16 +02:00
|
|
|
|
|
|
|
if options.forceclean:
  if os.path.exists(chromium_src_dir):
    # revert all Chromium changes and delete all unversioned files
    run('gclient revert -n', chromium_dir, depot_tools_dir)

    if not options.dryrun:
      # remove the build output directories
      output_dirs = []
      if platform == 'windows':
        output_dirs.append(os.path.join(chromium_src_dir, 'build\\Debug'))
        output_dirs.append(os.path.join(chromium_src_dir, 'build\\Release'))
      elif platform == 'macosx':
        output_dirs.append(os.path.join(chromium_src_dir, 'xcodebuild'))
      elif platform == 'linux':
        output_dirs.append(os.path.join(chromium_src_dir, 'out'))

      # ninja always writes to 'out' regardless of platform.
      if options.ninjabuild:
        output_dirs.append(os.path.join(chromium_src_dir, 'out'))

      for output_dir in output_dirs:
        if os.path.exists(output_dir):
          shutil.rmtree(output_dir, onerror=onerror)

  # force update, build and distrib steps
  options.forceupdate = True
  options.forcebuild = True
  options.forcedistrib = True
|
|
|
|
|
|
|
|
# Update the Chromium (or release) checkout, then the CEF checkout inside it.
if release_url is None:
  if chromium_url_changed or chromium_rev_changed or options.forceupdate:
    # download/update the Chromium source code
    run('gclient sync --revision src@'+chromium_rev+' --jobs 8 --force', \
        chromium_dir, depot_tools_dir)
elif release_url_changed or options.forceupdate:
  # download/update the release source code
  run('gclient sync --jobs 8 --force', chromium_dir, depot_tools_dir)

if not os.path.exists(cef_src_dir) or cef_url_changed:
  if not options.dryrun and cef_url_changed and os.path.exists(cef_src_dir):
    # delete the cef directory (it will be re-downloaded)
    shutil.rmtree(cef_src_dir)

  # download the CEF source code
  run(svn_exe+' checkout '+cef_url+' -r '+cef_rev+' '+cef_src_dir, download_dir)
elif cef_rev_changed or options.forceupdate:
  # update the CEF source code
  run(svn_exe+' update -r '+cef_rev+' '+cef_src_dir, download_dir)
|
2012-04-03 03:34:16 +02:00
|
|
|
|
|
|
|
if any_changed or options.forceupdate:
  # create CEF projects
  # Generator/defines are passed to gyp through the environment of the
  # cef_create_projects script invoked below.
  if options.ninjabuild:
    os.environ['GYP_GENERATORS'] = 'ninja'
  if options.x64build:
    if 'GYP_DEFINES' in os.environ.keys():
      os.environ['GYP_DEFINES'] = os.environ['GYP_DEFINES'] + ' ' + 'target_arch=x64'
    else:
      os.environ['GYP_DEFINES'] = 'target_arch=x64'
  path = os.path.join(cef_src_dir, 'cef_create_projects'+script_ext)
  run(path, cef_src_dir, depot_tools_dir)
|
|
|
|
|
|
|
|
if any_changed or options.forcebuild:
  if options.ninjabuild:
    command = 'ninja -C '
    if options.verbose:
      # NOTE(review): no trailing space here, producing e.g. "-Cout/Debug";
      # ninja accepts the attached-argument form, so both variants work.
      command = 'ninja -v -C'
    target = ' cefclient'
    if options.buildtests:
      target = target + ' cef_unittests'
      if platform == 'linux':
        target = target + ' chrome_sandbox'
    build_dir_suffix = ''
    if platform == 'windows' and options.x64build:
      build_dir_suffix = '_x64'

    if not options.nodebugbuild:
      # make CEF Debug build
      run(command + os.path.join('out', 'Debug' + build_dir_suffix) + target, \
          chromium_src_dir, depot_tools_dir,
          os.path.join(chromium_src_dir, 'ninja-build-debug.log') if options.buildlogfile else None)

    if not options.noreleasebuild:
      # make CEF Release build
      run(command + os.path.join('out', 'Release' + build_dir_suffix) + target, \
          chromium_src_dir, depot_tools_dir,
          os.path.join(chromium_src_dir, 'ninja-build-release.log') if options.buildlogfile else None)
  else:
    # Non-ninja builds go through the platform build_projects script.
    path = os.path.join(cef_tools_dir, 'build_projects'+script_ext)

    if not options.nodebugbuild:
      # make CEF Debug build
      run(path+' Debug', cef_tools_dir, depot_tools_dir,
          os.path.join(chromium_src_dir, 'build-debug.log') if options.buildlogfile else None)

    if not options.noreleasebuild:
      # make CEF Release build
      run(path+' Release', cef_tools_dir, depot_tools_dir,
          os.path.join(chromium_src_dir, 'build-release.log') if options.buildlogfile else None)
|
2012-04-03 03:34:16 +02:00
|
|
|
|
2013-04-10 23:02:37 +02:00
|
|
|
if (any_changed or options.forcedistrib) and not options.nodistrib:
  if not options.forceclean and options.cleanartifacts:
    # clean the artifacts output directory
    # (--force-clean already wiped everything, hence the extra check)
    artifacts_path = os.path.join(cef_src_dir, 'binary_distrib')
    if os.path.exists(artifacts_path):
      shutil.rmtree(artifacts_path, onerror=onerror)

  # determine the requested distribution types
  distrib_types = []
  if options.minimaldistribonly:
    distrib_types.append('minimal')
  elif options.clientdistribonly:
    distrib_types.append('client')
  else:
    distrib_types.append('standard')
    if options.minimaldistrib:
      distrib_types.append('minimal')
    if options.clientdistrib:
      distrib_types.append('client')

  first_type = True

  # create the requested distribution types
  # 'path' accumulates the make_distrib command line, one flag at a time.
  for type in distrib_types:
    path = os.path.join(cef_tools_dir, 'make_distrib'+script_ext)
    if options.nodebugbuild or options.noreleasebuild or type != 'standard':
      path = path + ' --allow-partial'
    if options.ninjabuild:
      path = path + ' --ninja-build'
    if options.x64build:
      path = path + ' --x64-build'

    if type == 'minimal':
      path = path + ' --minimal'
    elif type == 'client':
      path = path + ' --client'

    if first_type:
      if options.nodistribdocs:
        path = path + ' --no-docs'
      if options.nodistribarchive:
        path = path + ' --no-archive'
      first_type = False
    else:
      # don't create the symbol archives or documentation more than once
      path = path + ' --no-symbols --no-docs'

    # create the distribution
    run(path, cef_tools_dir, depot_tools_dir)
|