Add ability to patch Chromium DEPS file (issue #1612)
parent 1c6da5fe86
commit 939a6598c1
@@ -216,7 +216,7 @@ def write_branch_config_file(path, branch):
  write_config_file(config_file, {'branch': branch})

def remove_deps_entry(path, entry):
  """ Remove an entry from the DEPS file at the specified path. """
  """ Remove an entry from the Chromium DEPS file at the specified path. """
  msg('Updating DEPS file: %s' % path)
  if not options.dryrun:
    if not os.path.isfile(path):
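For context, remove_deps_entry() rewrites a DEPS file in place so that a named entry disappears. Only fragments of its body are visible in these hunks, so the sketch below is a hypothetical reconstruction (the real helper also honours --dry-run and logs through msg()):

import os

def remove_deps_entry_sketch(path, entry):
  """ Drop the line(s) belonging to |entry| from the DEPS-style file at |path|.
      Hypothetical illustration only, not the CEF implementation. """
  if not os.path.isfile(path):
    raise Exception('Path does not exist: %s' % path)
  with open(path, 'r') as fp:
    lines = fp.readlines()
  with open(path, 'w') as fp:
    for line in lines:
      # e.g. entry == "'src'" skips the "'src': <url>@<revision>," line.
      if line.lstrip().startswith(entry):
        continue
      fp.write(line)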
@@ -241,6 +241,22 @@ def remove_deps_entry(path, entry):
        fp.write(line)
      fp.close()

def apply_deps_patch():
  """ Patch the Chromium DEPS file if necessary. """
  deps_file = '.DEPS.git'
  patch_file = os.path.join(cef_dir, 'patch', 'patches', deps_file + '.patch')
  if os.path.exists(patch_file):
    # Attempt to apply the DEPS patch file that may exist with newer branches.
    patch_tool = os.path.join(cef_dir, 'tools', 'patcher.py')
    run('%s %s --patch-file "%s" --patch-dir "%s"' %
        (python_exe, patch_tool, patch_file, chromium_src_dir),
        chromium_src_dir, depot_tools_dir)
  elif cef_branch != 'trunk':
    # Older release branch DEPS files may include a 'src' entry. This entry
    # needs to be removed otherwise `gclient sync` will fail.
    deps_path = os.path.join(chromium_src_dir, deps_file)
    remove_deps_entry(deps_path, "'src'")

def onerror(func, path, exc_info):
  """
  Error handler for ``shutil.rmtree``.
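The new apply_deps_patch() helper shells out to tools/patcher.py in single-file mode. Roughly, the command it builds boils down to the following simplified sketch (the real script routes this through run(), which also puts depot_tools on the PATH; the directory paths shown are illustrative):

import os
import subprocess

cef_dir = '/path/to/chromium/src/cef'       # illustrative layout
chromium_src_dir = '/path/to/chromium/src'  # illustrative layout

patch_file = os.path.join(cef_dir, 'patch', 'patches', '.DEPS.git.patch')
patch_tool = os.path.join(cef_dir, 'tools', 'patcher.py')

if os.path.exists(patch_file):
  # python tools/patcher.py --patch-file <patch> --patch-dir <chromium src>
  subprocess.check_call(
      ['python', patch_tool,
       '--patch-file', patch_file,
       '--patch-dir', chromium_src_dir],
      cwd=chromium_src_dir)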
@@ -526,17 +542,20 @@ if not options.noupdate:
else:
  run('update_depot_tools', depot_tools_dir, depot_tools_dir);

# Determine the git executables to use.
# Determine the executables to use.
if platform == 'windows':
  # Force use of the version bundled with depot_tools.
  git_exe = os.path.join(depot_tools_dir, 'git.bat')
  python_exe = os.path.join(depot_tools_dir, 'python.bat')
  if options.dryrun and not os.path.exists(git_exe):
    sys.stdout.write("WARNING: --dry-run assumes that depot_tools" \
                     " is already in your PATH. If it isn't\nplease" \
                     " specify a --depot-tools-dir value.\n")
    git_exe = 'git.bat'
    python_exe = 'python.bat'
else:
  git_exe = 'git'
  python_exe = 'python'


##
@@ -652,6 +671,7 @@ if not os.path.exists(gclient_file) or options.forceconfig:
"u'build/scripts/command_wrapper/bin': None, "+\
"u'build/scripts/gsd_generate_index': None, "+\
"u'build/scripts/private/data/reliability': None, "+\
"u'build/scripts/tools/deps2git': None, "+\
"u'build/third_party/lighttpd': None, "+\
"u'commit-queue': None, "+\
"u'depot_tools': None, "+\
@@ -686,7 +706,6 @@ if os.path.exists(chromium_src_dir):
  msg("Chromium URL: %s" % (get_git_url(chromium_src_dir)))

# Determine the Chromium checkout options required by CEF.
chromium_nohooks = False
if options.chromiumcheckout == '':
  # Read the build compatibility file to identify the checkout name.
  compat_path = os.path.join(cef_dir, 'CHROMIUM_BUILD_COMPATIBILITY.txt')
@@ -696,10 +715,6 @@ if options.chromiumcheckout == '':
    chromium_checkout = config['chromium_checkout']
  else:
    raise Exception("Missing chromium_checkout value in %s" % (compat_path))

  # Some branches run hooks using CEF instead of Chromium.
  if 'chromium_nohooks' in config:
    chromium_nohooks = config['chromium_nohooks']
else:
  chromium_checkout = options.chromiumcheckout
@@ -755,20 +770,16 @@ if chromium_checkout_changed:
      (git_exe, ('--force ' if options.forceclean else ''), chromium_checkout), \
      chromium_src_dir, depot_tools_dir)

  if cef_branch != 'trunk':
    # Remove the 'src' entry from .DEPS.git for release branches.
    # Otherwise, `gclient sync` will fail.
    deps_path = os.path.join(chromium_src_dir, '.DEPS.git')
    remove_deps_entry(deps_path, "'src'")
  # Patch the Chromium DEPS file if necessary.
  apply_deps_patch()

  # Set the GYP_CHROMIUM_NO_ACTION value temporarily so that `gclient sync` does
  # not run gyp.
  os.environ['GYP_CHROMIUM_NO_ACTION'] = '1'

  # Update third-party dependencies including branch/tag information.
  run("gclient sync %s%s--with_branch_heads --jobs 16" % \
      (('--reset ' if options.forceclean else ''), \
       ('--nohooks ' if chromium_nohooks else '')), \
  run("gclient sync %s--with_branch_heads --jobs 16" % \
      (('--reset ' if options.forceclean else '')), \
      chromium_dir, depot_tools_dir)

  # Clear the GYP_CHROMIUM_NO_ACTION value.
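Summarizing the sync step above: gyp is suppressed while `gclient sync` runs and the override is cleared afterwards. A condensed, stand-alone sketch follows (the checkout directory is a placeholder; the real script builds the command via run() and adds --reset only when --force-clean is given):

import os
import subprocess

chromium_dir = '/path/to/chromium'  # placeholder

os.environ['GYP_CHROMIUM_NO_ACTION'] = '1'  # keep `gclient sync` from running gyp
try:
  subprocess.check_call('gclient sync --with_branch_heads --jobs 16',
                        shell=True, cwd=chromium_dir)
finally:
  # Clear the GYP_CHROMIUM_NO_ACTION value.
  del os.environ['GYP_CHROMIUM_NO_ACTION']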
@@ -9,6 +9,9 @@
CEF Changes
-----------

2015/04/22
- Write to stdout instead of using warning() for messages

2013/01/03
- Add support for patches containing new files
@@ -28,7 +31,7 @@ import re
from stat import *
# cStringIO doesn't support unicode in 2.5
from StringIO import StringIO
from logging import debug, info, warning
from logging import debug, info

from os.path import exists, isfile
from os import unlink
@@ -55,6 +58,11 @@ def from_string(s):
    )


def msg(message):
  """ Output a message. """
  sys.stdout.write('--> ' + message + "\n")


class HunkInfo(object):
  """ parsed hunk data (hunk starts with @@ -R +R @@) """
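The msg() helper added here simply prefixes a message with '--> ' and writes it to stdout, which is what every former warning() call below is switched to. A self-contained copy for illustration:

import sys

def msg(message):
  """ Output a message. """
  sys.stdout.write('--> ' + message + "\n")

msg('creating file foo.cc')   # prints: --> creating file foo.cc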
@@ -160,7 +168,7 @@ class PatchInfo(object):
  hunkinfo.text.append(line)
  # todo: handle \ No newline cases
else:
  warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
  msg("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
  # add hunk status node
  self.hunks[nextfileno-1].append(hunkinfo.copy())
  self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
@@ -170,7 +178,7 @@ class PatchInfo(object):

# check exit conditions
if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
  warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
  msg("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
  # add hunk status node
  self.hunks[nextfileno-1].append(hunkinfo.copy())
  self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
@@ -186,7 +194,7 @@ class PatchInfo(object):
# detect mixed window/unix line ends
ends = self.hunkends[nextfileno-1]
if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
  warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
  msg("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
if debugmode:
  debuglines = dict(ends)
  debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
@@ -208,14 +216,14 @@ class PatchInfo(object):
if filenames:
  if line.startswith("--- "):
    if nextfileno in self.source:
      warning("skipping invalid patch for %s" % self.source[nextfileno])
      msg("skipping invalid patch for %s" % self.source[nextfileno])
      del self.source[nextfileno]
    # double source filename line is encountered
    # attempt to restart from this second line
    re_filename = "^--- ([^\t]+)"
    match = re.match(re_filename, line)
    if not match:
      warning("skipping invalid filename at line %d" % lineno)
      msg("skipping invalid filename at line %d" % lineno)
      # switch back to header state
      filenames = False
      header = True
@@ -223,16 +231,16 @@ class PatchInfo(object):
      self.source.append(match.group(1).strip())
  elif not line.startswith("+++ "):
    if nextfileno in self.source:
      warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
      msg("skipping invalid patch with no target for %s" % self.source[nextfileno])
      del self.source[nextfileno]
    else:
      # this should be unreachable
      warning("skipping invalid target patch")
      msg("skipping invalid target patch")
    filenames = False
    header = True
  else:
    if nextfileno in self.target:
      warning("skipping invalid patch - double target at line %d" % lineno)
      msg("skipping invalid patch - double target at line %d" % lineno)
      del self.source[nextfileno]
      del self.target[nextfileno]
      nextfileno -= 1
@@ -244,7 +252,7 @@ class PatchInfo(object):
re_filename = "^\+\+\+ ([^\t]+)"
match = re.match(re_filename, line)
if not match:
  warning("skipping invalid patch - no target filename at line %d" % lineno)
  msg("skipping invalid patch - no target filename at line %d" % lineno)
  # switch back to header state
  filenames = False
  header = True
@@ -264,7 +272,7 @@ class PatchInfo(object):
match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
if not match:
  if nextfileno-1 not in self.hunks:
    warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
    msg("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
    # switch to header state
    hunkhead = False
    header = True
@@ -296,7 +304,7 @@ class PatchInfo(object):
    continue
else:
  if not hunkskip:
    warning("patch file incomplete - %s" % filename)
    msg("patch file incomplete - %s" % filename)
    # sys.exit(?)
  else:
    # duplicated message when an eof is reached
@@ -319,7 +327,7 @@ class PatchInfo(object):
if len(self.hunks[fileno]) == 1 and self.hunks[fileno][0].startsrc == 0:
  hunklines = [x[1:].rstrip("\r\n") for x in self.hunks[fileno][0].text if x[0] in " +"]
  if len(hunklines) > 0:
    warning("creating file %s" % (f2patch))
    msg("creating file %s" % (f2patch))
    f = open(f2patch, "wb")
    for line in hunklines:
      f.write(line + "\n")
@@ -328,10 +336,10 @@ class PatchInfo(object):

f2patch = self.target[fileno]
if not exists(f2patch):
  warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
  msg("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
  continue
if not isfile(f2patch):
  warning("not a file - %s" % f2patch)
  msg("not a file - %s" % f2patch)
  continue
filename = f2patch

@@ -385,29 +393,29 @@ class PatchInfo(object):
      else:
        if hunkno < len(self.hunks[fileno]) and \
           (len(self.hunks[fileno]) != 1 or self.hunks[fileno][0].startsrc != 0):
          warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
          msg("premature end of source file %s at hunk %d" % (filename, hunkno+1))

    f2fp.close()

    if validhunks < len(self.hunks[fileno]):
      if check_patched(filename, self.hunks[fileno]):
        warning("already patched %s" % filename)
        msg("already patched %s" % filename)
      else:
        warning("source file is different - %s" % filename)
        msg("source file is different - %s" % filename)
    if canpatch:
      backupname = filename+".orig"
      if exists(backupname):
        warning("can't backup original file to %s - aborting" % backupname)
        msg("can't backup original file to %s - aborting" % backupname)
      else:
        import shutil
        shutil.move(filename, backupname)
        if patch_hunks(backupname, filename, self.hunks[fileno]):
          warning("successfully patched %s" % filename)
          msg("successfully patched %s" % filename)
          unlink(backupname)
        else:
          warning("error patching file %s" % filename)
          msg("error patching file %s" % filename)
          shutil.copy(filename, filename+".invalid")
          warning("invalid version is saved to %s" % filename+".invalid")
          msg("invalid version is saved to %s" % filename+".invalid")
          # todo: proper rejects
          shutil.move(backupname, filename)
@@ -461,7 +469,7 @@ def check_patched(filename, hunks):
      if not len(line):
        raise NoMatch
      if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
        warning("file is not patched - failed hunk: %d" % (hno+1))
        msg("file is not patched - failed hunk: %d" % (hno+1))
        raise NoMatch
  except NoMatch:
    matched = False
tools/patcher.py (112 changed lines)
@@ -10,45 +10,39 @@ from file_util import *
from patch_util import *


# cannot be loaded as a module
# Cannot be loaded as a module.
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.stdout.write('This file cannot be loaded as a module!')
  sys.exit()


# parse command-line options
disc = """
This utility applies patch files.
"""
def normalize_dir(dir):
  ''' Normalize the directory value. '''
  dir = dir.replace('\\', '/')
  if dir[-1] != '/':
    dir += '/'
  return dir

parser = OptionParser(description=disc)
parser.add_option('--patch-config', dest='patchconfig', metavar='DIR',
                  help='patch configuration file')
(options, args) = parser.parse_args()
def patch_file(patch_file, patch_dir):
  ''' Apply a single patch file in a single directory. '''
  if not os.path.isfile(patch_file):
    raise Exception('Patch file %s does not exist.' % patch_file)

# the patchconfig option is required
if options.patchconfig is None:
  parser.print_help(sys.stdout)
  sys.exit()
  sys.stdout.write('Reading patch file %s\n' % patch_file)
  patchObj = from_file(patch_file)
  patchObj.apply(normalize_dir(patch_dir))

# normalize the patch directory value
patchdir = os.path.dirname(os.path.abspath(options.patchconfig)).replace('\\', '/')
if patchdir[-1] != '/':
  patchdir += '/'
def patch_config(config_file):
  ''' Apply patch files based on a configuration file. '''
  # Normalize the patch directory value.
  patchdir = normalize_dir(os.path.dirname(os.path.abspath(config_file)))

  # check if the patching should be skipped
  if os.path.isfile(patchdir + 'NOPATCH'):
    nopatch = True
    sys.stdout.write('NOPATCH exists -- files have not been patched.\n')
  else:
    nopatch = False
    # locate the patch configuration file
    if not os.path.isfile(options.patchconfig):
      sys.stderr.write('File '+options.patchconfig+' does not exist.\n')
      sys.exit()
    if not os.path.isfile(config_file):
      raise Exception('Patch config file %s does not exist.' % config_file)

    # Parse the configuration file.
    scope = {}
    execfile(options.patchconfig, scope)
    execfile(config_file, scope)
    patches = scope["patches"]

    for patch in patches:
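The new normalize_dir() helper converts backslashes to forward slashes and guarantees a trailing slash. A stand-alone copy with example inputs (the paths are arbitrary):

def normalize_dir(dir):
  ''' Normalize the directory value. '''
  dir = dir.replace('\\', '/')
  if dir[-1] != '/':
    dir += '/'
  return dir

assert normalize_dir('C:\\code\\chromium\\src') == 'C:/code/chromium/src/'
assert normalize_dir('/usr/local/src') == '/usr/local/src/'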
@@ -58,46 +52,36 @@ else:
  if 'condition' in patch:
    # Check that the environment variable is set.
    if patch['condition'] not in os.environ:
      sys.stderr.write('Skipping patch file '+file+'\n')
      sys.stdout.write('Skipping patch file %s\n' % file)
      dopatch = False

  if dopatch:
    if not os.path.isfile(file):
      sys.stderr.write('Patch file '+file+' does not exist.\n')
    else:
      sys.stderr.write('Reading patch file '+file+'\n')
      dir = patch['path']
      patchObj = from_file(file)
      patchObj.apply(dir)
      patch_file(file, patch['path'])
  if 'note' in patch:
    separator = '-' * 79 + '\n'
    sys.stderr.write(separator)
    sys.stderr.write('NOTE: '+patch['note']+'\n')
    sys.stderr.write(separator)
    sys.stdout.write(separator)
    sys.stdout.write('NOTE: %s\n' % patch['note'])
    sys.stdout.write(separator)

# read the current include file, if any
incfile = patchdir + 'patch_state.h'
if nopatch:
  incnew = """// This file is generated by the patch tool and should not be edited manually.
#ifndef _PATCH_STATE_H
#define _PATCH_STATE_H
// No patches have been applied to the Chromium/WebKit source base.
#define CEF_PATCHES_APPLIED 0
#endif // _PATCH_STATE_H

# Parse command-line options.
disc = """
This utility applies patch files.
"""

parser = OptionParser(description=disc)
parser.add_option('--patch-config', dest='patchconfig', metavar='DIR',
                  help='patch configuration file')
parser.add_option('--patch-file', dest='patchfile', metavar='FILE',
                  help='patch source file')
parser.add_option('--patch-dir', dest='patchdir', metavar='DIR',
                  help='patch target directory')
(options, args) = parser.parse_args()

if not options.patchconfig is None:
  patch_config(options.patchconfig)
elif not options.patchfile is None and not options.patchdir is None:
  patch_file(options.patchfile, options.patchdir)
else:
  incnew = """// This file is generated by the patch tool and should not be edited manually.
#ifndef _PATCH_STATE_H
#define _PATCH_STATE_H
// Patches have been applied to the Chromium/WebKit source base.
#define CEF_PATCHES_APPLIED 1
#endif // _PATCH_STATE_H
"""

inccur = ''
if os.path.isfile(incfile):
  inccur = read_file(incfile)

if inccur != incnew:
  sys.stdout.write('Writing file '+incfile+'.\n')
  write_file(incfile, incnew)
  parser.print_help(sys.stdout)
  sys.exit()
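With the options added above, patcher.py now supports two invocation modes: the existing --patch-config mode and the new --patch-file/--patch-dir mode used by automate-git for the DEPS patch. Hypothetical invocations (all paths are illustrative):

import subprocess

# Config-file mode (unchanged behaviour): apply every patch listed in a
# patch.cfg-style configuration file.
subprocess.check_call(['python', 'tools/patcher.py',
                       '--patch-config', 'patch/patch.cfg'])

# New single-file mode: apply one patch into a target directory, e.g. the
# .DEPS.git patch applied by automate-git.
subprocess.check_call(['python', 'tools/patcher.py',
                       '--patch-file', 'patch/patches/.DEPS.git.patch',
                       '--patch-dir', '/path/to/chromium/src'])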