Add ability to patch Chromium DEPS file (issue #1612)
parent 1c6da5fe86
commit 939a6598c1
@@ -216,7 +216,7 @@ def write_branch_config_file(path, branch):
     write_config_file(config_file, {'branch': branch})

 def remove_deps_entry(path, entry):
-  """ Remove an entry from the DEPS file at the specified path. """
+  """ Remove an entry from the Chromium DEPS file at the specified path. """
   msg('Updating DEPS file: %s' % path)
   if not options.dryrun:
     if not os.path.isfile(path):
@@ -241,6 +241,22 @@ def remove_deps_entry(path, entry):
         fp.write(line)
   fp.close()

+def apply_deps_patch():
+  """ Patch the Chromium DEPS file if necessary. """
+  deps_file = '.DEPS.git'
+  patch_file = os.path.join(cef_dir, 'patch', 'patches', deps_file + '.patch')
+  if os.path.exists(patch_file):
+    # Attempt to apply the DEPS patch file that may exist with newer branches.
+    patch_tool = os.path.join(cef_dir, 'tools', 'patcher.py')
+    run('%s %s --patch-file "%s" --patch-dir "%s"' %
+        (python_exe, patch_tool, patch_file, chromium_src_dir),
+        chromium_src_dir, depot_tools_dir)
+  elif cef_branch != 'trunk':
+    # Older release branch DEPS files may include a 'src' entry. This entry
+    # needs to be removed otherwise `gclient sync` will fail.
+    deps_path = os.path.join(chromium_src_dir, deps_file)
+    remove_deps_entry(deps_path, "'src'")
+
 def onerror(func, path, exc_info):
   """
   Error handler for ``shutil.rmtree``.
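For orientation, run() appears to be the script's own helper for executing a command in a given working directory with depot_tools available. Outside of automate-git.py, the same DEPS patching step could be reproduced with a plain subprocess call; a minimal sketch, with illustrative paths that are not taken from the diff:

    import subprocess

    # Roughly equivalent to the run() call in apply_deps_patch() above.
    cef_dir = '/path/to/cef'                    # illustrative
    chromium_src_dir = '/path/to/chromium/src'  # illustrative
    subprocess.check_call(
        ['python', cef_dir + '/tools/patcher.py',
         '--patch-file', cef_dir + '/patch/patches/.DEPS.git.patch',
         '--patch-dir', chromium_src_dir],
        cwd=chromium_src_dir)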
@@ -526,17 +542,20 @@ if not options.noupdate:
   else:
     run('update_depot_tools', depot_tools_dir, depot_tools_dir);

-# Determine the git executables to use.
+# Determine the executables to use.
 if platform == 'windows':
   # Force use of the version bundled with depot_tools.
   git_exe = os.path.join(depot_tools_dir, 'git.bat')
+  python_exe = os.path.join(depot_tools_dir, 'python.bat')
   if options.dryrun and not os.path.exists(git_exe):
     sys.stdout.write("WARNING: --dry-run assumes that depot_tools" \
                      " is already in your PATH. If it isn't\nplease" \
                      " specify a --depot-tools-dir value.\n")
     git_exe = 'git.bat'
+    python_exe = 'python.bat'
 else:
   git_exe = 'git'
+  python_exe = 'git'


 ##
@@ -652,6 +671,7 @@ if not os.path.exists(gclient_file) or options.forceconfig:
       "u'build/scripts/command_wrapper/bin': None, "+\
       "u'build/scripts/gsd_generate_index': None, "+\
       "u'build/scripts/private/data/reliability': None, "+\
+      "u'build/scripts/tools/deps2git': None, "+\
       "u'build/third_party/lighttpd': None, "+\
       "u'commit-queue': None, "+\
       "u'depot_tools': None, "+\
@@ -686,7 +706,6 @@ if os.path.exists(chromium_src_dir):
   msg("Chromium URL: %s" % (get_git_url(chromium_src_dir)))

 # Determine the Chromium checkout options required by CEF.
-chromium_nohooks = False
 if options.chromiumcheckout == '':
   # Read the build compatibility file to identify the checkout name.
   compat_path = os.path.join(cef_dir, 'CHROMIUM_BUILD_COMPATIBILITY.txt')
@@ -696,10 +715,6 @@ if options.chromiumcheckout == '':
     chromium_checkout = config['chromium_checkout']
   else:
     raise Exception("Missing chromium_checkout value in %s" % (compat_path))
-
-  # Some branches run hooks using CEF instead of Chromium.
-  if 'chromium_nohooks' in config:
-    chromium_nohooks = config['chromium_nohooks']
 else:
   chromium_checkout = options.chromiumcheckout

@@ -755,20 +770,16 @@ if chromium_checkout_changed:
       (git_exe, ('--force ' if options.forceclean else ''), chromium_checkout), \
       chromium_src_dir, depot_tools_dir)

-if cef_branch != 'trunk':
-  # Remove the 'src' entry from .DEPS.git for release branches.
-  # Otherwise, `gclient sync` will fail.
-  deps_path = os.path.join(chromium_src_dir, '.DEPS.git')
-  remove_deps_entry(deps_path, "'src'")
+# Patch the Chromium DEPS file if necessary.
+apply_deps_patch()

 # Set the GYP_CHROMIUM_NO_ACTION value temporarily so that `gclient sync` does
 # not run gyp.
 os.environ['GYP_CHROMIUM_NO_ACTION'] = '1'

 # Update third-party dependencies including branch/tag information.
-run("gclient sync %s%s--with_branch_heads --jobs 16" % \
-    (('--reset ' if options.forceclean else ''), \
-     ('--nohooks ' if chromium_nohooks else '')), \
+run("gclient sync %s--with_branch_heads --jobs 16" % \
+    (('--reset ' if options.forceclean else '')), \
     chromium_dir, depot_tools_dir)

 # Clear the GYP_CHROMIUM_NO_ACTION value.
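With the '--nohooks' argument gone from the hunk above, the gclient command assembled by the format string now takes one of two forms depending on options.forceclean; a quick illustration of the string formatting:

    # Illustration of the new format string (forceclean stands in for
    # options.forceclean).
    forceclean = True
    cmd = "gclient sync %s--with_branch_heads --jobs 16" % \
          ('--reset ' if forceclean else '')
    # forceclean == True  -> "gclient sync --reset --with_branch_heads --jobs 16"
    # forceclean == False -> "gclient sync --with_branch_heads --jobs 16"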
@@ -9,6 +9,9 @@
 CEF Changes
 -----------

+2015/04/22
+- Write to stdout instead of using warning() for messages
+
 2013/01/03
 - Add support for patches containing new files

@@ -28,7 +31,7 @@ import re
 from stat import *
 # cStringIO doesn't support unicode in 2.5
 from StringIO import StringIO
-from logging import debug, info, warning
+from logging import debug, info

 from os.path import exists, isfile
 from os import unlink
@@ -55,6 +58,11 @@ def from_string(s):
     )


+def msg(message):
+  """ Output a message. """
+  sys.stdout.write('--> ' + message + "\n")
+
+
 class HunkInfo(object):
   """ parsed hunk data (hunk starts with @@ -R +R @@) """

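The msg() helper added above is what the remaining hunks in this file switch the old warning() calls over to, so patch diagnostics end up on stdout with a uniform prefix. A minimal usage sketch (the file name is made up):

    import sys

    def msg(message):
      """ Output a message. """
      sys.stdout.write('--> ' + message + "\n")

    msg("already patched %s" % "chrome/browser/example.cc")
    # prints: --> already patched chrome/browser/example.cc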
@@ -160,7 +168,7 @@ class PatchInfo(object):
           hunkinfo.text.append(line)
           # todo: handle \ No newline cases
         else:
-          warning("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
+          msg("invalid hunk no.%d at %d for target file %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
           # add hunk status node
           self.hunks[nextfileno-1].append(hunkinfo.copy())
           self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
@@ -170,7 +178,7 @@ class PatchInfo(object):

         # check exit conditions
         if hunkactual["linessrc"] > hunkinfo.linessrc or hunkactual["linestgt"] > hunkinfo.linestgt:
-          warning("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
+          msg("extra hunk no.%d lines at %d for target %s" % (nexthunkno, lineno+1, self.target[nextfileno-1]))
           # add hunk status node
           self.hunks[nextfileno-1].append(hunkinfo.copy())
           self.hunks[nextfileno-1][nexthunkno-1]["invalid"] = True
@@ -186,7 +194,7 @@ class PatchInfo(object):
         # detect mixed window/unix line ends
         ends = self.hunkends[nextfileno-1]
         if ((ends["cr"]!=0) + (ends["crlf"]!=0) + (ends["lf"]!=0)) > 1:
-          warning("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
+          msg("inconsistent line ends in patch hunks for %s" % self.source[nextfileno-1])
         if debugmode:
           debuglines = dict(ends)
           debuglines.update(file=self.target[nextfileno-1], hunk=nexthunkno)
@@ -208,14 +216,14 @@ class PatchInfo(object):
       if filenames:
         if line.startswith("--- "):
           if nextfileno in self.source:
-            warning("skipping invalid patch for %s" % self.source[nextfileno])
+            msg("skipping invalid patch for %s" % self.source[nextfileno])
             del self.source[nextfileno]
             # double source filename line is encountered
             # attempt to restart from this second line
           re_filename = "^--- ([^\t]+)"
           match = re.match(re_filename, line)
           if not match:
-            warning("skipping invalid filename at line %d" % lineno)
+            msg("skipping invalid filename at line %d" % lineno)
             # switch back to header state
             filenames = False
             header = True
@@ -223,16 +231,16 @@ class PatchInfo(object):
             self.source.append(match.group(1).strip())
         elif not line.startswith("+++ "):
           if nextfileno in self.source:
-            warning("skipping invalid patch with no target for %s" % self.source[nextfileno])
+            msg("skipping invalid patch with no target for %s" % self.source[nextfileno])
             del self.source[nextfileno]
           else:
             # this should be unreachable
-            warning("skipping invalid target patch")
+            msg("skipping invalid target patch")
           filenames = False
           header = True
         else:
           if nextfileno in self.target:
-            warning("skipping invalid patch - double target at line %d" % lineno)
+            msg("skipping invalid patch - double target at line %d" % lineno)
             del self.source[nextfileno]
             del self.target[nextfileno]
             nextfileno -= 1
@@ -244,7 +252,7 @@ class PatchInfo(object):
           re_filename = "^\+\+\+ ([^\t]+)"
           match = re.match(re_filename, line)
           if not match:
-            warning("skipping invalid patch - no target filename at line %d" % lineno)
+            msg("skipping invalid patch - no target filename at line %d" % lineno)
             # switch back to header state
             filenames = False
             header = True
@@ -264,7 +272,7 @@ class PatchInfo(object):
         match = re.match("^@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))?", line)
         if not match:
           if nextfileno-1 not in self.hunks:
-            warning("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
+            msg("skipping invalid patch with no hunks for file %s" % self.target[nextfileno-1])
             # switch to header state
             hunkhead = False
             header = True
@@ -296,7 +304,7 @@ class PatchInfo(object):
           continue
       else:
         if not hunkskip:
-          warning("patch file incomplete - %s" % filename)
+          msg("patch file incomplete - %s" % filename)
           # sys.exit(?)
         else:
           # duplicated message when an eof is reached
@@ -319,7 +327,7 @@ class PatchInfo(object):
       if len(self.hunks[fileno]) == 1 and self.hunks[fileno][0].startsrc == 0:
         hunklines = [x[1:].rstrip("\r\n") for x in self.hunks[fileno][0].text if x[0] in " +"]
         if len(hunklines) > 0:
-          warning("creating file %s" % (f2patch))
+          msg("creating file %s" % (f2patch))
           f = open(f2patch, "wb")
           for line in hunklines:
             f.write(line + "\n")
@@ -328,10 +336,10 @@ class PatchInfo(object):

       f2patch = self.target[fileno]
       if not exists(f2patch):
-        warning("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
+        msg("source/target file does not exist\n--- %s\n+++ %s" % (filename, f2patch))
         continue
       if not isfile(f2patch):
-        warning("not a file - %s" % f2patch)
+        msg("not a file - %s" % f2patch)
         continue
       filename = f2patch

@@ -385,29 +393,29 @@ class PatchInfo(object):
       else:
         if hunkno < len(self.hunks[fileno]) and \
            (len(self.hunks[fileno]) != 1 or self.hunks[fileno][0].startsrc != 0):
-          warning("premature end of source file %s at hunk %d" % (filename, hunkno+1))
+          msg("premature end of source file %s at hunk %d" % (filename, hunkno+1))

       f2fp.close()

       if validhunks < len(self.hunks[fileno]):
         if check_patched(filename, self.hunks[fileno]):
-          warning("already patched %s" % filename)
+          msg("already patched %s" % filename)
         else:
-          warning("source file is different - %s" % filename)
+          msg("source file is different - %s" % filename)
       if canpatch:
         backupname = filename+".orig"
         if exists(backupname):
-          warning("can't backup original file to %s - aborting" % backupname)
+          msg("can't backup original file to %s - aborting" % backupname)
         else:
           import shutil
           shutil.move(filename, backupname)
           if patch_hunks(backupname, filename, self.hunks[fileno]):
-            warning("successfully patched %s" % filename)
+            msg("successfully patched %s" % filename)
             unlink(backupname)
           else:
-            warning("error patching file %s" % filename)
+            msg("error patching file %s" % filename)
             shutil.copy(filename, filename+".invalid")
-            warning("invalid version is saved to %s" % filename+".invalid")
+            msg("invalid version is saved to %s" % filename+".invalid")
             # todo: proper rejects
             shutil.move(backupname, filename)

@@ -461,7 +469,7 @@ def check_patched(filename, hunks):
         if not len(line):
           raise NoMatch
         if line.rstrip("\r\n") != hline[1:].rstrip("\r\n"):
-          warning("file is not patched - failed hunk: %d" % (hno+1))
+          msg("file is not patched - failed hunk: %d" % (hno+1))
           raise NoMatch
   except NoMatch:
     matched = False
tools/patcher.py (140 lines changed)
@@ -10,13 +10,61 @@ from file_util import *
 from patch_util import *


-# cannot be loaded as a module
+# Cannot be loaded as a module.
 if __name__ != "__main__":
-  sys.stderr.write('This file cannot be loaded as a module!')
+  sys.stdout.write('This file cannot be loaded as a module!')
   sys.exit()


-# parse command-line options
+def normalize_dir(dir):
+  ''' Normalize the directory value. '''
+  dir = dir.replace('\\', '/')
+  if dir[-1] != '/':
+    dir += '/'
+  return dir
+
+def patch_file(patch_file, patch_dir):
+  ''' Apply a single patch file in a single directory. '''
+  if not os.path.isfile(patch_file):
+    raise Exception('Patch file %s does not exist.' % patch_file)
+
+  sys.stdout.write('Reading patch file %s\n' % patch_file)
+  patchObj = from_file(patch_file)
+  patchObj.apply(normalize_dir(patch_dir))
+
+def patch_config(config_file):
+  ''' Apply patch files based on a configuration file. '''
+  # Normalize the patch directory value.
+  patchdir = normalize_dir(os.path.dirname(os.path.abspath(config_file)))
+
+  if not os.path.isfile(config_file):
+    raise Exception('Patch config file %s does not exist.' % config_file)
+
+  # Parse the configuration file.
+  scope = {}
+  execfile(config_file, scope)
+  patches = scope["patches"]
+
+  for patch in patches:
+    file = patchdir+'patches/'+patch['name']+'.patch'
+    dopatch = True
+
+    if 'condition' in patch:
+      # Check that the environment variable is set.
+      if patch['condition'] not in os.environ:
+        sys.stdout.write('Skipping patch file %s\n' % file)
+        dopatch = False
+
+    if dopatch:
+      patch_file(file, patch['path'])
+      if 'note' in patch:
+        separator = '-' * 79 + '\n'
+        sys.stdout.write(separator)
+        sys.stdout.write('NOTE: %s\n' % patch['note'])
+        sys.stdout.write(separator)
+
+
+# Parse command-line options.
 disc = """
 This utility applies patch files.
 """
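patch_config() above loads the configuration with execfile(), so the config is itself a Python file that defines a `patches` list. A hypothetical example of what such a file could look like (entry names, paths, and the environment variable are illustrative, not taken from this commit):

    # Hypothetical patch config consumed by patch_config(); for each entry the
    # tool applies patches/<name>.patch inside the directory given by 'path'.
    patches = [
      {
        # Applied unconditionally.
        'name': 'example_patch',
        'path': '../../example/target/dir/',
      },
      {
        # Only applied when the named environment variable is set; the
        # optional 'note' is printed after the patch is applied.
        'name': 'conditional_patch',
        'path': '../../another/target/dir/',
        'condition': 'EXAMPLE_ENV_VAR',
        'note': 'Remember to re-run the generator after applying this patch.',
      },
    ]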
@@ -24,80 +72,16 @@ This utility applies patch files.
 parser = OptionParser(description=disc)
 parser.add_option('--patch-config', dest='patchconfig', metavar='DIR',
                   help='patch configuration file')
+parser.add_option('--patch-file', dest='patchfile', metavar='FILE',
+                  help='patch source file')
+parser.add_option('--patch-dir', dest='patchdir', metavar='DIR',
+                  help='patch target directory')
 (options, args) = parser.parse_args()

-# the patchconfig option is required
-if options.patchconfig is None:
-  parser.print_help(sys.stdout)
-  sys.exit()
-
-# normalize the patch directory value
-patchdir = os.path.dirname(os.path.abspath(options.patchconfig)).replace('\\', '/')
-if patchdir[-1] != '/':
-  patchdir += '/'
-
-# check if the patching should be skipped
-if os.path.isfile(patchdir + 'NOPATCH'):
-  nopatch = True
-  sys.stdout.write('NOPATCH exists -- files have not been patched.\n')
+if not options.patchconfig is None:
+  patch_config(options.patchconfig)
+elif not options.patchfile is None and not options.patchdir is None:
+  patch_file(options.patchfile, options.patchdir)
 else:
-  nopatch = False
-  # locate the patch configuration file
-  if not os.path.isfile(options.patchconfig):
-    sys.stderr.write('File '+options.patchconfig+' does not exist.\n')
-    sys.exit()
-
-  scope = {}
-  execfile(options.patchconfig, scope)
-  patches = scope["patches"]
-
-  for patch in patches:
-    file = patchdir+'patches/'+patch['name']+'.patch'
-    dopatch = True
-
-    if 'condition' in patch:
-      # Check that the environment variable is set.
-      if patch['condition'] not in os.environ:
-        sys.stderr.write('Skipping patch file '+file+'\n')
-        dopatch = False
-
-    if dopatch:
-      if not os.path.isfile(file):
-        sys.stderr.write('Patch file '+file+' does not exist.\n')
-      else:
-        sys.stderr.write('Reading patch file '+file+'\n')
-        dir = patch['path']
-        patchObj = from_file(file)
-        patchObj.apply(dir)
-        if 'note' in patch:
-          separator = '-' * 79 + '\n'
-          sys.stderr.write(separator)
-          sys.stderr.write('NOTE: '+patch['note']+'\n')
-          sys.stderr.write(separator)
-
-# read the current include file, if any
-incfile = patchdir + 'patch_state.h'
-if nopatch:
-  incnew = """// This file is generated by the patch tool and should not be edited manually.
-#ifndef _PATCH_STATE_H
-#define _PATCH_STATE_H
-// No patches have been applied to the Chromium/WebKit source base.
-#define CEF_PATCHES_APPLIED 0
-#endif // _PATCH_STATE_H
-"""
-else:
-  incnew = """// This file is generated by the patch tool and should not be edited manually.
-#ifndef _PATCH_STATE_H
-#define _PATCH_STATE_H
-// Patches have been applied to the Chromium/WebKit source base.
-#define CEF_PATCHES_APPLIED 1
-#endif // _PATCH_STATE_H
-"""
-
-inccur = ''
-if os.path.isfile(incfile):
-  inccur = read_file(incfile)
-
-if inccur != incnew:
-  sys.stdout.write('Writing file '+incfile+'.\n')
-  write_file(incfile, incnew)
+  parser.print_help(sys.stdout)
+  sys.exit()
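In short, the rewritten tool now has two entry points from the command line, plus the fallback to printing help when neither option combination is given. A sketch of both invocations (paths are illustrative, not taken from the diff):

    import subprocess

    # Apply every patch listed in a configuration file (existing behavior,
    # now routed through patch_config()).
    subprocess.check_call(['python', 'tools/patcher.py',
                           '--patch-config', '/path/to/patch.cfg'])

    # Apply one patch file to one directory (new behavior, used by
    # automate-git.py to patch the Chromium .DEPS.git file).
    subprocess.check_call(['python', 'tools/patcher.py',
                           '--patch-file', '/path/to/.DEPS.git.patch',
                           '--patch-dir', '/path/to/chromium/src'])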