mirror of https://bitbucket.org/chromiumembedded/cef
Fix svn:eol-style property.
git-svn-id: https://chromiumembedded.googlecode.com/svn/trunk@1470 5089003a-bbd8-11dd-ad1f-f1f9622dbc98
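(A fix like this is typically applied with "svn propset svn:eol-style native <file>" on each affected file; "native" is an assumption here, since the property value itself is not shown in this diff.)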
@@ -1,246 +1,246 @@
# Copyright (c) 2013 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from file_util import *
import os
import re
import shutil
import string
import sys
import textwrap
import time
import itertools
import hashlib


class cef_api_hash:
  """ CEF API hash calculator """

  def __init__(self, headerdir, debugdir = None, verbose = False):
    if headerdir is None or len(headerdir) == 0:
      raise AssertionError("headerdir is not specified")

    self.__headerdir = headerdir;
    self.__debugdir = debugdir;
    self.__verbose = verbose;
    self.__debug_enabled = not (self.__debugdir is None) and len(self.__debugdir) > 0;

    self.platforms = [ "windows", "macosx", "linux" ];

    self.platform_files = {
      "windows": [
        "internal/cef_types_win.h"
      ],
      "macosx": [
        "internal/cef_types_mac.h",
      ],
      "linux": [
        "internal/cef_types_linux.h"
      ]
    };

    self.included_files = [
      "cef_trace_event.h"
    ];

    self.excluded_files = [
      "cef_version.h",
      "internal/cef_tuple.h",
      "internal/cef_types_wrappers.h",
      "internal/cef_string_wrappers.h",
      "internal/cef_win.h",
      "internal/cef_mac.h",
      "internal/cef_linux.h",
    ];

  def calculate(self):
    filenames = [filename for filename in self.__get_filenames() if not filename in self.excluded_files]

    objects = []
    for filename in filenames:
      if self.__verbose:
        print "Processing " + filename + "..."
      content = read_file(os.path.join(self.__headerdir, filename), True)
      platforms = list([p for p in self.platforms if self.__is_platform_filename(filename, p)])

      # Parsing cef_string.h is a special case: grab only the defined
      # CEF_STRING_TYPE_xxx declarations.
      content_objects = None
      if filename == "internal/cef_string.h":
        content_objects = self.__parse_string_type(content)
      else:
        content_objects = self.__parse_objects(content)

      for o in content_objects:
        o["text"] = self.__prepare_text(o["text"])
        o["platforms"] = platforms
        o["filename"] = filename
        objects.append(o)

    # Objects are sorted including the filename so that the universal hashes
    # are stable.
    objects = sorted(objects, key = lambda o: o["name"] + "@" + o["filename"])

    if self.__debug_enabled:
      namelen = max([len(o["name"]) for o in objects])
      filenamelen = max([len(o["filename"]) for o in objects])
      dumpsig = [];
      for o in objects:
        dumpsig.append(format(o["name"], str(namelen) + "s") + "|" + format(o["filename"], "" + str(filenamelen) + "s") + "|" + o["text"]);
      self.__write_debug_file("objects.txt", dumpsig)

    revisions = { };

    for platform in itertools.chain(["universal"], self.platforms):
      sig = self.__get_final_sig(objects, platform)
      if self.__debug_enabled:
        self.__write_debug_file(platform + ".sig", sig)
      rev = hashlib.sha1(sig).digest();
      revstr = ''.join(format(ord(i),'0>2x') for i in rev)
      revisions[platform] = revstr

    return revisions

  def __parse_objects(self, content):
    """ Returns an array of objects parsed from the header content. """
    objects = []
    content = re.sub("//.*\n", "", content)

    # function declarations
    for m in re.finditer("\nCEF_EXPORT\s+?.*?\s+?(\w+)\s*?\(.*?\)\s*?;", content, flags = re.DOTALL):
      object = {
        "name": m.group(1),
        "text": m.group(0).strip()
      }
      objects.append(object)

    # structs
    for m in re.finditer("\ntypedef\s+?struct\s+?(\w+)\s+?\{.*?\}\s+?(\w+)\s*?;", content, flags = re.DOTALL):
      object = {
        "name": m.group(2),
        "text": m.group(0).strip()
      }
      objects.append(object)

    # enums
    for m in re.finditer("\nenum\s+?(\w+)\s+?\{.*?\}\s*?;", content, flags = re.DOTALL):
      object = {
        "name": m.group(1),
        "text": m.group(0).strip()
      }
      objects.append(object)

    # typedefs
    for m in re.finditer("\ntypedef\s+?.*?\s+(\w+);", content, flags = 0):
      object = {
        "name": m.group(1),
        "text": m.group(0).strip()
      }
      objects.append(object)

    return objects

  def __parse_string_type(self, content):
    """ Grab defined CEF_STRING_TYPE_xxx declarations. """
    objects = []
    for m in re.finditer("\n\s*?#\s*?define\s+?(CEF_STRING_TYPE_\w+)\s+?.*?\n", content, flags = 0):
      object = {
        "name": m.group(1),
        "text": m.group(0),
      }
      objects.append(object)
    return objects

  def __prepare_text(self, text):
    text = text.strip()
    text = re.sub("\s+", " ", text);
    text = re.sub("\(\s+", "(", text);
    return text

  def __get_final_sig(self, objects, platform):
    sig = []

    for o in objects:
      if platform == "universal" or platform in o["platforms"]:
        sig.append(o["text"])

    return "\n".join(sig)

  def __get_filenames(self):
    """ Returns file names to be processed, relative to headerdir. """
    headers = [os.path.join(self.__headerdir, filename) for filename in self.included_files];
    headers = itertools.chain(headers, get_files(os.path.join(self.__headerdir, "capi", "*.h")))
    headers = itertools.chain(headers, get_files(os.path.join(self.__headerdir, "internal", "*.h")))

    for v in self.platform_files.values():
      headers = itertools.chain(headers, [os.path.join(self.__headerdir, f) for f in v])

    normalized = [os.path.relpath(filename, self.__headerdir) for filename in headers];
    normalized = [f.replace('\\', '/').lower() for f in normalized];

    return list(set(normalized));

  def __is_platform_filename(self, filename, platform):
    if platform == "universal":
      return True
    if not platform in self.platform_files:
      return False
    listed = False
    for p in self.platforms:
      if filename in self.platform_files[p]:
        if p == platform:
          return True
        else:
          listed = True
    return not listed

  def __write_debug_file(self, filename, content):
    make_dir(self.__debugdir);
    outfile = os.path.join(self.__debugdir, filename);
    dir = os.path.dirname(outfile);
    make_dir(dir);
    if not isinstance(content, basestring):
      content = "\n".join(content)
    write_file(outfile, content)


if __name__ == "__main__":
  from optparse import OptionParser
  import time

  disc = """
This utility calculates the CEF API hash.
  """

  parser = OptionParser(description=disc)
  parser.add_option('--cpp-header-dir', dest='cppheaderdir', metavar='DIR',
                    help='input directory for C++ header files [required]')
  parser.add_option('--debug-dir', dest='debugdir', metavar='DIR',
                    help='intermediate directory for easy debugging')
  parser.add_option('-v', '--verbose',
                    action='store_true', dest='verbose', default=False,
                    help='output detailed status information')
  (options, args) = parser.parse_args()

  # the cppheaderdir option is required
  if options.cppheaderdir is None:
    parser.print_help(sys.stdout)
    sys.exit()

  # calculate
  c_start_time = time.time()

  calc = cef_api_hash(options.cppheaderdir, options.debugdir, options.verbose);
  revisions = calc.calculate();

  c_completed_in = time.time() - c_start_time

  print "{"
  for k in sorted(revisions.keys()):
    print format("\"" + k + "\"", ">12s") + ": \"" + revisions[k] + "\""
  print "}"
  # print
  # print 'Completed in: ' + str(c_completed_in)
  # print

  # print "Press any key to continue...";
  # sys.stdin.readline();
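A minimal usage sketch of the tool above, plus a demonstration of the function-declaration regex it applies. The "include" header path, the "hash_debug" directory, the sample declaration, and the digest shape are illustrative assumptions, not values from this commit:

import re

# Command-line form (paths are assumptions about the local checkout layout):
#   python cef_api_hash.py --cpp-header-dir=include --debug-dir=hash_debug -v

# Programmatic form, same assumption about the header directory.
calc = cef_api_hash("include", verbose = True)
revisions = calc.calculate()
for platform in sorted(revisions.keys()):
  # One 40-character hex SHA-1 digest per platform, plus "universal".
  print platform + ": " + revisions[platform]

# What the function-declaration pattern extracts from a header line:
sample = "\nCEF_EXPORT int cef_version_info(int entry);"
m = re.search("\nCEF_EXPORT\s+?.*?\s+?(\w+)\s*?\(.*?\)\s*?;", sample, re.DOTALL)
print m.group(1)  # prints "cef_version_info"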
@@ -1,13 +1,13 @@
# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import datetime

def get_year():
  """ Returns the current year. """
  return str(datetime.datetime.now().year)

def get_date():
  """ Returns the current date. """
  return datetime.datetime.now().strftime('%B %d, %Y')
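For reference, a quick sketch of what these helpers return; the example values assume a hypothetical 2013-era run:

print get_year()  # e.g. "2013"
print get_date()  # e.g. "September 27, 2013" (the '%B %d, %Y' format)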
File diff suppressed because it is too large
@@ -1,70 +1,70 @@
# Copyright (c) 2012 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

import os
import sys
import subprocess
import urllib
import xml.etree.ElementTree as ET

def check_url(url):
  """ Check the URL and raise an exception if invalid. """
  if ':' in url[:7]:
    parts = url.split(':', 1)
    if (parts[0] == 'http' or parts[0] == 'https' or parts[0] == 'svn') and \
        parts[1] == urllib.quote(parts[1]):
      return url
  sys.stderr.write('Invalid URL: '+url+"\n")
  raise Exception('Invalid URL: '+url)

def get_svn_info(path):
  """ Retrieves the URL and revision from svn info. """
  url = 'None'
  rev = 'None'
  if path[0:4] == 'http' or os.path.exists(path):
    try:
      if sys.platform == 'win32':
        # Force use of the SVN version bundled with depot_tools.
        svn = 'svn.bat'
      else:
        svn = 'svn'
      p = subprocess.Popen([svn, 'info', '--xml', path], \
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      out, err = p.communicate()
      if err == '':
        tree = ET.ElementTree(ET.fromstring(out))
        entry = tree.getroot().find('entry')
        url = entry.find('url').text
        rev = entry.attrib['revision']
      else:
        raise Exception("Failed to execute svn info:\n"+err+"\n")
    except IOError, (errno, strerror):
      sys.stderr.write('Failed to read svn info: '+strerror+"\n")
      raise
    except:
      raise
  return {'url': url, 'revision': rev}

def get_revision(path = '.'):
  """ Retrieves the revision from svn info. """
  info = get_svn_info(path)
  if info['revision'] == 'None':
    raise Exception('Unable to retrieve SVN revision for "'+path+'"')
  return info['revision']

def get_changed_files(path = '.'):
  """ Retrieves the list of changed files from svn status. """
  files = []
  if os.path.exists(path):
    try:
      stream = os.popen('svn status '+path)
      for line in stream:
        status = line[0]
        # Return paths with add, modify and switch status.
        if status == 'A' or status == 'M' or status == 'S':
          files.append(line[8:].strip())
    except IOError, (errno, strerror):
      sys.stderr.write('Failed to read svn status: '+strerror+"\n")
      raise
  return files
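And a short usage sketch for the helpers above, assuming the working directory is an SVN checkout; the URL, revision, and file list shown are illustrative:

# Illustrative only; actual values depend on the local checkout.
print check_url('https://chromiumembedded.googlecode.com/svn/trunk')
info = get_svn_info('.')
print info['url'] + ' @ r' + info['revision']  # e.g. ".../trunk @ r1470"
print get_changed_files('.')                   # e.g. ['tools/cef_api_hash.py']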