# Copyright (c) 2011 The Chromium Embedded Framework Authors. All rights
# reserved. Use of this source code is governed by a BSD-style license that
# can be found in the LICENSE file.

from __future__ import absolute_import
from __future__ import print_function
from cef_version import VersionFormatter
from date_util import *
from exec_util import exec_cmd
from file_util import *
import git_util as git
from io import open
from make_cmake import process_cmake_template
from optparse import OptionParser
import os
import re
import shlex
import subprocess
import sys
import tarfile
import zipfile


def create_zip_archive(input_dir):
  """ Creates a zip archive of the specified input directory. """
  zip_file = input_dir + '.zip'
  zf = zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED, True)

  def addDir(dir):
    for f in os.listdir(dir):
      full_path = os.path.join(dir, f)
      if os.path.isdir(full_path):
        addDir(full_path)
      else:
        zf.write(full_path, os.path.relpath(full_path, \
                 os.path.join(input_dir, os.pardir)))

  addDir(input_dir)
  zf.close()


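# Illustrative example for create_zip_archive() above (not executed; the path
# is hypothetical): create_zip_archive('/out/cef_binary_xxx') produces
# /out/cef_binary_xxx.zip whose entries are stored relative to the parent
# directory, i.e. they all begin with 'cef_binary_xxx/'.

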
def create_tar_archive(input_dir, format):
  """ Creates a tar archive of the specified input directory. """
  # Supported formats include "gz" and "bz2".
  tar_file = input_dir + '.tar.' + format
  tf = tarfile.open(tar_file, "w:" + format)
  # The default tar format changed from GNU_FORMAT to PAX_FORMAT in Python 3.8.
  # However, PAX_FORMAT generates additional @PaxHeader entries and truncates
  # file names on Windows, so we'll stick with the previous default.
  tf.format = tarfile.GNU_FORMAT
  tf.add(input_dir, arcname=os.path.basename(input_dir))
  tf.close()


def create_7z_archive(input_dir, format):
  """ Creates a 7z archive of the specified input directory. """
  # CEF_COMMAND_7ZIP might be "c:\Program Files (x86)\7Zip\7z.exe" or
  # /usr/bin/7za, or simply 7z if the user knows that it's in the PATH var.
  # Supported formats depend on the 7za version -- check the 7-zip
  # documentation for details.
  command = os.environ['CEF_COMMAND_7ZIP']
  working_dir = os.path.abspath(os.path.join(input_dir, os.pardir))

  tar_file = None
  if format in ('xz', 'gzip', 'bzip2'):
    # These formats only support one file per archive. Create a tar file first.
    tar_file = input_dir + '.tar'
    run('"%s" a -ttar -y %s %s' % (command, tar_file, input_dir), working_dir)
    zip_file = tar_file + '.' + format
    zip_input = tar_file
  else:
    zip_file = input_dir + '.' + format
    zip_input = input_dir

  # Create the compressed archive.
  run('"%s" a -t%s -y %s %s' % (command, format, zip_file, zip_input),
      working_dir)

  if not tar_file is None:
    remove_file(tar_file)


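# Illustrative example for create_7z_archive() above (not executed): with
# CEF_COMMAND_7ZIP=/usr/bin/7za and format 'xz', the function first runs
#   "/usr/bin/7za" a -ttar -y <input_dir>.tar <input_dir>
# and then compresses the intermediate tar into <input_dir>.tar.xz before
# removing the temporary .tar file.

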
def create_output_dir(name, parent_dir):
  """ Creates an output directory and adds the path to the archive list. """
  output_dir = os.path.abspath(os.path.join(parent_dir, name))
  remove_dir(output_dir, options.quiet)
  make_dir(output_dir, options.quiet)
  archive_dirs.append(output_dir)
  return output_dir


def get_readme_component(name):
  """ Loads a README file component. """
  paths = []
  # platform directory
  if platform == 'windows':
    platform_cmp = 'win'
  elif platform == 'mac':
    platform_cmp = 'mac'
  elif platform == 'linux':
    platform_cmp = 'linux'
  paths.append(os.path.join(script_dir, 'distrib', platform_cmp))

  # shared directory
  paths.append(os.path.join(script_dir, 'distrib'))

  # load the file if it exists
  for path in paths:
    file = os.path.join(path, 'README.' + name + '.txt')
    if path_exists(file):
      return read_file(file)

  raise Exception('Readme component not found: ' + name)


def create_readme():
  """ Creates the README.TXT file. """
  # gather the components
  header_data = get_readme_component('header')
  mode_data = get_readme_component(mode)
  redistrib_data = get_readme_component('redistrib')
  footer_data = get_readme_component('footer')

  # format the file
  data = header_data + '\n\n' + mode_data
  if mode != 'sandbox' and mode != 'tools':
    data += '\n\n' + redistrib_data
  data += '\n\n' + footer_data
  data = data.replace('$CEF_URL$', cef_url)
  data = data.replace('$CEF_REV$', cef_rev)
  data = data.replace('$CEF_VER$', cef_ver)
  data = data.replace('$CHROMIUM_URL$', chromium_url)
  data = data.replace('$CHROMIUM_REV$', chromium_rev)
  data = data.replace('$CHROMIUM_VER$', chromium_ver)
  data = data.replace('$DATE$', date)

  if platform == 'windows':
    platform_str = 'Windows'
  elif platform == 'mac':
    platform_str = 'MacOS'
  elif platform == 'linux':
    platform_str = 'Linux'

  data = data.replace('$PLATFORM$', platform_str)

  if mode == 'standard':
    distrib_type = 'Standard'
    distrib_desc = 'This distribution contains all components necessary to build and distribute an\n' \
                   'application using CEF on the ' + platform_str + ' platform. Please see the LICENSING\n' \
                   'section of this document for licensing terms and conditions.'
  elif mode == 'minimal':
    distrib_type = 'Minimal'
    distrib_desc = 'This distribution contains the minimal components necessary to build and\n' \
                   'distribute an application using CEF on the ' + platform_str + ' platform. Please see\n' \
                   'the LICENSING section of this document for licensing terms and conditions.'
  elif mode == 'client':
    distrib_type = 'Client'
    if platform == 'linux':
      client_app = 'cefsimple'
    else:
      client_app = 'cefclient'
    distrib_desc = 'This distribution contains a release build of the ' + client_app + ' sample application\n' \
                   'for the ' + platform_str + ' platform. Please see the LICENSING section of this document for\n' \
                   'licensing terms and conditions.'
  elif mode == 'sandbox':
    distrib_type = 'Sandbox'
    distrib_desc = 'This distribution contains only the cef_sandbox static library. Please see\n' \
                   'the LICENSING section of this document for licensing terms and conditions.'
  elif mode == 'tools':
    distrib_type = 'Tools'
    distrib_desc = 'This distribution contains additional tools for building CEF-based applications.'

  data = data.replace('$DISTRIB_TYPE$', distrib_type)
  data = data.replace('$DISTRIB_DESC$', distrib_desc)

  write_file(os.path.join(output_dir, 'README.txt'), data)
  if not options.quiet:
    sys.stdout.write('Creating README.TXT file.\n')


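# Illustrative example for create_readme() above (not executed): placeholders
# such as $CEF_VER$, $CHROMIUM_VER$ and $PLATFORM$ in the README components are
# replaced with values like '1.2.3+g1234567+chromium-100.0.0.0' (a hypothetical
# version string) and 'Windows'.

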
def copy_gtest(tests_dir):
  """ Copy GTest files to the expected directory structure. """
  if not options.quiet:
    sys.stdout.write('Building gtest directory structure.\n')

  src_gtest_dir = os.path.join(cef_dir, 'tools', 'distrib', 'gtest')
  target_gtest_dir = os.path.join(tests_dir, 'gtest')

  # gtest header file at tests/gtest/include/gtest/gtest.h
  target_gtest_header_dir = os.path.join(target_gtest_dir, 'include', 'gtest')
  make_dir(target_gtest_header_dir, options.quiet)
  copy_file(
      os.path.join(src_gtest_dir, 'gtest.h'), target_gtest_header_dir,
      options.quiet)

  # gtest source file at tests/gtest/src/gtest-all.cc
  target_gtest_cpp_dir = os.path.join(target_gtest_dir, 'src')
  make_dir(target_gtest_cpp_dir, options.quiet)
  copy_file(
      os.path.join(src_gtest_dir, 'gtest-all.cc'), target_gtest_cpp_dir,
      options.quiet)

  # gtest LICENSE file at tests/gtest/LICENSE
  copy_file(
      os.path.join(src_gtest_dir, 'LICENSE'), target_gtest_dir, options.quiet)

  # CEF README file at tests/gtest/README.cef
  copy_file(
      os.path.join(src_gtest_dir, 'README.cef'),
      os.path.join(target_gtest_dir, 'README.cef'), options.quiet)

  # Copy tests/gtest/teamcity files
  copy_dir(
      os.path.join(cef_dir, 'tests', 'gtest', 'teamcity'),
      os.path.join(target_gtest_dir, 'teamcity'), options.quiet)


def transfer_doxyfile(dst_dir, quiet):
  """ Transfer and post-process the Doxyfile. """
  src_file = os.path.join(cef_dir, 'Doxyfile')
  if os.path.isfile(src_file):
    data = read_file(src_file)
    data = data.replace("$(PROJECT_NUMBER)", cef_ver)
    write_file(os.path.join(dst_dir, 'Doxyfile'), data)
    if not quiet:
      sys.stdout.write('Creating Doxyfile file.\n')


def transfer_gypi_files(src_dir, gypi_paths, gypi_path_prefix, dst_dir, quiet):
  """ Transfer files from one location to another. """
  for path in gypi_paths:
    src = os.path.join(src_dir, path)
    dst = os.path.join(dst_dir, path.replace(gypi_path_prefix, ''))
    dst_path = os.path.dirname(dst)
    make_dir(dst_path, quiet)
    copy_file(src, dst, quiet)


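# Illustrative example for transfer_gypi_files() above (not executed): with
# gypi_path_prefix='include/', a path such as 'include/cef_app.h' is copied
# from |src_dir| to '<dst_dir>/cef_app.h', creating intermediate directories
# as needed.

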
def extract_toolchain_cmd(build_dir,
                          exe_name,
                          require_toolchain,
                          require_cmd=True):
  """ Extract a toolchain command from the ninja configuration file. """
  toolchain_ninja = os.path.join(build_dir, 'toolchain.ninja')
  if not os.path.isfile(toolchain_ninja):
    if not require_toolchain:
      return None, None
    raise Exception('Missing file: %s' % toolchain_ninja)

  data = read_file(toolchain_ninja)

  cmd = None
  path = None

  # Looking for a value like:
  # command = python3 ../../v8/tools/run.py ./exe_name --arg1 --arg2
  # OR (for cross-compile):
  # command = python3 ../../v8/tools/run.py ./clang_arch1_arch2/exe_name --arg1 --arg2
  findstr = '/%s ' % exe_name
  start = data.find(findstr)
  if start >= 0:
    # Extract the command-line arguments.
    after_start = start + len(findstr)
    end = data.find('\n', after_start)
    if end >= after_start:
      cmd = data[after_start:end].strip()
      print('%s command:' % exe_name, cmd)
      if cmd != '' and not re.match(r"^[0-9a-zA-Z\_\- ./=]{1,}$", cmd):
        cmd = None

    # Extract the relative file path.
    dot = start - 1
    while data[dot].isalnum() or data[dot] == '_':
      dot -= 1
    path = data[dot + 1:start]
    print('%s path:' % exe_name, path)
    if path != '' and not re.match(r"^(win_)?clang_[0-9a-z_]{1,}$", path):
      path = None

  if require_cmd and (cmd is None or path is None):
    raise Exception('Failed to extract %s command from %s' % (exe_name,
                                                              toolchain_ninja))

  return cmd, path


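# Illustrative example for extract_toolchain_cmd() above (not executed): given
# a hypothetical cross-compile rule line in toolchain.ninja such as
#   command = python3 ../../v8/tools/run.py ./clang_x64_v8_arm64/mksnapshot --arg1 --arg2
# the function would return ('--arg1 --arg2', 'clang_x64_v8_arm64'); for a
# non-cross-compile line ('./mksnapshot ...') the returned path is ''.

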
def get_exe_name(exe_name):
  return exe_name + ('.exe' if platform == 'windows' else '')


def get_script_name(script_name):
  return script_name + ('.bat' if platform == 'windows' else '.sh')


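# Illustrative example for the helpers above (not executed): on Windows
# get_exe_name('mksnapshot') returns 'mksnapshot.exe' and
# get_script_name('run_mksnapshot') returns 'run_mksnapshot.bat'; on other
# platforms they return 'mksnapshot' and 'run_mksnapshot.sh'.

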
def transfer_tools_files(script_dir, build_dirs, output_dir):
  for build_dir in build_dirs:
    is_debug = build_dir.find('Debug') >= 0
    dst_dir_name = 'Debug' if is_debug else 'Release'
    dst_dir = os.path.join(output_dir, dst_dir_name)

    # Retrieve the binary path and command-line arguments.
    # See issue #3734 for the expected format.
    mksnapshot_name = 'mksnapshot'
    tool_cmd, tool_dir = extract_toolchain_cmd(
        build_dir, mksnapshot_name, require_toolchain=not options.allowpartial)
    if tool_cmd is None:
      sys.stdout.write("No %s build toolchain for %s.\n" % (dst_dir_name,
                                                            mksnapshot_name))
      continue

    if options.allowpartial and not path_exists(
        os.path.join(build_dir, tool_dir, get_exe_name(mksnapshot_name))):
      sys.stdout.write("No %s build of %s.\n" % (dst_dir_name, mksnapshot_name))
      continue

    # yapf: disable
    binaries = [
        {'path': get_exe_name(mksnapshot_name)},
        {'path': get_exe_name('v8_context_snapshot_generator')},
    ]
    # yapf: enable

    # Transfer binaries.
    copy_files_list(os.path.join(build_dir, tool_dir), dst_dir, binaries)

    # Evaluate command-line arguments and remove relative paths. Copy any input
    # files into the distribution.
    # - Example input path : ../../v8/tools/builtins-pgo/profiles/x64-rl.profile
    # - Example output path: gen/v8/embedded.S
    parsed_cmd = []
    for cmd in tool_cmd.split(' '):
      if cmd.find('/') > 0:
        file_name = os.path.split(cmd)[1]
        if len(file_name) == 0:
          raise Exception('Failed to parse %s command component: %s' % (mksnapshot_name, cmd))
        if cmd.startswith('../../'):
          file_path = os.path.realpath(os.path.join(build_dir, cmd))
          # Validate input file/path.
          if not file_path.startswith(src_dir):
            raise Exception('Invalid %s command input file: %s' % (mksnapshot_name, file_path))
          if not os.path.isfile(file_path):
            raise Exception('Missing %s command input file: %s' % (mksnapshot_name, file_path))
          # Transfer input file.
          copy_file(file_path, os.path.join(dst_dir, file_name), options.quiet)
          cmd = file_name
      parsed_cmd.append(cmd)

    # Write command-line arguments file.
    write_file(os.path.join(dst_dir, 'mksnapshot_cmd.txt'), ' '.join(parsed_cmd))

  # yapf: disable
  files = [
      {'path': get_script_name('run_mksnapshot')},
  ]
  # yapf: enable

  # Transfer other tools files.
  copy_files_list(os.path.join(script_dir, 'distrib', 'tools'), output_dir, files)


def normalize_headers(file, new_path=''):
  """ Normalize headers post-processing. Remove the path component from any
      project include directives. """
  data = read_file(file)
  data = re.sub(r'''#include \"(?!include\/)[a-zA-Z0-9_\/]+\/+([a-zA-Z0-9_\.]+)\"''', \
                "// Include path modified for CEF Binary Distribution.\n#include \""+new_path+"\\1\"", data)
  write_file(file, data)


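# Illustrative example for normalize_headers() above (not executed; the file
# name is hypothetical): a directive such as
#   #include "libcef_dll/wrapper_types.h"
# is rewritten to
#   // Include path modified for CEF Binary Distribution.
#   #include "wrapper_types.h"
# (or "<new_path>wrapper_types.h" when |new_path| is set), while directives
# that already start with "include/" are left untouched.

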
def eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet):
  """ Transfer files based on the specified configuration. """
  if not path_exists(transfer_cfg):
    return

  configs = eval_file(transfer_cfg)
  for cfg in configs:
    dst = os.path.join(output_dir, cfg['target'])

    # perform a copy if source is specified
    if not cfg['source'] is None:
      src = os.path.join(cef_dir, cfg['source'])
      dst_path = os.path.dirname(dst)
      make_dir(dst_path, quiet)
      copy_file(src, dst, quiet)

      # place a readme file in the destination directory
      readme = os.path.join(dst_path, 'README-TRANSFER.txt')
      if not path_exists(readme):
        copy_file(
            os.path.join(script_dir, 'distrib/README-TRANSFER.txt'), readme)

      str = cfg['source'] + "\n"
      with open(readme, 'a', encoding='utf-8') as fp:
        if sys.version_info.major == 2:
          fp.write(str.decode('utf-8'))
        else:
          fp.write(str)

    # perform any required post-processing
    if 'post-process' in cfg:
      post = cfg['post-process']
      if post == 'normalize_headers':
        new_path = ''
        if 'new_header_path' in cfg:
          new_path = cfg['new_header_path']
        normalize_headers(dst, new_path)


def transfer_files(cef_dir, script_dir, transfer_cfg_dir, mode, output_dir,
                   quiet):
  # Non-mode-specific transfers.
  transfer_cfg = os.path.join(transfer_cfg_dir, 'transfer.cfg')
  eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet)
  # Mode-specific transfers.
  transfer_cfg = os.path.join(transfer_cfg_dir, 'transfer_%s.cfg' % mode)
  eval_transfer_file(cef_dir, script_dir, transfer_cfg, output_dir, quiet)


# |paths| is a list of dictionary values with the following keys:
# path         [required]  Input file or directory path relative to |build_dir|.
#                          By default this will also be the output path relative
#                          to |dst_dir|.
# out_path     [optional]  Override the output path relative to |dst_dir|.
# conditional  [optional]  Set to True if the path is conditional on build
#                          settings. Missing conditional paths will not be
#                          treated as an error.
# delete       [optional]  Glob pattern of files to delete after the copy.
def copy_files_list(build_dir, dst_dir, paths):
  ''' Copy the files listed in |paths| from |build_dir| to |dst_dir|. '''
  for entry in paths:
    source_path = os.path.join(build_dir, entry['path'])
    if os.path.exists(source_path):
      target_path = os.path.join(dst_dir, entry['out_path']
                                 if 'out_path' in entry else entry['path'])
      make_dir(os.path.dirname(target_path), options.quiet)
      if os.path.isdir(source_path):
        copy_dir(source_path, target_path, options.quiet)
        if 'delete' in entry:
          for delete_path in get_files(
              os.path.join(target_path, entry['delete'])):
            if not os.path.isdir(delete_path):
              remove_file(delete_path, options.quiet)
            else:
              raise Exception('Refusing to delete directory: %s' % delete_path)
      else:
        copy_file(source_path, target_path, options.quiet)
    else:
      if 'conditional' in entry and entry['conditional']:
        sys.stdout.write('Missing conditional path: %s.\n' % source_path)
      else:
        raise Exception('Missing required path: %s' % source_path)


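# Illustrative example for copy_files_list() above (not executed): an entry
# such as {'path': 'locales', 'delete': '*.info'} copies the 'locales'
# directory and then removes any '*.info' files from the copied directory,
# while {'path': 'snapshot_blob.bin', 'conditional': True} is reported but not
# treated as an error when the file is missing from the build output.

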
def get_exported_symbols(file):
  """ Returns the global symbols exported by |file|. """
  symbols = []

  # Each symbol line has a value like:
  # 0000000000000000 T _cef_sandbox_initialize
  cmdline = 'nm -g -U %s' % file
  result = exec_cmd(cmdline, os.path.join(cef_dir, 'tools'))
  if len(result['err']) > 0:
    raise Exception('ERROR: nm failed: %s' % result['err'])
  for line in result['out'].split('\n'):
    if line.find(' T ') < 0:
      continue
    symbol = line[line.rfind(' ') + 1:]
    symbols.append(symbol)

  return symbols


def get_undefined_symbols(file):
  """ Returns the undefined symbols imported by |file|. """
  symbols = []

  # Each symbol line has a value like:
  # cef_sandbox.a:cef_sandbox.o: _memcpy
  cmdline = 'nm -u -A %s' % file
  result = exec_cmd(cmdline, os.path.join(cef_dir, 'tools'))
  if len(result['err']) > 0:
    raise Exception('ERROR: nm failed: %s' % result['err'])
  for line in result['out'].split('\n'):
    if line.find(': ') < 0:
      continue
    symbol = line[line.rfind(': ') + 2:]
    symbols.append(symbol)

  return symbols


def combine_libs(platform, build_dir, libs, dest_lib):
  """ Combine multiple static libraries into a single static library. """
  intermediate_obj = None
  if platform == 'windows':
    cmdline = 'msvs_env.bat win%s "%s" combine_libs.py -b "%s" -o "%s"' % (
        platform_arch, sys.executable, build_dir, dest_lib)
  elif platform == 'mac':
    # Find CEF_EXPORT symbols from libcef_sandbox.a (include/cef_sandbox_mac.h)
    # Export only symbols that include these strings.
    symbol_match = [
        '_cef_',  # C symbols
        'Cef',  # C++ symbols
    ]

    print('Finding exported symbols...')
    assert 'libcef_sandbox.a' in libs[0], libs[0]
    symbols = []
    for symbol in get_exported_symbols(os.path.join(build_dir, libs[0])):
      for match in symbol_match:
        if symbol.find(match) >= 0:
          symbols.append(symbol)
          break
    assert len(symbols) > 0

    # Create an intermediate object file that combines all other object files.
    # Symbols not identified above will be made private (local).
    intermediate_obj = os.path.splitext(dest_lib)[0] + '.o'
    arch = 'arm64' if options.arm64build else 'x86_64'
    cmdline = 'ld -arch %s -r -o "%s"' % (arch, intermediate_obj)
    for symbol in symbols:
      cmdline += ' -exported_symbol %s' % symbol

  for lib in libs:
    lib_path = os.path.join(build_dir, lib)
    for path in get_files(lib_path):  # Expand wildcards in |lib_path|.
      if not path_exists(path):
        raise Exception('File not found: ' + path)
      if platform == 'windows':
        path = os.path.relpath(path, build_dir)
      cmdline += ' "%s"' % path
  run(cmdline, os.path.join(cef_dir, 'tools'))

  if not intermediate_obj is None:
    # Create an archive file containing the new object file.
    cmdline = 'libtool -static -o "%s" "%s"' % (dest_lib, intermediate_obj)
    run(cmdline, os.path.join(cef_dir, 'tools'))
    remove_file(intermediate_obj)

    # Verify that only the expected symbols are exported from the archive file.
    print('Verifying exported symbols...')
    result_symbols = get_exported_symbols(dest_lib)
    if set(symbols) != set(result_symbols):
      print('Expected', symbols)
      print('Got', result_symbols)
      raise Exception('Failure verifying exported symbols')

    # Verify that no C++ symbols are imported by the archive file. If the
    # archive imports C++ symbols and the client app links an incompatible C++
    # library, the result will be undefined behavior.
    # For example, to avoid importing libc++ symbols the cef_sandbox target
    # should have a dependency on libc++abi. This dependency can be verified
    # with the following command:
    # gn path out/[config] //cef:cef_sandbox //buildtools/third_party/libc++abi
    print('Verifying imported (undefined) symbols...')
    undefined_symbols = get_undefined_symbols(dest_lib)
    cpp_symbols = list(
        filter(lambda symbol: symbol.startswith('__Z'), undefined_symbols))
    if cpp_symbols:
      print('Found C++ symbols:', cpp_symbols)
      raise Exception('Failure verifying imported (undefined) symbols')


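# Illustrative example for combine_libs() above (not executed): on macOS the
# merge step builds a partial-link command along the lines of
#   ld -arch x86_64 -r -o "cef_sandbox.o" -exported_symbol _cef_sandbox_initialize ... "obj/cef/libcef_sandbox.a" ...
# followed by
#   libtool -static -o "cef_sandbox.a" "cef_sandbox.o"
# so that only the matched _cef_*/Cef* symbols remain visible in the resulting
# archive.

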
def run(command_line, working_dir):
  """ Run a command. """
  sys.stdout.write('-------- Running "'+command_line+'" in "'+\
                   working_dir+'"...'+"\n")
  args = shlex.split(command_line.replace('\\', '\\\\'))
  return subprocess.check_call(
      args, cwd=working_dir, env=os.environ, shell=(sys.platform == 'win32'))


def print_error(msg):
  print('Error: %s\nSee --help for usage.' % msg)


# cannot be loaded as a module
if __name__ != "__main__":
  sys.stderr.write('This file cannot be loaded as a module!')
  sys.exit()

# parse command-line options
disc = """
This utility builds the CEF Binary Distribution.
"""

parser = OptionParser(description=disc)
parser.add_option(
    '--output-dir',
    dest='outputdir',
    metavar='DIR',
    help='output directory [required]')
parser.add_option(
    '--distrib-subdir',
    dest='distribsubdir',
    help='name of the subdirectory for the distribution',
    default='')
parser.add_option(
    '--distrib-subdir-suffix',
    dest='distribsubdirsuffix',
    help='suffix added to name of the subdirectory for the distribution',
    default='')
parser.add_option(
    '--allow-partial',
    action='store_true',
    dest='allowpartial',
    default=False,
    help='allow creation of partial distributions')
parser.add_option(
    '--no-symbols',
    action='store_true',
    dest='nosymbols',
    default=False,
    help='don\'t create symbol files')
parser.add_option(
    '--no-docs',
    action='store_true',
    dest='nodocs',
    default=False,
    help='don\'t create documentation')
parser.add_option(
    '--no-archive',
    action='store_true',
    dest='noarchive',
    default=False,
    help='don\'t create archives for output directories')
parser.add_option(
    '--ninja-build',
    action='store_true',
    dest='ninjabuild',
    default=False,
    help='build was created using ninja')
parser.add_option(
    '--x64-build',
    action='store_true',
    dest='x64build',
    default=False,
    help='create a 64-bit binary distribution')
parser.add_option(
    '--arm-build',
    action='store_true',
    dest='armbuild',
    default=False,
    help='create an ARM binary distribution (Linux only)')
parser.add_option(
    '--arm64-build',
    action='store_true',
    dest='arm64build',
    default=False,
    help='create an ARM64 binary distribution (Linux only)')
parser.add_option(
    '--minimal',
    action='store_true',
    dest='minimal',
    default=False,
    help='include only release build binary files')
parser.add_option(
    '--client',
    action='store_true',
    dest='client',
    default=False,
    help='include only the sample application')
parser.add_option(
    '--sandbox',
    action='store_true',
    dest='sandbox',
    default=False,
    help='include only the cef_sandbox static library (macOS and Windows only)')
parser.add_option(
    '--tools',
    action='store_true',
    dest='tools',
    default=False,
    help='include only the tools')
parser.add_option(
    '--ozone',
    action='store_true',
    dest='ozone',
    default=False,
    help='include ozone build related files (Linux only)')
parser.add_option(
    '-q',
    '--quiet',
    action='store_true',
    dest='quiet',
    default=False,
    help='do not output detailed status information')

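# Illustrative example (not executed): a standard 64-bit distribution might be
# produced with an invocation of this script along the lines of
#   python3 make_distrib.py --ninja-build --x64-build --output-dir=/path/to/binary_distrib
# (the script file name and output path shown here are assumptions; the flags
# come from the option definitions above).
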
(options, args) = parser.parse_args()

# Test the operating system.
platform = ''
if sys.platform == 'win32':
  platform = 'windows'
elif sys.platform == 'darwin':
  platform = 'mac'
elif sys.platform.startswith('linux'):
  platform = 'linux'

# the outputdir option is required
if options.outputdir is None:
  print_error('--output-dir is required.')
  sys.exit()

if options.minimal and options.client:
  print_error('Cannot specify both --minimal and --client.')
  sys.exit()

if options.x64build + options.armbuild + options.arm64build > 1:
  print_error('Invalid combination of build options.')
  sys.exit()

if options.armbuild and platform != 'linux':
  print_error('--arm-build is only supported on Linux.')
  sys.exit()

if options.sandbox and not platform in ('mac', 'windows'):
  print_error('--sandbox is only supported on macOS and Windows.')
  sys.exit()

if not options.ninjabuild:
  print_error('--ninja-build is required.')
  sys.exit()

if options.ozone and platform != 'linux':
  print_error('--ozone is only supported on Linux.')
  sys.exit()

# script directory
script_dir = os.path.dirname(__file__)

# CEF root directory
cef_dir = os.path.realpath(os.path.join(script_dir, os.pardir))

# src directory
src_dir = os.path.realpath(os.path.join(cef_dir, os.pardir))

if not git.is_checkout(cef_dir):
  raise Exception('Not a valid checkout: %s' % (cef_dir))

# retrieve information for CEF
cef_url = git.get_url(cef_dir)
cef_rev = git.get_hash(cef_dir)
cef_commit_number = git.get_commit_number(cef_dir)

if not git.is_checkout(src_dir):
  raise Exception('Not a valid checkout: %s' % (src_dir))

# retrieve information for Chromium
chromium_url = git.get_url(src_dir)
chromium_rev = git.get_hash(src_dir)

date = get_date()

# format version strings
formatter = VersionFormatter()
cef_ver = formatter.get_version_string()
chromium_ver = formatter.get_chromium_version_string()

# list of output directories to be archived
archive_dirs = []

if options.x64build:
  platform_arch = '64'
  binary_arch = 'x64'
elif options.armbuild:
  platform_arch = 'arm'
  binary_arch = 'arm'
elif options.arm64build:
  platform_arch = 'arm64'
  binary_arch = 'arm64'
else:
  platform_arch = '32'
  binary_arch = 'x86'

# output directory
output_dir_base = 'cef_binary_' + cef_ver

if options.distribsubdir == '':
  if platform == 'mac':
    # For backwards compatibility keep the old default directory name on mac.
    platform_name = 'macos' + ('x' if platform_arch == '64' else '')
  else:
    platform_name = platform

  output_dir_name = output_dir_base + '_' + platform_name + platform_arch
  if options.distribsubdirsuffix != '':
    output_dir_name += '_' + options.distribsubdirsuffix
else:
  output_dir_name = options.distribsubdir

if options.minimal:
  mode = 'minimal'
  output_dir_name = output_dir_name + '_minimal'
elif options.client:
  mode = 'client'
  output_dir_name = output_dir_name + '_client'
elif options.sandbox:
  mode = 'sandbox'
  output_dir_name = output_dir_name + '_sandbox'
elif options.tools:
  mode = 'tools'
  output_dir_name = output_dir_name + '_tools'
else:
  mode = 'standard'

if options.ozone:
  output_dir_name = output_dir_name + '_ozone'

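# Illustrative example (not executed): for a hypothetical version string
# '1.2.3+g1234567+chromium-100.0.0.0', a --minimal --x64-build Windows run
# would name the distribution directory
# cef_binary_1.2.3+g1234567+chromium-100.0.0.0_windows64_minimal.
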
output_dir = create_output_dir(output_dir_name, options.outputdir)

# create the README.TXT file
create_readme()

# transfer the LICENSE.txt file
copy_file(os.path.join(cef_dir, 'LICENSE.txt'), output_dir, options.quiet)

# read the variables list from the autogenerated cef_paths.gypi file
cef_paths = eval_file(os.path.join(cef_dir, 'cef_paths.gypi'))
cef_paths = cef_paths['variables']

# read the variables list from the manually edited cef_paths2.gypi file
cef_paths2 = eval_file(os.path.join(cef_dir, 'cef_paths2.gypi'))
cef_paths2 = cef_paths2['variables']

# Determine the build directory suffix. CEF uses a consistent directory naming
# scheme for GN via GetAllPlatformConfigs in gn_args.py.
if options.x64build:
  build_dir_suffix = '_GN_x64'
elif options.armbuild:
  build_dir_suffix = '_GN_arm'
elif options.arm64build:
  build_dir_suffix = '_GN_arm64'
else:
  build_dir_suffix = '_GN_x86'

# Determine the build directory paths.
out_dir = os.path.join(src_dir, 'out')
build_dir_debug = os.path.join(out_dir, 'Debug' + build_dir_suffix)
build_dir_release = os.path.join(out_dir, 'Release' + build_dir_suffix)

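# Illustrative example (not executed): with --x64-build the Debug and Release
# build directories resolve to <src_dir>/out/Debug_GN_x64 and
# <src_dir>/out/Release_GN_x64 respectively.
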
if mode == 'standard' or mode == 'minimal':
  # create the include directory
  include_dir = os.path.join(output_dir, 'include')
  make_dir(include_dir, options.quiet)

  # create the cmake directory
  cmake_dir = os.path.join(output_dir, 'cmake')
  make_dir(cmake_dir, options.quiet)

  # create the libcef_dll_wrapper directory
  libcef_dll_dir = os.path.join(output_dir, 'libcef_dll')
  make_dir(libcef_dll_dir, options.quiet)

  # transfer common include files
  transfer_gypi_files(cef_dir, cef_paths2['includes_common'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['includes_common_capi'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['includes_capi'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['includes_wrapper'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths['autogen_cpp_includes'], \
                      'include/', include_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths['autogen_capi_includes'], \
                      'include/', include_dir, options.quiet)

  # Transfer generated include files.
  generated_includes = [
      'cef_color_ids.h',
      'cef_command_ids.h',
      'cef_config.h',
      'cef_pack_resources.h',
      'cef_pack_strings.h',
  ]
  for include in generated_includes:
    # Debug and Release build should be the same so grab whichever exists.
    rel_path = os.path.join('includes', 'cef', 'include', include)
    src_path = os.path.join(build_dir_release, rel_path)
    if not os.path.exists(src_path):
      src_path = os.path.join(build_dir_debug, rel_path)
      if not os.path.exists(src_path):
        raise Exception('Missing generated header file: %s' % include)
    copy_file(src_path, os.path.join(include_dir, include), options.quiet)

  # transfer common libcef_dll_wrapper files
  transfer_gypi_files(cef_dir, cef_paths2['libcef_dll_wrapper_sources_base'], \
                      'libcef_dll/', libcef_dll_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['libcef_dll_wrapper_sources_common'], \
                      'libcef_dll/', libcef_dll_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths['autogen_client_side'], \
                      'libcef_dll/', libcef_dll_dir, options.quiet)

if mode == 'standard' or mode == 'minimal':
  # transfer additional files
  transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib'), \
                 mode, output_dir, options.quiet)

  # process cmake templates
  variables = cef_paths.copy()
  variables.update(cef_paths2)
  process_cmake_template(os.path.join(cef_dir, 'CMakeLists.txt.in'), \
                         os.path.join(output_dir, 'CMakeLists.txt'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'cmake', 'cef_macros.cmake.in'), \
                         os.path.join(cmake_dir, 'cef_macros.cmake'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'cmake', 'cef_variables.cmake.in'), \
                         os.path.join(cmake_dir, 'cef_variables.cmake'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'cmake', 'FindCEF.cmake.in'), \
                         os.path.join(cmake_dir, 'FindCEF.cmake'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'libcef_dll', 'CMakeLists.txt.in'), \
                         os.path.join(libcef_dll_dir, 'CMakeLists.txt'), \
                         variables, options.quiet)

if mode == 'standard':
  # create the tests directory
  tests_dir = os.path.join(output_dir, 'tests')
  make_dir(tests_dir, options.quiet)

  # create the tests/shared directory
  shared_dir = os.path.join(tests_dir, 'shared')
  make_dir(shared_dir, options.quiet)

  if not options.ozone:
    # create the tests/cefclient directory
    cefclient_dir = os.path.join(tests_dir, 'cefclient')
    make_dir(cefclient_dir, options.quiet)

  # create the tests/cefsimple directory
  cefsimple_dir = os.path.join(tests_dir, 'cefsimple')
  make_dir(cefsimple_dir, options.quiet)

  # create the tests/ceftests directory
  ceftests_dir = os.path.join(tests_dir, 'ceftests')
  make_dir(ceftests_dir, options.quiet)

  # transfer common shared files
  transfer_gypi_files(cef_dir, cef_paths2['shared_sources_browser'], \
                      'tests/shared/', shared_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['shared_sources_common'], \
                      'tests/shared/', shared_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['shared_sources_renderer'], \
                      'tests/shared/', shared_dir, options.quiet)
  transfer_gypi_files(cef_dir, cef_paths2['shared_sources_resources'], \
                      'tests/shared/', shared_dir, options.quiet)

  if not options.ozone:
    # transfer common cefclient files
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_browser'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_common'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_renderer'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_resources'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)

  # transfer common cefsimple files
  transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_common'], \
                      'tests/cefsimple/', cefsimple_dir, options.quiet)

  # transfer common ceftests files
  transfer_gypi_files(cef_dir, cef_paths2['ceftests_sources_common'], \
                      'tests/ceftests/', ceftests_dir, options.quiet)

  # copy GTest files
  copy_gtest(tests_dir)

  # process cmake templates
  if not options.ozone:
    process_cmake_template(os.path.join(cef_dir, 'tests', 'cefclient', 'CMakeLists.txt.in'), \
                           os.path.join(cefclient_dir, 'CMakeLists.txt'), \
                           variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'tests', 'cefsimple', 'CMakeLists.txt.in'), \
                         os.path.join(cefsimple_dir, 'CMakeLists.txt'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'tests', 'gtest', 'CMakeLists.txt.in'), \
                         os.path.join(tests_dir, 'gtest', 'CMakeLists.txt'), \
                         variables, options.quiet)
  process_cmake_template(os.path.join(cef_dir, 'tests', 'ceftests', 'CMakeLists.txt.in'), \
                         os.path.join(ceftests_dir, 'CMakeLists.txt'), \
                         variables, options.quiet)

  # transfer gypi files
  copy_file(os.path.join(cef_dir, 'cef_paths.gypi'), \
            os.path.join(output_dir, 'cef_paths.gypi'), options.quiet)
  copy_file(os.path.join(cef_dir, 'cef_paths2.gypi'), \
            os.path.join(output_dir, 'cef_paths2.gypi'), options.quiet)

  # transfer Doxyfile
  transfer_doxyfile(output_dir, options.quiet)

  # transfer README.md
  copy_file(os.path.join(cef_dir, 'README.md'), \
            os.path.join(output_dir, 'README.md'), options.quiet)

  if not options.nodocs:
    # generate doc files
    sys.stdout.write("Generating docs...\n")
    result = exec_cmd(
        os.path.join('tools', 'make_cppdocs.%s' %
                     ('bat' if platform == 'windows' else 'sh')), cef_dir)
    if (len(result['err']) > 0):
      sys.stdout.write(result['err'])
    sys.stdout.write(result['out'])

    src_dir = os.path.join(cef_dir, 'docs')
    if path_exists(src_dir):
      # create the docs output directory
      docs_output_dir = create_output_dir(output_dir_base + '_docs',
                                          options.outputdir)
      # transfer contents
      copy_dir(src_dir, docs_output_dir, options.quiet)
    else:
      sys.stdout.write("ERROR: No docs generated.\n")

if mode == 'tools':
  transfer_tools_files(script_dir, (build_dir_debug, build_dir_release),
                       output_dir)

elif platform == 'windows':
  libcef_dll = 'libcef.dll'
  # yapf: disable
  binaries = [
      {'path': 'chrome_elf.dll'},
      {'path': 'd3dcompiler_47.dll'},
      {'path': 'dxcompiler.dll', 'conditional': True},
      {'path': 'dxil.dll', 'conditional': True},
      {'path': libcef_dll},
      {'path': 'libEGL.dll'},
      {'path': 'libGLESv2.dll'},
      {'path': 'snapshot_blob.bin', 'conditional': True},
      {'path': 'v8_context_snapshot.bin', 'conditional': True},
      {'path': 'vk_swiftshader.dll'},
      {'path': 'vk_swiftshader_icd.json'},
      {'path': 'vulkan-1.dll'},
  ]
  pdb_files = [
      {'path': 'chrome_elf.dll.pdb'},
      {'path': 'dxcompiler.dll.pdb', 'conditional': True},
      {'path': '%s.pdb' % libcef_dll},
      {'path': 'libEGL.dll.pdb'},
      {'path': 'libGLESv2.dll.pdb'},
      {'path': 'vk_swiftshader.dll.pdb'},
      {'path': 'vulkan-1.dll.pdb'},
  ]
  # yapf: enable

  if mode == 'client':
    binaries.append({
        'path': 'cefsimple.exe' if platform_arch == 'arm64' else 'cefclient.exe'
    })
  else:
    binaries.append({'path': '%s.lib' % libcef_dll, 'out_path': 'libcef.lib'})

  # yapf: disable
  resources = [
      {'path': 'chrome_100_percent.pak'},
      {'path': 'chrome_200_percent.pak'},
      {'path': 'resources.pak'},
      {'path': 'icudtl.dat'},
      {'path': 'locales', 'delete': '*.info'},
  ]
  # yapf: enable

  cef_sandbox_lib = 'obj\\cef\\cef_sandbox.lib'
  sandbox_libs = [
      'obj\\base\\base.lib',
      'obj\\base\\base_static.lib',
      'obj\\base\\third_party\\double_conversion\\double_conversion.lib',
      'obj\\base\\win\\pe_image.lib',
      cef_sandbox_lib,
      'obj\\sandbox\\common\\*.obj',
      'obj\\sandbox\\win\\sandbox.lib',
      'obj\\sandbox\\win\\service_resolver\\*.obj',
      'obj\\third_party\\abseil-cpp\\absl\\base\\**\\*.obj',
      'obj\\third_party\\abseil-cpp\\absl\\debugging\\**\\*.obj',
      'obj\\third_party\\abseil-cpp\\absl\\numeric\\**\\*.obj',
      'obj\\third_party\\abseil-cpp\\absl\\synchronization\\**\\*.obj',
      'obj\\third_party\\abseil-cpp\\absl\\time\\**\\*.obj',
      'obj\\third_party\\abseil-cpp\\absl\\types\\**\\*.obj',
  ]

  # Generate the cef_sandbox.lib merged library. A separate *_sandbox build
  # should exist when GN is_official_build=true.
  if mode in ('standard', 'minimal', 'sandbox'):
    dirs = {
        'Debug': (build_dir_debug + '_sandbox', build_dir_debug),
        'Release': (build_dir_release + '_sandbox', build_dir_release)
    }
    for dir_name in dirs.keys():
      for src_dir in dirs[dir_name]:
        if path_exists(os.path.join(src_dir, cef_sandbox_lib)):
          dst_dir = os.path.join(output_dir, dir_name)
          make_dir(dst_dir, options.quiet)
          combine_libs(platform, src_dir, sandbox_libs,
                       os.path.join(dst_dir, 'cef_sandbox.lib'))
          break

  valid_build_dir = None

  if mode == 'standard':
    # transfer Debug files
    build_dir = build_dir_debug
    if not options.allowpartial or path_exists(
        os.path.join(build_dir, libcef_dll)):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Debug')
      copy_files_list(build_dir, dst_dir, binaries)

      if not options.nosymbols:
        # create the symbol output directory
        symbol_output_dir = create_output_dir(
            output_dir_name + '_debug_symbols', options.outputdir)
        # transfer contents
        copy_files_list(build_dir, symbol_output_dir, pdb_files)
    else:
      sys.stdout.write("No Debug build files.\n")

  if mode != 'sandbox':
    # transfer Release files
    build_dir = build_dir_release
    if not options.allowpartial or path_exists(
        os.path.join(build_dir, libcef_dll)):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Release')
      copy_files_list(build_dir, dst_dir, binaries)

      if not options.nosymbols:
        # create the symbol output directory
        symbol_output_dir = create_output_dir(
            output_dir_name + '_release_symbols', options.outputdir)
        # transfer contents
        copy_files_list(build_dir, symbol_output_dir, pdb_files)
    else:
      sys.stdout.write("No Release build files.\n")

  if not valid_build_dir is None:
    # transfer resource files
    build_dir = valid_build_dir
    if mode == 'client':
      dst_dir = os.path.join(output_dir, 'Release')
    else:
      dst_dir = os.path.join(output_dir, 'Resources')
    copy_files_list(build_dir, dst_dir, resources)

  if mode == 'standard' or mode == 'minimal':
    # transfer include files
    transfer_gypi_files(cef_dir, cef_paths2['includes_win'], \
                        'include/', include_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['includes_win_capi'], \
                        'include/', include_dir, options.quiet)

    # transfer additional files, if any
    transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib', 'win'), \
                   mode, output_dir, options.quiet)

  if mode == 'standard':
    # transfer shared files
    transfer_gypi_files(cef_dir, cef_paths2['shared_sources_win'], \
                        'tests/shared/', shared_dir, options.quiet)

    # transfer cefclient files
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_win'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_resources_win'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)

    # transfer cefsimple files
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_win'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_resources_win'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)

    # transfer ceftests files
    transfer_gypi_files(cef_dir, cef_paths2['ceftests_sources_win'], \
                        'tests/ceftests/', ceftests_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['ceftests_sources_resources_win'], \
                        'tests/ceftests/', ceftests_dir, options.quiet)


elif platform == 'mac':
  framework_name = 'Chromium Embedded Framework'
  cefclient_app = 'cefclient.app'

  cef_sandbox_lib = 'obj/cef/libcef_sandbox.a'
  sandbox_libs = [
      cef_sandbox_lib,
      'obj/sandbox/mac/libseatbelt.a',
      'obj/sandbox/mac/libseatbelt_proto.a',
      'obj/third_party/protobuf/libprotobuf_lite.a',
      'obj/buildtools/third_party/libc++/libc++/*.o',
      'obj/buildtools/third_party/libc++abi/libc++abi/*.o',
  ]
  dsym_dirs = [
      '%s.dSYM' % framework_name,
      'libEGL.dylib.dSYM',
      'libGLESv2.dylib.dSYM',
      'libvk_swiftshader.dylib.dSYM',
  ]
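
  # The dSYM bundles listed above are copied into the *_debug_symbols and
  # *_release_symbols output directories below when options.nosymbols is not
  # set.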

  # Generate the cef_sandbox.a merged library. A separate *_sandbox build
  # should exist when GN is_official_build=true.
  if mode in ('standard', 'minimal', 'sandbox'):
    dirs = {
        'Debug': (build_dir_debug + '_sandbox', build_dir_debug),
        'Release': (build_dir_release + '_sandbox', build_dir_release)
    }
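    # Prefer the dedicated *_sandbox build directory when it contains the
    # sandbox library; otherwise fall back to the regular build directory.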
    for dir_name in dirs.keys():
      for src_dir in dirs[dir_name]:
        if path_exists(os.path.join(src_dir, cef_sandbox_lib)):
          dst_dir = os.path.join(output_dir, dir_name)
          make_dir(dst_dir, options.quiet)
          combine_libs(platform, src_dir, sandbox_libs,
                       os.path.join(dst_dir, 'cef_sandbox.a'))
          break

  valid_build_dir = None

  if mode == 'standard':
    # transfer Debug files
    build_dir = build_dir_debug
    if not options.allowpartial or path_exists(
        os.path.join(build_dir, cefclient_app)):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Debug')
      make_dir(dst_dir, options.quiet)
      framework_src_dir = os.path.join(
          build_dir, '%s/Contents/Frameworks/%s.framework/Versions/A' %
          (cefclient_app, framework_name))
      framework_dst_dir = os.path.join(dst_dir, '%s.framework' % framework_name)
      copy_dir(framework_src_dir, framework_dst_dir, options.quiet)

      if not options.nosymbols:
        # create the symbol output directory
        symbol_output_dir = create_output_dir(
            output_dir_name + '_debug_symbols', options.outputdir)

        # The real dSYM already exists, just copy it to the output directory.
        # dSYMs are only generated when is_official_build=true or enable_dsyms=true.
        # See //build/config/mac/symbols.gni.
        for dsym in dsym_dirs:
          copy_dir(
              os.path.join(build_dir, dsym),
              os.path.join(symbol_output_dir, dsym), options.quiet)
    else:
      sys.stdout.write("No Debug build files.\n")

  if mode != 'sandbox':
    # transfer Release files
    build_dir = build_dir_release
    if not options.allowpartial or path_exists(
        os.path.join(build_dir, cefclient_app)):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Release')
      make_dir(dst_dir, options.quiet)
      framework_src_dir = os.path.join(
          build_dir, '%s/Contents/Frameworks/%s.framework/Versions/A' %
          (cefclient_app, framework_name))
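      # For most modes the framework is staged directly in the Release
      # directory. In client mode the cefclient.app bundle is copied instead
      # and its versioned framework is replaced with this unversioned copy.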
      if mode != 'client':
        framework_dst_dir = os.path.join(dst_dir,
                                         '%s.framework' % framework_name)
      else:
        copy_dir(
            os.path.join(build_dir, cefclient_app),
            os.path.join(dst_dir, cefclient_app), options.quiet)
        # Replace the versioned framework with an unversioned framework in the sample app.
        framework_dst_dir = os.path.join(
            dst_dir, '%s/Contents/Frameworks/%s.framework' % (cefclient_app,
                                                              framework_name))
        remove_dir(framework_dst_dir, options.quiet)
      copy_dir(framework_src_dir, framework_dst_dir, options.quiet)

      if not options.nosymbols:
        # create the symbol output directory
        symbol_output_dir = create_output_dir(
            output_dir_name + '_release_symbols', options.outputdir)

        # The real dSYM already exists, just copy it to the output directory.
        # dSYMs are only generated when is_official_build=true or enable_dsyms=true.
        # See //build/config/mac/symbols.gni.
        for dsym in dsym_dirs:
          copy_dir(
              os.path.join(build_dir, dsym),
              os.path.join(symbol_output_dir, dsym), options.quiet)
    else:
      sys.stdout.write("No Release build files.\n")

  if mode == 'standard' or mode == 'minimal':
    # transfer include files
    transfer_gypi_files(cef_dir, cef_paths2['includes_mac'], \
                        'include/', include_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['includes_mac_capi'], \
                        'include/', include_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['includes_wrapper_mac'], \
                        'include/', include_dir, options.quiet)

    # transfer libcef_dll_wrapper files
    transfer_gypi_files(cef_dir, cef_paths2['libcef_dll_wrapper_sources_mac'], \
                        'libcef_dll/', libcef_dll_dir, options.quiet)

    # transfer additional files, if any
    transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib', 'mac'), \
                   mode, output_dir, options.quiet)

  if mode == 'standard':
    # transfer shared files
    transfer_gypi_files(cef_dir, cef_paths2['shared_sources_mac'], \
                        'tests/shared/', shared_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['shared_sources_mac_helper'], \
                        'tests/shared/', shared_dir, options.quiet)

    # transfer cefclient files
    transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_mac'], \
                        'tests/cefclient/', cefclient_dir, options.quiet)

    # transfer cefclient/resources/mac files
    copy_dir(os.path.join(cef_dir, 'tests/cefclient/resources/mac'), \
             os.path.join(cefclient_dir, 'resources/mac'), \
             options.quiet)

    # transfer cefsimple files
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_mac'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_mac_helper'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)

    # transfer cefsimple/mac files
    copy_dir(os.path.join(cef_dir, 'tests/cefsimple/mac'), \
             os.path.join(cefsimple_dir, 'mac'), \
             options.quiet)

    # transfer ceftests files
    transfer_gypi_files(cef_dir, cef_paths2['ceftests_sources_mac'], \
                        'tests/ceftests/', ceftests_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['ceftests_sources_mac_helper'], \
                        'tests/ceftests/', ceftests_dir, options.quiet)

    # transfer ceftests/resources/mac files
    copy_dir(os.path.join(cef_dir, 'tests/ceftests/resources/mac'), \
             os.path.join(ceftests_dir, 'resources/mac'), \
             options.quiet)

elif platform == 'linux':
  libcef_so = 'libcef.so'
  # yapf: disable
  binaries = [
      {'path': 'chrome_sandbox', 'out_path': 'chrome-sandbox'},
      {'path': libcef_so},
      {'path': 'libEGL.so'},
      {'path': 'libGLESv2.so'},
      {'path': 'libvk_swiftshader.so'},
      {'path': 'libvulkan.so.1'},
      {'path': 'snapshot_blob.bin', 'conditional': True},
      {'path': 'v8_context_snapshot.bin', 'conditional': True},
      {'path': 'vk_swiftshader_icd.json'},
  ]
  # yapf: enable
  if options.ozone:
    binaries.append({'path': 'libminigbm.so', 'conditional': True})

  if mode == 'client':
    binaries.append({'path': 'cefsimple'})

  # yapf: disable
  resources = [
      {'path': 'chrome_100_percent.pak'},
      {'path': 'chrome_200_percent.pak'},
      {'path': 'resources.pak'},
      {'path': 'icudtl.dat'},
      {'path': 'locales', 'delete': '*.info'},
  ]
  # yapf: enable
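
  # Note on the entries above (consumed by copy_files_list()): 'out_path'
  # renames the file in the distribution, 'delete' prunes matching files
  # (the locales/*.info stubs) after the copy, and 'conditional' is assumed
  # to mark files that may be missing from a given build and are skipped.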

  valid_build_dir = None

  if mode == 'standard':
    # transfer Debug files
    build_dir = build_dir_debug
    libcef_path = os.path.join(build_dir, libcef_so)
    if not options.allowpartial or path_exists(libcef_path):
      valid_build_dir = build_dir
      dst_dir = os.path.join(output_dir, 'Debug')
      copy_files_list(build_dir, dst_dir, binaries)
    else:
      sys.stdout.write("No Debug build files.\n")

  # transfer Release files
  build_dir = build_dir_release
  libcef_path = os.path.join(build_dir, libcef_so)
  if not options.allowpartial or path_exists(libcef_path):
    valid_build_dir = build_dir
    dst_dir = os.path.join(output_dir, 'Release')
    copy_files_list(build_dir, dst_dir, binaries)
  else:
    sys.stdout.write("No Release build files.\n")

  if valid_build_dir is not None:
    # transfer resource files
    build_dir = valid_build_dir
    if mode == 'client':
      dst_dir = os.path.join(output_dir, 'Release')
    else:
      dst_dir = os.path.join(output_dir, 'Resources')
    copy_files_list(build_dir, dst_dir, resources)

  if mode == 'standard' or mode == 'minimal':
    # transfer include files
    transfer_gypi_files(cef_dir, cef_paths2['includes_linux'], \
                        'include/', include_dir, options.quiet)
    transfer_gypi_files(cef_dir, cef_paths2['includes_linux_capi'], \
                        'include/', include_dir, options.quiet)

    # transfer additional files, if any
    transfer_files(cef_dir, script_dir, os.path.join(script_dir, 'distrib', 'linux'), \
                   mode, output_dir, options.quiet)

  if mode == 'standard':
    # transfer shared files
    transfer_gypi_files(cef_dir, cef_paths2['shared_sources_linux'], \
                        'tests/shared/', shared_dir, options.quiet)
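
    # The cefclient sample is only transferred for non-Ozone builds; cefsimple
    # and ceftests are transferred regardless.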

    if not options.ozone:
      # transfer cefclient files
      transfer_gypi_files(cef_dir, cef_paths2['cefclient_sources_linux'], \
                          'tests/cefclient/', cefclient_dir, options.quiet)

    # transfer cefsimple files
    transfer_gypi_files(cef_dir, cef_paths2['cefsimple_sources_linux'], \
                        'tests/cefsimple/', cefsimple_dir, options.quiet)

    # transfer ceftests files
    transfer_gypi_files(cef_dir, cef_paths2['ceftests_sources_linux'], \
                        'tests/ceftests/', ceftests_dir, options.quiet)

if not options.noarchive:
  # create an archive for each output directory
  archive_format = os.getenv('CEF_ARCHIVE_FORMAT', 'zip')
  if archive_format not in ('zip', 'tar.gz', 'tar.bz2'):
    raise Exception('Unsupported archive format: %s' % archive_format)

  if os.getenv('CEF_COMMAND_7ZIP', '') != '':
    archive_format = os.getenv('CEF_COMMAND_7ZIP_FORMAT', '7z')
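
  # For example, setting CEF_ARCHIVE_FORMAT=tar.bz2 in the environment
  # produces .tar.bz2 archives, while defining CEF_COMMAND_7ZIP switches to
  # 7-zip packaging with the format taken from CEF_COMMAND_7ZIP_FORMAT
  # (default '7z').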

  for dir in archive_dirs:
    if not options.quiet:
      sys.stdout.write("Creating %s archive for %s...\n" %
                       (archive_format, os.path.basename(dir)))
    if archive_format == 'zip':
      create_zip_archive(dir)
    elif archive_format == 'tar.gz':
      create_tar_archive(dir, 'gz')
    elif archive_format == 'tar.bz2':
      create_tar_archive(dir, 'bz2')
    else:
      create_7z_archive(dir, archive_format)