#!/usr/bin/env python3
#############################################################################
##
## Copyright (C) 2018 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the plugins of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################

from __future__ import annotations

from argparse import ArgumentParser
from textwrap import dedent
import copy
import xml.etree.ElementTree as ET
from itertools import chain
import os.path
import re
import io
import typing
import glob

from sympy.logic import (simplify_logic, And, Or, Not,)
import pyparsing as pp

from helper import _set_up_py_parsing_nicer_debug_output
_set_up_py_parsing_nicer_debug_output(pp)

from helper import map_qt_library, map_3rd_party_library, is_known_3rd_party_library, \
    featureName, map_platform, find_library_info_for_target, generate_find_package_info, \
    LibraryMapping

from shutil import copyfile
from special_case_helper import SpecialCaseHandler


cmake_version_string = "3.15.0"


def _parse_commandline():
    parser = ArgumentParser(description='Generate CMakeLists.txt files from .'
                                        'pro files.')
    parser.add_argument('--debug', dest='debug', action='store_true',
                        help='Turn on all debug output')
    parser.add_argument('--debug-parser', dest='debug_parser',
                        action='store_true',
                        help='Print debug output from qmake parser.')
    parser.add_argument('--debug-parse-result', dest='debug_parse_result',
                        action='store_true',
                        help='Dump the qmake parser result.')
    parser.add_argument('--debug-parse-dictionary',
                        dest='debug_parse_dictionary', action='store_true',
                        help='Dump the qmake parser result as dictionary.')
    parser.add_argument('--debug-pro-structure', dest='debug_pro_structure',
                        action='store_true',
                        help='Dump the structure of the qmake .pro-file.')
    parser.add_argument('--debug-full-pro-structure',
                        dest='debug_full_pro_structure', action='store_true',
                        help='Dump the full structure of the qmake .pro-file '
                             '(with includes).')
    parser.add_argument('--debug-special-case-preservation',
                        dest='debug_special_case_preservation', action='store_true',
                        help='Show all git commands and file copies.')

    parser.add_argument('--is-example', action='store_true',
                        dest="is_example",
                        help='Treat the input .pro file as an example.')
    parser.add_argument('-s', '--skip-special-case-preservation',
                        dest='skip_special_case_preservation', action='store_true',
                        help='Skips behavior to reapply '
                             'special case modifications (requires git in PATH)')
    parser.add_argument('-k', '--keep-temporary-files',
                        dest='keep_temporary_files', action='store_true',
                        help='Don\'t automatically remove CMakeLists.gen.txt and other '
                             'intermediate files.')

    parser.add_argument('files', metavar='<.pro/.pri file>', type=str,
                        nargs='+', help='The .pro/.pri file to process')

    return parser.parse_args()


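# Illustrative invocation of the script, not taken from the original sources
# (the .pro path below is hypothetical; the helper modules and the
# pyparsing/sympy dependencies must be importable):
#
#   ./pro2cmake.py --debug-parse-result ~/dev/qtbase/src/some_module/some_module.pro
#
# This parses the given .pro file and dumps the qmake parser result before the
# CMakeLists.txt generation logic runs.
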
def is_top_level_repo_project(project_file_path: str = '') -> bool:
    qmake_conf_path = find_qmake_conf(project_file_path)
    qmake_conf_dir_path = os.path.dirname(qmake_conf_path)
    project_dir_path = os.path.dirname(project_file_path)
    if qmake_conf_dir_path == project_dir_path:
        return True
    return False


def is_top_level_repo_tests_project(project_file_path: str = '') -> bool:
    qmake_conf_path = find_qmake_conf(project_file_path)
    qmake_conf_dir_path = os.path.dirname(qmake_conf_path)
    project_dir_path = os.path.dirname(project_file_path)
    project_dir_name = os.path.basename(project_dir_path)
    maybe_same_level_dir_path = os.path.join(project_dir_path, "..")
    normalized_maybe_same_level_dir_path = os.path.normpath(maybe_same_level_dir_path)
    if qmake_conf_dir_path == normalized_maybe_same_level_dir_path and project_dir_name == 'tests':
        return True
    return False


def is_top_level_repo_examples_project(project_file_path: str = '') -> bool:
    qmake_conf_path = find_qmake_conf(project_file_path)
    qmake_conf_dir_path = os.path.dirname(qmake_conf_path)
    project_dir_path = os.path.dirname(project_file_path)
    project_dir_name = os.path.basename(project_dir_path)
    maybe_same_level_dir_path = os.path.join(project_dir_path, "..")
    normalized_maybe_same_level_dir_path = os.path.normpath(maybe_same_level_dir_path)
    if qmake_conf_dir_path == normalized_maybe_same_level_dir_path \
            and project_dir_name == 'examples':
        return True
    return False


def is_example_project(project_file_path: str = '') -> bool:
    qmake_conf_path = find_qmake_conf(project_file_path)
    qmake_conf_dir_path = os.path.dirname(qmake_conf_path)

    project_relative_path = os.path.relpath(project_file_path, qmake_conf_dir_path)
    # If the project file is found in a subdir called 'examples'
    # relative to the repo source dir, then it must be an example.
    if project_relative_path.startswith('examples'):
        return True
    return False


def find_qmake_conf(project_file_path: str = '') -> typing.Optional[str]:
    if not os.path.isabs(project_file_path):
        print('Warning: could not find .qmake.conf file, given path is not an absolute path: {}'
              .format(project_file_path))
        return None

    cwd = os.path.dirname(project_file_path)
    file_name = '.qmake.conf'

    while os.path.isdir(cwd):
        maybe_file = os.path.join(cwd, file_name)
        if os.path.isfile(maybe_file):
            return maybe_file
        else:
            cwd = os.path.dirname(cwd)

    return None


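# Sketch of how the upward search above behaves (hypothetical absolute paths):
# for '/home/user/qt/qtbase/tests/auto/foo/foo.pro' the loop checks
# '/home/user/qt/qtbase/tests/auto/foo', then each parent directory in turn,
# and returns '/home/user/qt/qtbase/.qmake.conf' as soon as that file exists.
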
def process_qrc_file(target: str, filepath: str, base_dir: str = '', project_file_path: str = '',
                     skip_qtquick_compiler: bool = False,
                     retain_qtquick_compiler: bool = False, is_example: bool = False) -> str:
    assert(target)

    # Hack to handle QT_SOURCE_TREE. Assume currently that it's the same
    # as the qtbase source path.
    qt_source_tree_literal = '${QT_SOURCE_TREE}'
    if qt_source_tree_literal in filepath:
        qmake_conf = find_qmake_conf(project_file_path)

        if qmake_conf:
            qt_source_tree = os.path.dirname(qmake_conf)
            filepath = filepath.replace(qt_source_tree_literal, qt_source_tree)
        else:
            print('Warning, could not determine QT_SOURCE_TREE location while trying to find: {}'
                  .format(filepath))

    resource_name = os.path.splitext(os.path.basename(filepath))[0]
    dir_name = os.path.dirname(filepath)
    base_dir = os.path.join('' if base_dir == '.' else base_dir, dir_name)

    # Small, not very thorough check to see if this is a shared qrc resource;
    # the pattern is mostly used by the tests.
    is_parent_path = dir_name.startswith('..')

    if not os.path.isfile(filepath):
        raise RuntimeError('Invalid file path given to process_qrc_file: {}'.format(filepath))

    tree = ET.parse(filepath)
    root = tree.getroot()
    assert(root.tag == 'RCC')

    output = ''

    resource_count = 0
    for resource in root:
        assert(resource.tag == 'qresource')
        lang = resource.get('lang', '')
        prefix = resource.get('prefix', '/')
        if not prefix.startswith('/'):
            prefix = '/' + prefix

        full_resource_name = resource_name + (str(resource_count) if resource_count > 0 else '')

        files: typing.Dict[str, str] = {}
        for file in resource:
            path = file.text
            assert path

            # Get alias:
            alias = file.get('alias', '')
            # In cases where examples use shared resources, we set the alias
            # to the same name as the file, or the applications won't be
            # able to locate the resource.
            if not alias and is_parent_path:
                alias = path
            files[path] = alias

        output += write_add_qt_resource_call(target, full_resource_name, prefix, base_dir, lang,
                                             files, skip_qtquick_compiler,
                                             retain_qtquick_compiler, is_example)
        resource_count += 1

    return output


def write_add_qt_resource_call(target: str, resource_name: str, prefix: typing.Optional[str],
                               base_dir: typing.Optional[str], lang: typing.Optional[str],
                               files: typing.Dict[str, str], skip_qtquick_compiler: bool,
                               retain_qtquick_compiler: bool, is_example: bool) -> str:
    output = ''

    sorted_files = sorted(files.keys())

    assert(sorted_files)

    for source in sorted_files:
        alias = files[source]
        if alias:
            full_source = os.path.join(base_dir, source)
            output += 'set_source_files_properties("{}"\n' \
                      ' PROPERTIES QT_RESOURCE_ALIAS "{}"\n)\n'.format(full_source, alias)

    # Quote file paths in case there are spaces.
    sorted_files = ['"{}"'.format(f) for f in sorted_files]

    file_list = '\n '.join(sorted_files)
    output += 'set({}_resource_files\n {}\n)\n\n'.format(resource_name, file_list)
    file_list = "${{{}_resource_files}}".format(resource_name)
    if skip_qtquick_compiler:
        output += 'set_source_files_properties(${{{}_resource_files}} PROPERTIES QT_SKIP_QUICKCOMPILER 1)\n\n'.format(resource_name)

    if retain_qtquick_compiler:
        output += 'set_source_files_properties(${{{}_resource_files}} PROPERTIES QT_RETAIN_QUICKCOMPILER 1)\n\n'.format(resource_name)

    params = ''
    if lang:
        params += ' LANG\n "{}"\n'.format(lang)
    params += ' PREFIX\n "{}"\n'.format(prefix)
    if base_dir:
        params += ' BASE\n "{}"\n'.format(base_dir)
    add_resource_command = ''
    if is_example:
        add_resource_command = 'QT6_ADD_RESOURCES'
    else:
        add_resource_command = 'add_qt_resource'
    output += '{}({} "{}"\n{} FILES\n {}\n)\n'.format(add_resource_command,
                                                      target, resource_name, params, file_list)

    return output


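# Rough sketch of the kind of CMake snippet the function above emits for a
# hypothetical resource (target 'mytarget', resource 'example', one file,
# default prefix, not an example project); the exact whitespace differs:
#
#   set(example_resource_files
#    "main.qml"
#   )
#   add_qt_resource(mytarget "example"
#    PREFIX
#    "/"
#    FILES
#    ${example_resource_files}
#   )
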
def fixup_linecontinuation(contents: str) -> str:
    # Remove all line continuations, aka a backslash followed by
    # a newline character with an arbitrary amount of whitespace
    # between the backslash and the newline.
    # This greatly simplifies the qmake parsing grammar.
    contents = re.sub(r'([^\t ])\\[ \t]*\n', '\\1 ', contents)
    contents = re.sub(r'\\[ \t]*\n', '', contents)
    return contents


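# Rough illustration of the effect (hypothetical qmake fragment): the two lines
#   SOURCES += main.cpp \
#       other.cpp
# are folded into the single logical line
#   SOURCES += main.cpp     other.cpp
# so the grammar below never has to deal with backslash-newline continuations.
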
def fixup_comments(contents: str) -> str:
    # Get rid of completely commented-out lines.
    # So any line which starts with a '#' char and ends with a newline
    # will be replaced by a single newline.
    #
    # This is needed because qmake syntax is weird. In a multi-line
    # assignment (separated by backslashes and newlines aka
    # # \\\n ), if any of the lines are completely commented out, in
    # principle the assignment should fail.
    #
    # It should fail because you would have a newline separating
    # the previous value from the next value, and the next value would
    # not be interpreted as a value, but as a new token / operation.
    # qmake is lenient though, and accepts that, so we need to take
    # care of it as well, as if the commented line didn't exist in the
    # first place.

    contents = re.sub(r'\n#[^\n]*?\n', '\n', contents, flags=re.DOTALL)
    return contents


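# Rough illustration (hypothetical qmake fragment): in
#   SOURCES = a.cpp \
#   # b.cpp temporarily disabled \
#       c.cpp
# the fully commented-out middle line is dropped first, so after
# fixup_linecontinuation() the assignment still reads as one statement
# containing a.cpp and c.cpp.
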
def spaces(indent: int) -> str:
    return ' ' * indent


def trim_leading_dot(file: str) -> str:
    while file.startswith('./'):
        file = file[2:]
    return file


def map_to_file(f: str, scope: Scope, *, is_include: bool = False) -> str:
    assert('$$' not in f)

    if f.startswith('${'):  # Some cmake variable is prepended
        return f

    base_dir = scope.currentdir if is_include else scope.basedir
    f = os.path.join(base_dir, f)

    return trim_leading_dot(f)


def handle_vpath(source: str, base_dir: str, vpath: typing.List[str]) -> str:
    assert('$$' not in source)

    if not source:
        return ''

    if not vpath:
        return source

    if os.path.exists(os.path.join(base_dir, source)):
        return source

    variable_pattern = re.compile(r'\$\{[A-Za-z0-9_]+\}')
    match = re.match(variable_pattern, source)
    if match:
        # a complex, variable based path, skipping validation
        # or resolving
        return source

    for v in vpath:
        fullpath = os.path.join(v, source)
        if os.path.exists(fullpath):
            return trim_leading_dot(os.path.relpath(fullpath, base_dir))

    print(' XXXX: Source {}: Not found.'.format(source))
    return '{}-NOTFOUND'.format(source)


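# Sketch of the VPATH lookup above with hypothetical paths: for
# handle_vpath('foo.cpp', '/proj/src', ['/proj/extra']) where only
# '/proj/extra/foo.cpp' exists on disk, the function returns '../extra/foo.cpp',
# i.e. the match made relative to base_dir; if nothing matches it returns
# 'foo.cpp-NOTFOUND' and prints a warning.
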
def handle_function_value(group: pp.ParseResults):
    function_name = group[0]
    function_args = group[1]
    if function_name == 'qtLibraryTarget':
        if len(function_args) > 1:
            raise RuntimeError('Don\'t know what to do with more than one function argument '
                               'for $$qtLibraryTarget().')
        return str(function_args[0])

    if function_name == 'quote':
        # Do nothing, just return a string result
        return str(group)

    # Return the whole expression as a string.
    if function_name in ['join', 'cmakeRelativePath', 'shell_quote', 'shadowed', 'cmakeTargetPath',
                         'shell_path', 'cmakeProcessLibs', 'cmakeTargetPaths',
                         'cmakePortablePaths', 'escape_expand']:
        return 'join({})'.format(''.join(function_args))

    raise RuntimeError('No logic to handle function "{}", '
                       'please add one in handle_function_value().'.format(function_name))


class Operation:
    def __init__(self, value: typing.Union[typing.List[str], str]):
        if isinstance(value, list):
            self._value = value
        else:
            self._value = [str(value), ]

    def process(self, key: str, input: typing.List[str],
                transformer: typing.Callable[[typing.List[str]], typing.List[str]]) -> typing.List[str]:
        assert(False)

    def __repr__(self):
        assert(False)

    def _dump(self):
        if not self._value:
            return '<NOTHING>'

        if not isinstance(self._value, list):
            return '<NOT A LIST>'

        result = []
        for i in self._value:
            if not i:
                result.append('<NONE>')
            else:
                result.append(str(i))
        return '"' + '", "'.join(result) + '"'


class AddOperation(Operation):
    def process(self, key: str, input: typing.List[str],
                transformer: typing.Callable[[typing.List[str]], typing.List[str]]) -> typing.List[str]:
        return input + transformer(self._value)

    def __repr__(self):
        return '+({})'.format(self._dump())


class UniqueAddOperation(Operation):
    def process(self, key: str, input: typing.List[str],
                transformer: typing.Callable[[typing.List[str]], typing.List[str]]) -> typing.List[str]:
        result = input
        for v in transformer(self._value):
            if v not in result:
                result.append(v)
        return result

    def __repr__(self):
        return '*({})'.format(self._dump())


class SetOperation(Operation):
    def process(self, key: str, input: typing.List[str],
                transformer: typing.Callable[[typing.List[str]], typing.List[str]]) -> typing.List[str]:
        values = []  # typing.List[str]
        for v in self._value:
            if v != '$${}'.format(key):
                values.append(v)
            else:
                values += input

        if transformer:
            return list(transformer(values))
        else:
            return values

    def __repr__(self):
        return '=({})'.format(self._dump())


class RemoveOperation(Operation):
    def __init__(self, value):
        super().__init__(value)

    def process(self, key: str, input: typing.List[str],
                transformer: typing.Callable[[typing.List[str]], typing.List[str]]) -> typing.List[str]:
        input_set = set(input)
        value_set = set(self._value)
        result = []

        # Add everything that is not going to get removed:
        for v in input:
            if v not in value_set:
                result += [v, ]

        # Add everything else with removal marker:
        for v in transformer(self._value):
            if v not in input_set:
                result += ['-{}'.format(v), ]

        return result

    def __repr__(self):
        return '-({})'.format(self._dump())


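# Rough illustration of how these Operation subclasses model the qmake
# assignment operators (hypothetical values, identity transformer):
#   SetOperation(['a', 'b']).process('SOURCES', ['x'], lambda v: v)     -> ['a', 'b']
#   AddOperation(['c']).process('SOURCES', ['a', 'b'], lambda v: v)     -> ['a', 'b', 'c']
#   UniqueAddOperation(['c']).process('SOURCES', ['c'], lambda v: v)    -> ['c']
#   RemoveOperation(['b']).process('SOURCES', ['a', 'b'], lambda v: v)  -> ['a']
# which correspond to '=', '+=', '*=' and '-=' in the .pro file.
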
class Scope(object):

    SCOPE_ID: int = 1

    def __init__(self, *,
                 parent_scope: typing.Optional[Scope],
                 file: typing.Optional[str] = None, condition: str = '',
                 base_dir: str = '',
                 operations: typing.Union[
                     typing.Mapping[str, typing.List[Operation]], None] = None) -> None:
        if operations is None:
            operations = {
                'QT_SOURCE_TREE': [SetOperation(['${QT_SOURCE_TREE}'])],
                'QT_BUILD_TREE': [SetOperation(['${PROJECT_BUILD_DIR}'])],
            }

        self._operations = copy.deepcopy(operations)
        if parent_scope:
            parent_scope._add_child(self)
        else:
            self._parent = None  # type: typing.Optional[Scope]
            # Only add the "QT = core gui" Set operation once, on the
            # very top-level .pro scope, aka its basedir is empty.
            if not base_dir:
                self._operations['QT'] = [SetOperation(['core', 'gui'])]

        self._basedir = base_dir
        if file:
            self._currentdir = os.path.dirname(file)
        if not self._currentdir:
            self._currentdir = '.'
        if not self._basedir:
            self._basedir = self._currentdir

        self._scope_id = Scope.SCOPE_ID
        Scope.SCOPE_ID += 1
        self._file = file
        self._file_absolute_path = os.path.abspath(file)
        self._condition = map_condition(condition)
        self._children = []  # type: typing.List[Scope]
        self._included_children = []  # type: typing.List[Scope]
        self._visited_keys = set()  # type: typing.Set[str]
        self._total_condition = None  # type: typing.Optional[str]

    def __repr__(self):
        return '{}:{}:{}:{}:{}'.format(self._scope_id,
                                       self._basedir, self._currentdir,
                                       self._file, self._condition or '<TRUE>')

    def reset_visited_keys(self):
        self._visited_keys = set()

    def merge(self, other: 'Scope') -> None:
        assert self != other
        self._included_children.append(other)

    @property
    def scope_debug(self) -> bool:
        merge = self.get_string('PRO2CMAKE_SCOPE_DEBUG').lower()
        return merge == '1' or merge == 'on' or merge == 'yes' or merge == 'true'

    @property
    def parent(self) -> typing.Optional[Scope]:
        return self._parent

    @property
    def basedir(self) -> str:
        return self._basedir

    @property
    def currentdir(self) -> str:
        return self._currentdir

    def can_merge_condition(self):
        if self._condition == 'else':
            return False
        if self._operations:
            return False

        child_count = len(self._children)
        if child_count == 0 or child_count > 2:
            return False
        assert child_count != 1 or self._children[0]._condition != 'else'
        return child_count == 1 or self._children[1]._condition == 'else'

    def settle_condition(self):
        new_children: typing.List[Scope] = []
        for c in self._children:
            c.settle_condition()

            if c.can_merge_condition():
                child = c._children[0]
                child._condition = '({}) AND ({})'.format(c._condition, child._condition)
                new_children += c._children
            else:
                new_children.append(c)
        self._children = new_children

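    # Sketch of what settle_condition() does for a nested scope (hypothetical
    # conditions): a child scope 'win32' whose only payload is a single child
    # 'msvc { ... }' is collapsed into one child with the combined condition
    # '(win32) AND (msvc)', which keeps the generated condition tree flat.
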
    @staticmethod
    def FromDict(parent_scope: typing.Optional['Scope'],
                 file: str, statements, cond: str = '', base_dir: str = '') -> Scope:
        scope = Scope(parent_scope=parent_scope, file=file, condition=cond, base_dir=base_dir)
        for statement in statements:
            if isinstance(statement, list):  # Handle skipped parts...
                assert not statement
                continue

            operation = statement.get('operation', None)
            if operation:
                key = statement.get('key', '')
                value = statement.get('value', [])
                assert key != ''

                if operation == '=':
                    scope._append_operation(key, SetOperation(value))
                elif operation == '-=':
                    scope._append_operation(key, RemoveOperation(value))
                elif operation == '+=':
                    scope._append_operation(key, AddOperation(value))
                elif operation == '*=':
                    scope._append_operation(key, UniqueAddOperation(value))
                else:
                    print('Unexpected operation "{}" in scope "{}".'
                          .format(operation, scope))
                    assert(False)

                continue

            condition = statement.get('condition', None)
            if condition:
                Scope.FromDict(scope, file,
                               statement.get('statements'), condition,
                               scope.basedir)

                else_statements = statement.get('else_statements')
                if else_statements:
                    Scope.FromDict(scope, file, else_statements,
                                   'else', scope.basedir)
                continue

            loaded = statement.get('loaded')
            if loaded:
                scope._append_operation('_LOADED', UniqueAddOperation(loaded))
                continue

            option = statement.get('option', None)
            if option:
                scope._append_operation('_OPTION', UniqueAddOperation(option))
                continue

            included = statement.get('included', None)
            if included:
                scope._append_operation('_INCLUDED',
                                        UniqueAddOperation(included))
                continue

        scope.settle_condition()

        if scope.scope_debug:
            print('..... [SCOPE_DEBUG]: Created scope {}:'.format(scope))
            scope.dump(indent=1)
            print('..... [SCOPE_DEBUG]: <<END OF SCOPE>>')
        return scope

    def _append_operation(self, key: str, op: Operation) -> None:
        if key in self._operations:
            self._operations[key].append(op)
        else:
            self._operations[key] = [op, ]

    @property
    def file(self) -> str:
        return self._file or ''

    @property
    def file_absolute_path(self) -> str:
        return self._file_absolute_path or ''

    @property
    def generated_cmake_lists_path(self) -> str:
        assert self.basedir
        return os.path.join(self.basedir, 'CMakeLists.gen.txt')

    @property
    def original_cmake_lists_path(self) -> str:
        assert self.basedir
        return os.path.join(self.basedir, 'CMakeLists.txt')

    @property
    def condition(self) -> str:
        return self._condition

    @property
    def total_condition(self) -> typing.Optional[str]:
        return self._total_condition

    @total_condition.setter
    def total_condition(self, condition: str) -> None:
        self._total_condition = condition

    def _add_child(self, scope: 'Scope') -> None:
        scope._parent = self
        self._children.append(scope)

    @property
    def children(self) -> typing.List['Scope']:
        result = list(self._children)
        for include_scope in self._included_children:
            result += include_scope.children
        return result

    def dump(self, *, indent: int = 0) -> None:
        ind = ' ' * indent
        print('{}Scope "{}":'.format(ind, self))
        if self.total_condition:
            print('{} Total condition = {}'.format(ind, self.total_condition))
        print('{} Keys:'.format(ind))
        keys = self._operations.keys()
        if not keys:
            print('{} -- NONE --'.format(ind))
        else:
            for k in sorted(keys):
                print('{} {} = "{}"'
                      .format(ind, k, self._operations.get(k, [])))
        print('{} Children:'.format(ind))
        if not self._children:
            print('{} -- NONE --'.format(ind))
        else:
            for c in self._children:
                c.dump(indent=indent + 1)
        print('{} Includes:'.format(ind))
        if not self._included_children:
            print('{} -- NONE --'.format(ind))
        else:
            for c in self._included_children:
                c.dump(indent=indent + 1)

    def dump_structure(self, *, type: str = 'ROOT', indent: int = 0) -> None:
        print('{}{}: {}'.format(spaces(indent), type, self))
        for i in self._included_children:
            i.dump_structure(type='INCL', indent=indent + 1)
        for i in self._children:
            i.dump_structure(type='CHLD', indent=indent + 1)

    @property
    def keys(self):
        return self._operations.keys()

    @property
    def visited_keys(self):
        return self._visited_keys

    def _evalOps(self, key: str,
                 transformer: typing.Optional[typing.Callable[[Scope, typing.List[str]], typing.List[str]]],
                 result: typing.List[str], *, inherit: bool = False) \
            -> typing.List[str]:
        self._visited_keys.add(key)

        # Inherit values from above:
        if self._parent and inherit:
            result = self._parent._evalOps(key, transformer, result)

        if transformer:
            op_transformer = lambda files: transformer(self, files)
        else:
            op_transformer = lambda files: files

        for op in self._operations.get(key, []):
            result = op.process(key, result, op_transformer)

        for ic in self._included_children:
            result = list(ic._evalOps(key, transformer, result))

        return result

    def get(self, key: str, *, ignore_includes: bool = False, inherit: bool = False) -> typing.List[str]:
        is_same_path = self.currentdir == self.basedir

        if key == '_PRO_FILE_PWD_':
            return ['${CMAKE_CURRENT_SOURCE_DIR}']
        if key == 'PWD':
            if is_same_path:
                return ['${CMAKE_CURRENT_SOURCE_DIR}']
            else:
                return ['${CMAKE_CURRENT_SOURCE_DIR}/' + os.path.relpath(self.currentdir, self.basedir), ]
        if key == 'OUT_PWD':
            if is_same_path:
                return ['${CMAKE_CURRENT_BINARY_DIR}']
            else:
                return ['${CMAKE_CURRENT_BINARY_DIR}/' + os.path.relpath(self.currentdir, self.basedir), ]

        return self._evalOps(key, None, [], inherit=inherit)

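    # Example of the special keys handled above (hypothetical layout): for a
    # scope whose currentdir is 'src/widgets' and whose basedir is 'src',
    # get('PWD') yields ['${CMAKE_CURRENT_SOURCE_DIR}/widgets'] and
    # get('OUT_PWD') yields ['${CMAKE_CURRENT_BINARY_DIR}/widgets'], while
    # '_PRO_FILE_PWD_' always maps to ['${CMAKE_CURRENT_SOURCE_DIR}'].
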
    def get_string(self, key: str, default: str = '', inherit: bool = False) -> str:
        v = self.get(key, inherit=inherit)
        if len(v) == 0:
            return default
        assert len(v) == 1
        return v[0]

    def _map_files(self, files: typing.List[str], *,
                   use_vpath: bool = True, is_include: bool = False) -> typing.List[str]:

        expanded_files = []  # type: typing.List[str]
        for f in files:
            r = self._expand_value(f)
            expanded_files += r

        mapped_files = list(map(lambda f: map_to_file(f, self, is_include=is_include), expanded_files))

        if use_vpath:
            result = list(map(lambda f: handle_vpath(f, self.basedir, self.get('VPATH', inherit=True)), mapped_files))
        else:
            result = mapped_files

        # strip ${CMAKE_CURRENT_SOURCE_DIR}:
        result = list(map(lambda f: f[28:] if f.startswith('${CMAKE_CURRENT_SOURCE_DIR}/') else f, result))

        # strip leading ./:
        result = list(map(lambda f: trim_leading_dot(f), result))

        return result

    def get_files(self, key: str, *, use_vpath: bool = False,
                  is_include: bool = False) -> typing.List[str]:
        transformer = lambda scope, files: scope._map_files(files, use_vpath=use_vpath, is_include=is_include)
        return list(self._evalOps(key, transformer, []))

    def _expand_value(self, value: str) -> typing.List[str]:
        result = value
        pattern = re.compile(r'\$\$\{?([A-Za-z_][A-Za-z0-9_]*)\}?')
        match = re.search(pattern, result)
        while match:
            old_result = result
            if match.group(0) == value:
                get_result = self.get(match.group(1), inherit=True)
                if len(get_result) == 1:
                    result = get_result[0]
                else:
                    return get_result
            else:
                replacement = self.get(match.group(1), inherit=True)
                replacement_str = replacement[0] if replacement else ''
                result = result[:match.start()] \
                         + replacement_str \
                         + result[match.end():]

            if result == old_result:
                return [result, ]  # Do not go into infinite loop

            match = re.search(pattern, result)
        return [result, ]

    def expand(self, key: str) -> typing.List[str]:
        value = self.get(key)
        result: typing.List[str] = []
        assert isinstance(value, list)
        for v in value:
            result += self._expand_value(v)
        return result

    def expandString(self, key: str) -> str:
        result = self._expand_value(self.get_string(key))
        assert len(result) == 1
        return result[0]

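    # Sketch of the $$ expansion above (hypothetical scope contents): if the
    # scope (or a parent) holds TARGET = ['mylib'], then
    # _expand_value('lib$${TARGET}.so') resolves to ['libmylib.so'], and a value
    # that is exactly '$$SOURCES' is replaced by the whole list stored under
    # SOURCES rather than being joined into a single string.
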
    @property
    def TEMPLATE(self) -> str:
        return self.get_string('TEMPLATE', 'app')

    def _rawTemplate(self) -> str:
        return self.get_string('TEMPLATE')

    @property
    def TARGET(self) -> str:
        target = self.expandString('TARGET') \
            or os.path.splitext(os.path.basename(self.file))[0]
        return re.sub(r'\.\./', '', target)

    @property
    def _INCLUDED(self) -> typing.List[str]:
        return self.get('_INCLUDED')


class QmakeParser:
    def __init__(self, *, debug: bool = False) -> None:
        self.debug = debug
        self._Grammar = self._generate_grammar()

def _generate_grammar(self):
|
2018-10-24 13:20:27 +00:00
|
|
|
# Define grammar:
|
|
|
|
pp.ParserElement.setDefaultWhitespaceChars(' \t')
|
|
|
|
|
2019-05-23 13:20:41 +00:00
|
|
|
def add_element(name: str, value: pp.ParserElement):
|
|
|
|
nonlocal self
|
|
|
|
if self.debug:
|
|
|
|
value.setName(name)
|
|
|
|
value.setDebug()
|
|
|
|
return value
|
|
|
|
|
|
|
|
EOL = add_element('EOL', pp.Suppress(pp.LineEnd()))
|
|
|
|
Else = add_element('Else', pp.Keyword('else'))
|
|
|
|
Identifier = add_element('Identifier', pp.Word(pp.alphas + '_',
|
|
|
|
bodyChars=pp.alphanums+'_-./'))
|
|
|
|
BracedValue = add_element('BracedValue',
|
|
|
|
pp.nestedExpr(
|
|
|
|
ignoreExpr=pp.quotedString |
|
|
|
|
pp.QuotedString(quoteChar='$(',
|
|
|
|
endQuoteChar=')',
|
|
|
|
escQuote='\\',
|
|
|
|
unquoteResults=False)
|
|
|
|
).setParseAction(lambda s, l, t: ['(', *t[0], ')']))
|
2019-02-11 17:02:22 +00:00
|
|
|
|
2018-12-21 11:13:38 +00:00
|
|
|
Substitution \
|
2019-05-23 13:20:41 +00:00
|
|
|
= add_element('Substitution',
|
|
|
|
pp.Combine(pp.Literal('$')
|
|
|
|
+ (((pp.Literal('$') + Identifier
|
|
|
|
+ pp.Optional(pp.nestedExpr()))
|
|
|
|
| (pp.Literal('(') + Identifier + pp.Literal(')'))
|
|
|
|
| (pp.Literal('{') + Identifier + pp.Literal('}'))
|
|
|
|
| (pp.Literal('$') + pp.Literal('{')
|
|
|
|
+ Identifier + pp.Optional(pp.nestedExpr())
|
|
|
|
+ pp.Literal('}'))
|
|
|
|
| (pp.Literal('$') + pp.Literal('[') + Identifier
|
|
|
|
+ pp.Literal(']'))
|
|
|
|
))))
|
|
|
|
LiteralValuePart = add_element('LiteralValuePart',
|
|
|
|
pp.Word(pp.printables, excludeChars='$#{}()'))
|
2018-12-21 11:13:38 +00:00
|
|
|
SubstitutionValue \
|
2019-05-23 13:20:41 +00:00
|
|
|
= add_element('SubstitutionValue',
|
|
|
|
pp.Combine(pp.OneOrMore(Substitution
|
|
|
|
| LiteralValuePart
|
|
|
|
| pp.Literal('$'))))
|
2019-08-08 11:21:23 +00:00
|
|
|
FunctionValue \
|
|
|
|
= add_element('FunctionValue',
|
|
|
|
pp.Group(pp.Suppress(pp.Literal('$') + pp.Literal('$'))
|
|
|
|
+ Identifier
|
|
|
|
+ pp.nestedExpr() #.setParseAction(lambda s, l, t: ['(', *t[0], ')'])
|
|
|
|
).setParseAction(lambda s, l, t: handle_function_value(*t)))
|
2019-05-23 13:20:41 +00:00
|
|
|
Value \
|
|
|
|
= add_element('Value',
|
|
|
|
pp.NotAny(Else | pp.Literal('}') | EOL) \
|
|
|
|
+ (pp.QuotedString(quoteChar='"', escChar='\\')
|
2019-08-08 11:21:23 +00:00
|
|
|
| FunctionValue
|
2019-05-23 13:20:41 +00:00
|
|
|
| SubstitutionValue
|
|
|
|
| BracedValue))
|
|
|
|
|
2019-05-23 13:57:59 +00:00
|
|
|
Values = add_element('Values', pp.ZeroOrMore(Value)('value'))
|
2019-05-23 13:20:41 +00:00
|
|
|
|
|
|
|
Op = add_element('OP',
|
|
|
|
pp.Literal('=') | pp.Literal('-=') | pp.Literal('+=') \
|
|
|
|
| pp.Literal('*='))
|
|
|
|
|
|
|
|
Key = add_element('Key', Identifier)
|
|
|
|
|
2019-05-23 13:57:59 +00:00
|
|
|
Operation = add_element('Operation', Key('key') + Op('operation') + Values('value'))
|
|
|
|
CallArgs = add_element('CallArgs', pp.nestedExpr())
|
2019-03-28 12:25:04 +00:00
|
|
|
|
|
|
|
def parse_call_args(results):
|
|
|
|
out = ''
|
|
|
|
for item in chain(*results):
|
|
|
|
if isinstance(item, str):
|
|
|
|
out += item
|
|
|
|
else:
|
|
|
|
out += "(" + parse_call_args(item) + ")"
|
|
|
|
return out
|
|
|
|
|
|
|
|
CallArgs.setParseAction(parse_call_args)
|
2019-05-23 13:20:41 +00:00
|
|
|
|
|
|
|
        Load = add_element('Load', pp.Keyword('load') + CallArgs('loaded'))
        Include = add_element('Include', pp.Keyword('include') + CallArgs('included'))
        Option = add_element('Option', pp.Keyword('option') + CallArgs('option'))

        # ignore the whole thing...
        DefineTestDefinition = add_element(
            'DefineTestDefinition',
            pp.Suppress(pp.Keyword('defineTest') + CallArgs
                        + pp.nestedExpr(opener='{', closer='}', ignoreExpr=pp.LineEnd())))

        # ignore the whole thing...
        ForLoop = add_element(
            'ForLoop',
            pp.Suppress(pp.Keyword('for') + CallArgs
                        + pp.nestedExpr(opener='{', closer='}', ignoreExpr=pp.LineEnd())))

        # ignore the whole thing...
        ForLoopSingleLine = add_element(
            'ForLoopSingleLine',
            pp.Suppress(pp.Keyword('for') + CallArgs + pp.Literal(':') + pp.SkipTo(EOL)))

        # ignore the whole thing...
        FunctionCall = add_element('FunctionCall', pp.Suppress(Identifier + pp.nestedExpr()))

        Scope = add_element('Scope', pp.Forward())

        Statement = add_element('Statement',
                                pp.Group(Load | Include | Option | ForLoop | ForLoopSingleLine
                                         | DefineTestDefinition | FunctionCall | Operation))
        StatementLine = add_element('StatementLine', Statement + (EOL | pp.FollowedBy('}')))
        StatementGroup = add_element('StatementGroup',
                                     pp.ZeroOrMore(StatementLine | Scope | pp.Suppress(EOL)))

        Block = add_element('Block',
                            pp.Suppress('{') + pp.Optional(EOL)
                            + StatementGroup + pp.Optional(EOL)
                            + pp.Suppress('}') + pp.Optional(EOL))

        ConditionEnd = add_element('ConditionEnd',
                                   pp.FollowedBy((pp.Optional(pp.White())
                                                  + (pp.Literal(':')
                                                     | pp.Literal('{')
                                                     | pp.Literal('|')))))

        ConditionPart1 = add_element('ConditionPart1',
                                     (pp.Optional('!') + Identifier + pp.Optional(BracedValue)))
        ConditionPart2 = add_element('ConditionPart2', pp.CharsNotIn('#{}|:=\\\n'))
        ConditionPart = add_element(
            'ConditionPart',
            (ConditionPart1 ^ ConditionPart2) + ConditionEnd)

        ConditionOp = add_element('ConditionOp', pp.Literal('|') ^ pp.Literal(':'))
        ConditionWhiteSpace = add_element('ConditionWhiteSpace',
                                          pp.Suppress(pp.Optional(pp.White(' '))))

        ConditionRepeated = add_element('ConditionRepeated',
                                        pp.ZeroOrMore(ConditionOp
                                                      + ConditionWhiteSpace + ConditionPart))

        Condition = add_element('Condition', pp.Combine(ConditionPart + ConditionRepeated))
        Condition.setParseAction(lambda x: ' '.join(x).strip().replace(':', ' && ').strip(' && '))

        # Weird thing like write_file(a)|error() where error() is the alternative condition
        # which happens to be a function call. In this case there is no scope, but our code expects
        # a scope with a list of statements, so create a fake empty statement.
        ConditionEndingInFunctionCall = add_element(
            'ConditionEndingInFunctionCall', pp.Suppress(ConditionOp) + FunctionCall
            + pp.Empty().setParseAction(lambda x: [[]])
            .setResultsName('statements'))

        SingleLineScope = add_element('SingleLineScope',
                                      pp.Suppress(pp.Literal(':'))
                                      + pp.Group(Block | (Statement + EOL))('statements'))
        MultiLineScope = add_element('MultiLineScope', Block('statements'))

        SingleLineElse = add_element('SingleLineElse',
                                     pp.Suppress(pp.Literal(':'))
                                     + (Scope | Block | (Statement + pp.Optional(EOL))))
        MultiLineElse = add_element('MultiLineElse', Block)
        ElseBranch = add_element('ElseBranch', pp.Suppress(Else) + (SingleLineElse | MultiLineElse))

        # Scope is already add_element'ed in the forward declaration above.
        Scope <<= \
            pp.Group(Condition('condition')
                     + (SingleLineScope | MultiLineScope | ConditionEndingInFunctionCall)
                     + pp.Optional(ElseBranch)('else_statements'))

        Grammar = StatementGroup('statements')
        Grammar.ignore(pp.pythonStyleComment())

        return Grammar

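    # Illustrative qmake input this grammar is meant to match (hypothetical snippet):
    #
    #   qtConfig(thread): SOURCES += qthread.cpp
    #   win32 {
    #       LIBS += -lws2_32
    #   } else {
    #       include(unix.pri)
    #   }
    #
    # Conditions and their bodies surface in the parse result under the
    # 'condition', 'statements' and 'else_statements' result names set above.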
    def parseFile(self, file: str):
        print('Parsing \"{}\"...'.format(file))
        try:
            with open(file, 'r') as file_fd:
                contents = file_fd.read()

            old_contents = contents
            contents = fixup_comments(contents)
            contents = fixup_linecontinuation(contents)

            if old_contents != contents:
                print('Warning: Fixed line continuation in .pro-file!\n'
                      '    Position information in Parsing output might be wrong!')
            result = self._Grammar.parseString(contents, parseAll=True)
        except pp.ParseException as pe:
            print(pe.line)
            print(' '*(pe.col-1) + '^')
            print(pe)
            raise pe
        return result


def parseProFile(file: str, *, debug=False):
    parser = QmakeParser(debug=debug)
    return parser.parseFile(file)


def map_condition(condition: str) -> str:
    # Some hardcoded cases that are too bothersome to generalize.
    condition = re.sub(r'qtConfig\(opengl\(es1\|es2\)\?\)',
                       r'QT_FEATURE_opengl OR QT_FEATURE_opengles2 OR QT_FEATURE_opengles3',
                       condition)
    condition = re.sub(r'qtConfig\(opengl\.\*\)', r'QT_FEATURE_opengl', condition)
    condition = re.sub(r'^win\*$', r'win', condition)
    condition = re.sub(r'^no-png$', r'NOT QT_FEATURE_png', condition)
    condition = re.sub(r'contains\(CONFIG, static\)', r'NOT QT_BUILD_SHARED_LIBS', condition)
    condition = re.sub(r'contains\(QT_CONFIG,\w*shared\)', r'QT_BUILD_SHARED_LIBS', condition)

    def gcc_version_handler(match_obj: re.Match):
        operator = match_obj.group(1)
        version_type = match_obj.group(2)
        if operator == 'equals':
            operator = 'STREQUAL'
        elif operator == 'greaterThan':
            operator = 'STRGREATER'
        elif operator == 'lessThan':
            operator = 'STRLESS'

        version = match_obj.group(3)
        return '(QT_COMPILER_VERSION_{} {} {})'.format(version_type, operator, version)

    # TODO: Possibly fix for other compilers.
    pattern = r'(equals|greaterThan|lessThan)\(QT_GCC_([A-Z]+)_VERSION,[ ]*([0-9]+)\)'
    condition = re.sub(pattern, gcc_version_handler, condition)

    # TODO: the current if(...) replacement makes the parentheses
    # unbalanced when there are nested expressions.
    # Need to fix this either with pypi regex recursive regexps,
    # using pyparsing, or some other proper means of handling
    # balanced parentheses.
    condition = re.sub(r'\bif\s*\((.*?)\)', r'\1', condition)

    condition = re.sub(r'\bisEmpty\s*\((.*?)\)', r'\1_ISEMPTY', condition)
    condition = re.sub(r'\bcontains\s*\((.*?),\s*"?(.*?)"?\)',
                       r'\1___contains___\2', condition)
    condition = re.sub(r'\bequals\s*\((.*?),\s*"?(.*?)"?\)',
                       r'\1___equals___\2', condition)
    condition = re.sub(r'\bisEqual\s*\((.*?),\s*"?(.*?)"?\)',
                       r'\1___equals___\2', condition)
    condition = re.sub(r'\s*==\s*', '___STREQUAL___', condition)
    condition = re.sub(r'\bexists\s*\((.*?)\)', r'EXISTS \1', condition)

    pattern = r'CONFIG\((debug|release),debug\|release\)'
    match_result = re.match(pattern, condition)
    if match_result:
        build_type = match_result.group(1)
        if build_type == 'debug':
            build_type = 'Debug'
        elif build_type == 'release':
            build_type = 'Release'
        condition = re.sub(pattern, '(CMAKE_BUILD_TYPE STREQUAL {})'.format(build_type), condition)

    condition = condition.replace('*', '_x_')
    condition = condition.replace('.$$', '__ss_')
    condition = condition.replace('$$', '_ss_')

    condition = condition.replace('!', 'NOT ')
    condition = condition.replace('&&', ' AND ')
    condition = condition.replace('|', ' OR ')

    cmake_condition = ''
    for part in condition.split():
        # some features contain e.g. linux, that should not be
        # turned upper case
        feature = re.match(r"(qtConfig|qtHaveModule)\(([a-zA-Z0-9_-]+)\)",
                           part)
        if feature:
            if (feature.group(1) == "qtHaveModule"):
                part = 'TARGET {}'.format(map_qt_library(feature.group(2)))
            else:
                feature_name = featureName(feature.group(2))
                if feature_name.startswith('system_') and is_known_3rd_party_library(feature_name[7:]):
                    part = 'ON'
                elif feature_name == 'dlopen':
                    part = 'ON'
                else:
                    part = 'QT_FEATURE_' + feature_name
        else:
            part = map_platform(part)

        part = part.replace('true', 'ON')
        part = part.replace('false', 'OFF')
        cmake_condition += ' ' + part
    return cmake_condition.strip()


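# Illustrative mappings (the exact strings depend on featureName(), map_platform()
# and map_qt_library() in helper.py):
#   'qtConfig(thread)'    ->  'QT_FEATURE_thread'
#   '!qtConfig(opengl.*)' ->  'NOT QT_FEATURE_opengl'
#   'qtHaveModule(gui)'   ->  'TARGET Qt::Gui'  (assuming map_qt_library('gui') == 'Qt::Gui')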
def handle_subdir(scope: Scope,
                  cm_fh: typing.IO[str],
                  *,
                  indent: int = 0,
                  is_example: bool = False) -> None:

    # Global nested dictionary that will contain sub_dir assignments and their conditions.
    # Declared as a global in order not to pollute the nested function signatures with giant
    # type hints.
    sub_dirs: typing.Dict[str, typing.Dict[str, typing.Set[typing.FrozenSet[str]]]] = {}

    # Collects assignment conditions into the global sub_dirs dict.
    def collect_subdir_info(sub_dir_assignment: str,
                            *,
                            current_conditions: typing.FrozenSet[str] = None):
        subtraction = sub_dir_assignment.startswith('-')
        if subtraction:
            subdir_name = sub_dir_assignment[1:]
        else:
            subdir_name = sub_dir_assignment
        if subdir_name not in sub_dirs:
            sub_dirs[subdir_name] = {}
        additions = sub_dirs[subdir_name].get('additions', set())
        subtractions = sub_dirs[subdir_name].get('subtractions', set())
        if current_conditions:
            if subtraction:
                subtractions.add(current_conditions)
            else:
                additions.add(current_conditions)
        if additions:
            sub_dirs[subdir_name]['additions'] = additions
        if subtractions:
            sub_dirs[subdir_name]['subtractions'] = subtractions

    # Recursive helper that collects subdir info for the given scope,
    # and the children of the given scope.
    def handle_subdir_helper(scope: Scope,
                             cm_fh: typing.IO[str],
                             *,
                             indent: int = 0,
                             current_conditions: typing.FrozenSet[str] = None,
                             is_example: bool = False):
        for sd in scope.get_files('SUBDIRS'):
            # Collect info about conditions and SUBDIR assignments in the
            # current scope.
            if os.path.isdir(sd) or sd.startswith('-'):
                collect_subdir_info(sd, current_conditions=current_conditions)
            # For the file case, directly write into the file handle.
            elif os.path.isfile(sd):
                # Handle cases with SUBDIRS += Foo/bar/z.pro. We want to be able
                # to generate add_subdirectory(Foo/bar) instead of parsing the full
                # .pro file in the current CMakeLists.txt. This causes issues
                # with relative paths in certain projects otherwise.
                dirname = os.path.dirname(sd)
                if dirname:
                    collect_subdir_info(dirname, current_conditions=current_conditions)
                else:
                    subdir_result = parseProFile(sd, debug=False)
                    subdir_scope \
                        = Scope.FromDict(scope, sd,
                                         subdir_result.asDict().get('statements'),
                                         '', scope.basedir)

                    do_include(subdir_scope)
                    cmakeify_scope(subdir_scope, cm_fh, indent=indent, is_example=is_example)
            else:
                print('    XXXX: SUBDIR {} in {}: Not found.'.format(sd, scope))

        # Collect info about conditions and SUBDIR assignments in child
        # scopes, aka recursively call the same function, but with an
        # updated current_conditions frozen set.
        for c in scope.children:
            # Use total_condition for 'else' conditions, otherwise just use the regular value to
            # simplify the logic.
            child_condition = c.total_condition if c.condition == 'else' else c.condition
            handle_subdir_helper(c, cm_fh,
                                 indent=indent + 1,
                                 is_example=is_example,
                                 current_conditions=frozenset((*current_conditions,
                                                               child_condition)))

    def group_and_print_sub_dirs(indent: int = 0):
        # Simplify conditions, and group
        # subdirectories with the same conditions.
        grouped_sub_dirs = {}

        # Wraps each element in the given iterable with parentheses,
        # to make sure boolean simplification happens correctly.
        def wrap_in_parenthesis(iterable):
            return ['({})'.format(c) for c in iterable]

        def join_all_conditions(set_of_alternatives):
            # Elements within one frozen set represent one single
            # alternative whose pieces are ANDed together.
            # This is repeated for each alternative that would
            # enable a subdir, and are thus ORed together.
            final_str = ''
            if set_of_alternatives:
                wrapped_set_of_alternatives = [wrap_in_parenthesis(alternative)
                                               for alternative in set_of_alternatives]
                alternatives = ['({})'.format(" AND ".join(alternative))
                                for alternative in wrapped_set_of_alternatives]
                final_str = ' OR '.join(sorted(alternatives))
            return final_str

        for subdir_name in sub_dirs:
            additions = sub_dirs[subdir_name].get('additions', set())
            subtractions = sub_dirs[subdir_name].get('subtractions', set())

            # An empty condition key represents the group of sub dirs
            # that should be added unconditionally.
            condition_key = ''
            if additions or subtractions:
                addition_str = join_all_conditions(additions)
                if addition_str:
                    addition_str = '({})'.format(addition_str)
                subtraction_str = join_all_conditions(subtractions)
                if subtraction_str:
                    subtraction_str = 'NOT ({})'.format(subtraction_str)

                condition_str = addition_str
                if condition_str and subtraction_str:
                    condition_str += ' AND '
                condition_str += subtraction_str
                condition_simplified = simplify_condition(condition_str)
                condition_key = condition_simplified

            sub_dir_list_by_key = grouped_sub_dirs.get(condition_key, [])
            sub_dir_list_by_key.append(subdir_name)
            grouped_sub_dirs[condition_key] = sub_dir_list_by_key

        # Print the groups.
        ind = '    ' * indent
        for condition_key in grouped_sub_dirs:
            cond_ind = ind
            if condition_key:
                cm_fh.write(f'{ind}if({condition_key})\n')
                cond_ind += "    "
            sub_dir_list_by_key = grouped_sub_dirs.get(condition_key, [])
            for subdir_name in sub_dir_list_by_key:
                cm_fh.write(f'{cond_ind}add_subdirectory({subdir_name})\n')
            if condition_key:
                cm_fh.write(f'{ind}endif()\n')

    # A set of conditions which will be ANDed together. The set is recreated with more conditions
    # as the scope deepens.
    current_conditions = frozenset()

    # Compute the total condition for scopes. Needed for scopes that
    # have 'else' as a condition.
    recursive_evaluate_scope(scope)

    # Do the work.
    handle_subdir_helper(scope, cm_fh,
                         indent=indent,
                         current_conditions=current_conditions,
                         is_example=is_example)
    group_and_print_sub_dirs(indent=indent)


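# Rough sketch of the CMake emitted by group_and_print_sub_dirs() for a SUBDIRS
# project (hypothetical directory names): unconditional entries become plain
# add_subdirectory() calls, while entries sharing one simplified condition are
# grouped inside a single if()/endif() block, e.g.
#   add_subdirectory(corelib)
#   if(QT_FEATURE_gui)
#       add_subdirectory(gui)
#       add_subdirectory(widgets)
#   endif()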
def sort_sources(sources: typing.List[str]) -> typing.List[str]:
    to_sort = {}  # type: typing.Dict[str, typing.List[str]]
    for s in sources:
        if s is None:
            continue

        dir = os.path.dirname(s)
        base = os.path.splitext(os.path.basename(s))[0]
        if base.endswith('_p'):
            base = base[:-2]
        sort_name = os.path.join(dir, base)

        array = to_sort.get(sort_name, [])
        array.append(s)

        to_sort[sort_name] = array

    lines = []
    for k in sorted(to_sort.keys()):
        lines.append(' '.join(sorted(to_sort[k])))

    return lines


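# Worked example of the grouping above (hypothetical file names): the input
#   ['foo.cpp', 'bar.cpp', 'foo.h', 'foo_p.h']
# is grouped by basename (with the '_p' suffix folded into the base) and
# returned as the sorted lines
#   ['bar.cpp', 'foo.cpp foo.h foo_p.h']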
def _map_libraries_to_cmake(libraries: typing.List[str],
                            known_libraries: typing.Set[str]) -> typing.List[str]:
    result = []  # type: typing.List[str]
    is_framework = False

    for l in libraries:
        if l == '-framework':
            is_framework = True
            continue
        if is_framework:
            l = '${FW%s}' % l
        if l.startswith('-l'):
            l = l[2:]

        if l.startswith('-'):
            l = '# Remove: {}'.format(l[1:])
        else:
            l = map_3rd_party_library(l)

        if not l or l in result or l in known_libraries:
            continue

        result.append(l)
        is_framework = False

    return result


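# Hedged example (actual names come from map_3rd_party_library() in helper.py):
#   ['-framework', 'Foundation', '-lz']
# would map to roughly
#   ['${FWFoundation}', <whatever 'z' maps to, e.g. a ZLIB target>]
# while unknown '-...' flags are kept as '# Remove: ...' comment entries.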
def extract_cmake_libraries(scope: Scope, *, known_libraries: typing.Set[str]=set()) \
        -> typing.Tuple[typing.List[str], typing.List[str]]:
    public_dependencies = []  # type: typing.List[str]
    private_dependencies = []  # type: typing.List[str]

    for key in ['QMAKE_USE', 'LIBS',]:
        public_dependencies += scope.expand(key)
    for key in ['QMAKE_USE_PRIVATE', 'QMAKE_USE_FOR_PRIVATE', 'LIBS_PRIVATE',]:
        private_dependencies += scope.expand(key)

    for key in ['QT_FOR_PRIVATE', 'QT_PRIVATE']:
        private_dependencies += [map_qt_library(q) for q in scope.expand(key)]

    for key in ['QT',]:
        # Qt public libs: These may include FooPrivate in which case we get
        # a private dependency on FooPrivate as well as a public dependency on Foo
        for lib in scope.expand(key):
            mapped_lib = map_qt_library(lib)

            if mapped_lib.endswith('Private'):
                private_dependencies.append(mapped_lib)
                public_dependencies.append(mapped_lib[:-7])
            else:
                public_dependencies.append(mapped_lib)

    return (_map_libraries_to_cmake(public_dependencies, known_libraries),
            _map_libraries_to_cmake(private_dependencies, known_libraries))


def write_header(cm_fh: typing.IO[str], name: str,
                 typename: str, *, indent: int = 0):
    cm_fh.write('{}###########################################'
                '##########################\n'.format(spaces(indent)))
    cm_fh.write('{}## {} {}:\n'.format(spaces(indent), name, typename))
    cm_fh.write('{}###########################################'
                '##########################\n\n'.format(spaces(indent)))


def write_scope_header(cm_fh: typing.IO[str], *, indent: int = 0):
    cm_fh.write('\n{}## Scopes:\n'.format(spaces(indent)))
    cm_fh.write('{}###########################################'
                '##########################\n'.format(spaces(indent)))


def write_list(cm_fh: typing.IO[str], entries: typing.List[str],
               cmake_parameter: str,
               indent: int = 0, *,
               header: str = '', footer: str = ''):
    if not entries:
        return

    ind = spaces(indent)
    extra_indent = ''

    if header:
        cm_fh.write('{}{}'.format(ind, header))
        extra_indent += '    '
    if cmake_parameter:
        cm_fh.write('{}{}{}\n'.format(ind, extra_indent, cmake_parameter))
        extra_indent += '    '
    for s in sort_sources(entries):
        cm_fh.write('{}{}{}\n'.format(ind, extra_indent, s))
    if footer:
        cm_fh.write('{}{}\n'.format(ind, footer))


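# Shape of the output (illustrative, assuming spaces(1) is four spaces):
# write_list(cm_fh, ['b.cpp', 'a.cpp', 'a.h'], 'SOURCES', 1) writes the parameter
# name followed by the sorted, grouped entries, each one extra level deeper:
#     SOURCES
#         a.cpp a.h
#         b.cpp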
def write_source_file_list(cm_fh: typing.IO[str], scope, cmake_parameter: str,
                           keys: typing.List[str], indent: int = 0, *,
                           header: str = '', footer: str = ''):
    # collect sources
    sources: typing.List[str] = []
    for key in keys:
        sources += scope.get_files(key, use_vpath=True)

    write_list(cm_fh, sources, cmake_parameter, indent, header=header, footer=footer)


def write_all_source_file_lists(cm_fh: typing.IO[str], scope: Scope, header: str, *,
                                indent: int = 0, footer: str = '',
                                extra_keys: typing.Optional[typing.List[str]] = None):
    if extra_keys is None:
        extra_keys = []
    write_source_file_list(cm_fh, scope, header,
                           ['SOURCES', 'HEADERS', 'OBJECTIVE_SOURCES', 'NO_PCH_SOURCES', 'FORMS'] + extra_keys,
                           indent, footer=footer)


def write_defines(cm_fh: typing.IO[str], scope: Scope, cmake_parameter: str, *,
                  indent: int = 0, footer: str = ''):
    defines = scope.expand('DEFINES')
    defines += [d[2:] for d in scope.expand('QMAKE_CXXFLAGS') if d.startswith('-D')]
    defines = [d.replace('=\\\\\\"$$PWD/\\\\\\"',
                         '="${CMAKE_CURRENT_SOURCE_DIR}/"') for d in defines]

    if 'qml_debug' in scope.get('CONFIG'):
        defines.append('QT_QML_DEBUG')

    write_list(cm_fh, defines, cmake_parameter, indent, footer=footer)


def write_include_paths(cm_fh: typing.IO[str], scope: Scope, cmake_parameter: str, *,
                        indent: int = 0, footer: str = ''):
    includes = [i.rstrip('/') or ('/') for i in scope.get_files('INCLUDEPATH')]

    write_list(cm_fh, includes, cmake_parameter, indent, footer=footer)


def write_compile_options(cm_fh: typing.IO[str], scope: Scope, cmake_parameter: str, *,
                          indent: int = 0, footer: str = ''):
    compile_options = [d for d in scope.expand('QMAKE_CXXFLAGS') if not d.startswith('-D')]

    write_list(cm_fh, compile_options, cmake_parameter, indent, footer=footer)


def write_library_section(cm_fh: typing.IO[str], scope: Scope, *,
                          indent: int = 0, known_libraries: typing.Set[str]=set()):
    (public_dependencies, private_dependencies) \
        = extract_cmake_libraries(scope, known_libraries=known_libraries)

    write_list(cm_fh, private_dependencies, 'LIBRARIES', indent + 1)
    write_list(cm_fh, public_dependencies, 'PUBLIC_LIBRARIES', indent + 1)


def write_autogen_section(cm_fh: typing.IO[str], scope: Scope, *,
                          indent: int = 0):
    forms = scope.get_files('FORMS')
    if forms:
        write_list(cm_fh, ['uic'], 'ENABLE_AUTOGEN_TOOLS', indent)


def write_sources_section(cm_fh: typing.IO[str], scope: Scope, *,
                          indent: int = 0, known_libraries=set()):
    ind = spaces(indent)

    # mark RESOURCES as visited:
    scope.get('RESOURCES')

    write_all_source_file_lists(cm_fh, scope, 'SOURCES', indent=indent + 1)

    write_source_file_list(cm_fh, scope, 'DBUS_ADAPTOR_SOURCES', ['DBUS_ADAPTORS',], indent + 1)
    dbus_adaptor_flags = scope.expand('QDBUSXML2CPP_ADAPTOR_HEADER_FLAGS')
    if dbus_adaptor_flags:
        cm_fh.write('{}    DBUS_ADAPTOR_FLAGS\n'.format(ind))
        cm_fh.write('{}        "{}"\n'.format(ind, '" "'.join(dbus_adaptor_flags)))

    write_source_file_list(cm_fh, scope, 'DBUS_INTERFACE_SOURCES', ['DBUS_INTERFACES',], indent + 1)
    dbus_interface_flags = scope.expand('QDBUSXML2CPP_INTERFACE_HEADER_FLAGS')
    if dbus_interface_flags:
        cm_fh.write('{}    DBUS_INTERFACE_FLAGS\n'.format(ind))
        cm_fh.write('{}        "{}"\n'.format(ind, '" "'.join(dbus_interface_flags)))

    write_defines(cm_fh, scope, 'DEFINES', indent=indent + 1)

    write_include_paths(cm_fh, scope, 'INCLUDE_DIRECTORIES', indent=indent + 1)

    write_library_section(cm_fh, scope, indent=indent, known_libraries=known_libraries)

    write_compile_options(cm_fh, scope, 'COMPILE_OPTIONS', indent=indent + 1)

    write_autogen_section(cm_fh, scope, indent=indent + 1)

    link_options = scope.get('QMAKE_LFLAGS')
    if link_options:
        cm_fh.write('{}    LINK_OPTIONS\n'.format(ind))
        for lo in link_options:
            cm_fh.write('{}        "{}"\n'.format(ind, lo))

    moc_options = scope.get('QMAKE_MOC_OPTIONS')
    if moc_options:
        cm_fh.write('{}    MOC_OPTIONS\n'.format(ind))
        for mo in moc_options:
            cm_fh.write('{}        "{}"\n'.format(ind, mo))


def is_simple_condition(condition: str) -> bool:
    return ' ' not in condition \
        or (condition.startswith('NOT ') and ' ' not in condition[4:])


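# Examples that follow directly from the check above:
#   is_simple_condition('QT_FEATURE_opengl')         -> True
#   is_simple_condition('NOT QT_FEATURE_opengl')     -> True
#   is_simple_condition('WIN32 AND QT_FEATURE_gui')  -> False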
def write_ignored_keys(scope: Scope, indent: str) -> str:
    result = ''
    ignored_keys = scope.keys - scope.visited_keys
    for k in sorted(ignored_keys):
        if k == '_INCLUDED' or k == 'TARGET' or k == 'QMAKE_DOCS' or k == 'QT_SOURCE_TREE' \
                or k == 'QT_BUILD_TREE' or k == 'TRACEPOINT_PROVIDER':
            # All these keys are actually reported already
            continue
        values = scope.get(k)
        value_string = '<EMPTY>' if not values \
            else '"' + '" "'.join(scope.get(k)) + '"'
        result += '{}# {} = {}\n'.format(indent, k, value_string)

    if result:
        result = '\n#### Keys ignored in scope {}:\n{}'.format(scope, result)

    return result


def _iterate_expr_tree(expr, op, matches):
    assert expr.func == op
    keepers = ()
    for arg in expr.args:
        if arg in matches:
            matches = tuple(x for x in matches if x != arg)
        elif arg == op:
            (matches, extra_keepers) = _iterate_expr_tree(arg, op, matches)
            keepers = (*keepers, *extra_keepers)
        else:
            keepers = (*keepers, arg)
    return matches, keepers


def _simplify_expressions(expr, op, matches, replacement):
    for arg in expr.args:
        expr = expr.subs(arg, _simplify_expressions(arg, op, matches,
                                                    replacement))

    if expr.func == op:
        (to_match, keepers) = tuple(_iterate_expr_tree(expr, op, matches))
        if len(to_match) == 0:
            # build expression with keepers and replacement:
            if keepers:
                start = replacement
                current_expr = None
                last_expr = keepers[-1]
                for repl_arg in keepers[:-1]:
                    current_expr = op(start, repl_arg)
                    start = current_expr
                top_expr = op(start, last_expr)
            else:
                top_expr = replacement

            expr = expr.subs(expr, top_expr)

    return expr


def _simplify_flavors_in_condition(base: str, flavors, expr):
    ''' Simplify conditions based on the knowledge of which flavors
        belong to which OS. '''
    base_expr = simplify_logic(base)
    false_expr = simplify_logic('false')
    for flavor in flavors:
        flavor_expr = simplify_logic(flavor)
        expr = _simplify_expressions(expr, And, (base_expr, flavor_expr,),
                                     flavor_expr)
        expr = _simplify_expressions(expr, Or, (base_expr, flavor_expr),
                                     base_expr)
        expr = _simplify_expressions(expr, And, (Not(base_expr), flavor_expr,),
                                     false_expr)
    return expr


def _simplify_os_families(expr, family_members, other_family_members):
    for family in family_members:
        for other in other_family_members:
            if other in family_members:
                continue  # skip those in the sub-family

            f_expr = simplify_logic(family)
            o_expr = simplify_logic(other)

            expr = _simplify_expressions(expr, And, (f_expr, Not(o_expr)), f_expr)
            expr = _simplify_expressions(expr, And, (Not(f_expr), o_expr), o_expr)
            expr = _simplify_expressions(expr, And, (f_expr, o_expr), simplify_logic('false'))
    return expr


def _recursive_simplify(expr):
    ''' Simplify the expression as much as possible based on
        domain knowledge. '''
    input_expr = expr

    # Simplify even further, based on domain knowledge:
    windowses = ('WIN32', 'WINRT')
    apples = ('APPLE_OSX', 'APPLE_UIKIT', 'APPLE_IOS',
              'APPLE_TVOS', 'APPLE_WATCHOS',)
    bsds = ('FREEBSD', 'OPENBSD', 'NETBSD',)
    androids = ('ANDROID', 'ANDROID_EMBEDDED')
    unixes = ('APPLE', *apples, 'BSD', *bsds, 'LINUX',
              *androids, 'HAIKU',
              'INTEGRITY', 'VXWORKS', 'QNX', 'WASM')

    unix_expr = simplify_logic('UNIX')
    win_expr = simplify_logic('WIN32')
    false_expr = simplify_logic('false')
    true_expr = simplify_logic('true')

    expr = expr.subs(Not(unix_expr), win_expr)  # NOT UNIX -> WIN32
    expr = expr.subs(Not(win_expr), unix_expr)  # NOT WIN32 -> UNIX

    # UNIX [OR foo ]OR WIN32 -> ON [OR foo]
    expr = _simplify_expressions(expr, Or, (unix_expr, win_expr,), true_expr)
    # UNIX [AND foo ]AND WIN32 -> OFF [AND foo]
    expr = _simplify_expressions(expr, And, (unix_expr, win_expr,), false_expr)

    expr = _simplify_flavors_in_condition('WIN32', ('WINRT',), expr)
    expr = _simplify_flavors_in_condition('APPLE', apples, expr)
    expr = _simplify_flavors_in_condition('BSD', bsds, expr)
    expr = _simplify_flavors_in_condition('UNIX', unixes, expr)
    expr = _simplify_flavors_in_condition('ANDROID', ('ANDROID_EMBEDDED',), expr)

    # Simplify families of OSes against other families:
    expr = _simplify_os_families(expr, ('WIN32', 'WINRT'), unixes)
    expr = _simplify_os_families(expr, androids, unixes)
    expr = _simplify_os_families(expr, ('BSD', *bsds), unixes)

    for family in ('HAIKU', 'QNX', 'INTEGRITY', 'LINUX', 'VXWORKS'):
        expr = _simplify_os_families(expr, (family,), unixes)

    # Now simplify further:
    expr = simplify_logic(expr)

    while expr != input_expr:
        input_expr = expr
        expr = _recursive_simplify(expr)

    return expr


def simplify_condition(condition: str) -> str:
    input_condition = condition.strip()

    # Map to sympy syntax:
    condition = ' ' + input_condition + ' '
    condition = condition.replace('(', ' ( ')
    condition = condition.replace(')', ' ) ')

    tmp = ''
    while tmp != condition:
        tmp = condition

        condition = condition.replace(' NOT ', ' ~ ')
        condition = condition.replace(' AND ', ' & ')
        condition = condition.replace(' OR ', ' | ')
        condition = condition.replace(' ON ', ' true ')
        condition = condition.replace(' OFF ', ' false ')
        # Replace dashes with a token
        condition = condition.replace('-', '_dash_')

    # SymPy chokes on expressions that contain two tokens one next to
    # the other delimited by a space, which are not an operation.
    # So a CMake condition like "TARGET Foo::Bar" fails the whole
    # expression simplifying process.
    # Turn these conditions into a single token so that SymPy can parse
    # the expression, and thus simplify it.
    # Do this by replacing and keeping a map of conditions to single
    # token symbols.
    # Support both target names without double colons, and with double
    # colons.
    pattern = re.compile(r'(TARGET [a-zA-Z]+(?:::[a-zA-Z]+)?)')
    target_symbol_mapping = {}
    all_target_conditions = re.findall(pattern, condition)
    for target_condition in all_target_conditions:
        # Replace spaces and colons with underscores.
        target_condition_symbol_name = re.sub('[ :]', '_', target_condition)
        target_symbol_mapping[target_condition_symbol_name] = target_condition
        condition = re.sub(target_condition, target_condition_symbol_name, condition)

    try:
        # Generate and simplify condition using sympy:
        condition_expr = simplify_logic(condition)
        condition = str(_recursive_simplify(condition_expr))

        # Restore the target conditions.
        for symbol_name in target_symbol_mapping:
            condition = re.sub(symbol_name, target_symbol_mapping[symbol_name], condition)

        # Map back to CMake syntax:
        condition = condition.replace('~', 'NOT ')
        condition = condition.replace('&', 'AND')
        condition = condition.replace('|', 'OR')
        condition = condition.replace('True', 'ON')
        condition = condition.replace('False', 'OFF')
        condition = condition.replace('_dash_', '-')
    except:
        # sympy did not like our input, so leave this condition alone:
        condition = input_condition

    return condition or 'ON'


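# Hedged examples (results depend on sympy's simplify_logic() and the domain
# rules in _recursive_simplify()):
#   simplify_condition('ON AND QT_FEATURE_opengl')  ->  'QT_FEATURE_opengl'
#   simplify_condition('NOT UNIX')                  ->  'WIN32'
#   simplify_condition('')                          ->  'ON'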
def recursive_evaluate_scope(scope: Scope, parent_condition: str = '',
                             previous_condition: str = '') -> str:
    current_condition = scope.condition
    total_condition = current_condition
    if total_condition == 'else':
        assert previous_condition, \
            "Else branch without previous condition in: %s" % scope.file
        total_condition = 'NOT ({})'.format(previous_condition)
    if parent_condition:
        if not total_condition:
            total_condition = parent_condition
        else:
            total_condition = '({}) AND ({})'.format(parent_condition,
                                                     total_condition)

    scope.total_condition = simplify_condition(total_condition)

    prev_condition = ''
    for c in scope.children:
        prev_condition = recursive_evaluate_scope(c, total_condition,
                                                  prev_condition)

    return current_condition


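# Note on 'else' handling (illustrative): for sibling scopes such as
#   win32: ...   -> total_condition roughly 'WIN32'
#   else: ...    -> total_condition roughly 'NOT (WIN32)'
# with both additionally ANDed with any parent condition before simplify_condition().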
def map_to_cmake_condition(condition: typing.Optional[str]) -> str:
    condition = condition.replace("QTDIR_build", "QT_BUILDING_QT")
    condition = re.sub(r'\bQT_ARCH___equals___([a-zA-Z_0-9]*)',
                       r'(TEST_architecture_arch STREQUAL "\1")', condition or '')
    condition = re.sub(r'\bQT_ARCH___contains___([a-zA-Z_0-9]*)',
                       r'(TEST_architecture_arch STREQUAL "\1")', condition or '')
    return condition


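# Examples that follow directly from the substitutions above:
#   'QT_ARCH___equals___i386'  ->  '(TEST_architecture_arch STREQUAL "i386")'
#   'QTDIR_build'              ->  'QT_BUILDING_QT'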
def write_resources(cm_fh: typing.IO[str], target: str, scope: Scope, indent: int = 0, is_example=False):
    vpath = scope.expand('VPATH')

    # Handle QRC files by turning them into add_qt_resource:
    resources = scope.get_files('RESOURCES')
    qtquickcompiler_skipped = scope.get_files('QTQUICK_COMPILER_SKIPPED_RESOURCES')
    qtquickcompiler_retained = scope.get_files('QTQUICK_COMPILER_RETAINED_RESOURCES')
    qrc_output = ''
    if resources:
        standalone_files: typing.List[str] = []
        for r in resources:
            skip_qtquick_compiler = r in qtquickcompiler_skipped
            retain_qtquick_compiler = r in qtquickcompiler_retained
            if r.endswith('.qrc'):
                qrc_output += process_qrc_file(target, r, scope.basedir, scope.file_absolute_path,
                                               skip_qtquick_compiler, retain_qtquick_compiler, is_example)
            else:
                immediate_files = {f: "" for f in scope.get_files(r + ".files")}
                if immediate_files:
                    immediate_prefix = scope.get(r + ".prefix")
                    if immediate_prefix:
                        immediate_prefix = immediate_prefix[0]
                    else:
                        immediate_prefix = "/"
                    immediate_base = scope.get(r + ".base")
                    immediate_lang = None
                    immediate_name = "qmake_" + r
                    qrc_output += write_add_qt_resource_call(target, immediate_name, immediate_prefix, immediate_base, immediate_lang,
                                                             immediate_files, skip_qtquick_compiler, retain_qtquick_compiler, is_example)
                else:
                    # Standalone source file properties need to be set as they
                    # are parsed.
                    if skip_qtquick_compiler:
                        qrc_output += 'set_source_files_properties("{}" PROPERTIES QT_SKIP_QUICKCOMPILER 1)\n\n'.format(r)

                    if retain_qtquick_compiler:
                        qrc_output += 'set_source_files_properties("{}" PROPERTIES QT_RETAIN_QUICKCOMPILER 1)\n\n'.format(r)
                    standalone_files.append(r)

        if standalone_files:
            name = "qmake_immediate"
            prefix = "/"
            base = None
            lang = None
            files = {f: "" for f in standalone_files}
            skip_qtquick_compiler = False
            qrc_output += write_add_qt_resource_call(target, name, prefix, base, lang, files,
                                                     skip_qtquick_compiler=False, retain_qtquick_compiler=False,
                                                     is_example=is_example)

    if qrc_output:
        cm_fh.write('\n# Resources:\n')
        for line in qrc_output.split('\n'):
            cm_fh.write(' ' * indent + line + '\n')


def write_extend_target(cm_fh: typing.IO[str], target: str,
                        scope: Scope, indent: int = 0):
    ind = spaces(indent)
    extend_qt_io_string = io.StringIO()
    write_sources_section(extend_qt_io_string, scope)
    extend_qt_string = extend_qt_io_string.getvalue()

    extend_scope = '\n{}extend_target({} CONDITION {}\n' \
                   '{}{})\n'.format(ind, target,
                                    map_to_cmake_condition(scope.total_condition),
                                    extend_qt_string, ind)

    if not extend_qt_string:
        extend_scope = ''  # Nothing to report, so don't!

    cm_fh.write(extend_scope)

    write_resources(cm_fh, target, scope, indent)


def flatten_scopes(scope: Scope) -> typing.List[Scope]:
    result = [scope]  # type: typing.List[Scope]
    for c in scope.children:
        result += flatten_scopes(c)
    return result


def merge_scopes(scopes: typing.List[Scope]) -> typing.List[Scope]:
    result = []  # type: typing.List[Scope]

    # Merge scopes with their parents:
    known_scopes = {}  # type: typing.Mapping[str, Scope]
    for scope in scopes:
        total_condition = scope.total_condition
        assert total_condition
        if total_condition == 'OFF':
            # ignore this scope entirely!
            pass
        elif total_condition in known_scopes:
            known_scopes[total_condition].merge(scope)
        else:
            # Keep everything else:
            result.append(scope)
            known_scopes[total_condition] = scope

    return result


def write_simd_part(cm_fh: typing.IO[str], target: str, scope: Scope, indent: int = 0):
    simd_options = ['sse2', 'sse3', 'ssse3', 'sse4_1', 'sse4_2', 'aesni', 'shani', 'avx', 'avx2',
                    'avx512f', 'avx512cd', 'avx512er', 'avx512pf', 'avx512dq', 'avx512bw',
                    'avx512vl', 'avx512ifma', 'avx512vbmi', 'f16c', 'rdrnd', 'neon', 'mips_dsp',
                    'mips_dspr2',
                    'arch_haswell', 'avx512common', 'avx512core']
    for simd in simd_options:
        SIMD = simd.upper()
        write_source_file_list(cm_fh, scope, 'SOURCES',
                               ['{}_HEADERS'.format(SIMD),
                                '{}_SOURCES'.format(SIMD),
                                '{}_C_SOURCES'.format(SIMD),
                                '{}_ASM'.format(SIMD)],
                               indent,
                               header='add_qt_simd_part({} SIMD {}\n'.format(target, simd),
                               footer=')\n\n')


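# Illustrative output shape (hypothetical target and file name): for a scope that
# sets SSE2_SOURCES this emits one call per SIMD variant that has files, roughly
#   add_qt_simd_part(Gui SIMD sse2
#       SOURCES
#           painting/qdrawhelper_sse2.cpp
#   )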
def write_android_part(cm_fh: typing.IO[str], target: str, scope: Scope, indent: int = 0):
    keys = ['ANDROID_BUNDLED_JAR_DEPENDENCIES',
            'ANDROID_LIB_DEPENDENCIES',
            'ANDROID_JAR_DEPENDENCIES',
            'ANDROID_LIB_DEPENDENCY_REPLACEMENTS',
            'ANDROID_BUNDLED_FILES',
            'ANDROID_PERMISSIONS']

    has_no_values = True
    for key in keys:
        value = scope.get(key)
        if len(value) != 0:
            if has_no_values:
                if scope.condition:
                    cm_fh.write('\n{}if(ANDROID AND ({}))\n'.format(spaces(indent), scope.condition))
                else:
                    cm_fh.write('\n{}if(ANDROID)\n'.format(spaces(indent)))
                indent += 1
                has_no_values = False
            cm_fh.write('{}set_property(TARGET {} APPEND PROPERTY QT_{}\n'.format(spaces(indent), target, key))
            write_list(cm_fh, value, '', indent + 1)
            cm_fh.write('{})\n'.format(spaces(indent)))
    indent -= 1
    if not has_no_values:
        cm_fh.write('{}endif()\n'.format(spaces(indent)))


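# Illustrative output shape (hypothetical values): a scope that sets
# ANDROID_PERMISSIONS produces roughly
#   if(ANDROID)
#       set_property(TARGET Core APPEND PROPERTY QT_ANDROID_PERMISSIONS
#           android.permission.INTERNET
#       )
#   endif()
# wrapped in 'if(ANDROID AND (<scope condition>))' when the scope has a condition.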
def write_main_part(cm_fh: typing.IO[str], name: str, typename: str,
                    cmake_function: str, scope: Scope, *,
                    extra_lines: typing.List[str] = [],
                    indent: int = 0, extra_keys: typing.List[str],
                    **kwargs: typing.Any):
    # Evaluate total condition of all scopes:
    recursive_evaluate_scope(scope)

    is_qml_plugin = any('qml_plugin' == s for s in scope.get('_LOADED'))

    if 'exceptions' in scope.get('CONFIG'):
        extra_lines.append('EXCEPTIONS')

    # Get a flat list of all scopes but the main one:
    scopes = flatten_scopes(scope)
    total_scopes = len(scopes)
    # Merge scopes based on their conditions:
    scopes = merge_scopes(scopes)

    assert len(scopes)
    assert scopes[0].total_condition == 'ON'

    scopes[0].reset_visited_keys()
    for k in extra_keys:
        scopes[0].get(k)

    # Now write out the scopes:
    write_header(cm_fh, name, typename, indent=indent)

    # collect all testdata and insert globbing commands
    has_test_data = False
    if typename == 'Test':
        test_data = scope.expand('TESTDATA')
        if test_data:
            has_test_data = True
            cm_fh.write('# Collect test data\n')
            for data in test_data:
                if '*' in data:
                    cm_fh.write(dedent("""
                        {indent}file(GLOB_RECURSE test_data_glob
                        {indent1}RELATIVE ${{CMAKE_CURRENT_SOURCE_DIR}}
                        {indent1}"{}")
                        """).format(
                        data,
                        indent=spaces(indent),
                        indent1=spaces(indent + 1)
                    ))
                    cm_fh.write('{}list(APPEND test_data ${{test_data_glob}})\n'.format(spaces(indent)))
                else:
                    cm_fh.write('{}list(APPEND test_data "{}")\n'.format(spaces(indent), data))
            cm_fh.write('\n')

    # Check for DESTDIR override
    destdir = scope.get_string('DESTDIR')
    if destdir:
        if destdir.startswith('./') or destdir.startswith('../'):
            destdir = '${CMAKE_CURRENT_BINARY_DIR}/' + destdir
        extra_lines.append('OUTPUT_DIRECTORY "{}"'.format(destdir))

    cm_fh.write('{}{}({}\n'.format(spaces(indent), cmake_function, name))
    for extra_line in extra_lines:
        cm_fh.write('{}    {}\n'.format(spaces(indent), extra_line))

    write_sources_section(cm_fh, scopes[0], indent=indent, **kwargs)

    if has_test_data:
        cm_fh.write('{}    TESTDATA ${{test_data}}\n'.format(spaces(indent)))
    # Footer:
    cm_fh.write('{})\n'.format(spaces(indent)))

    write_resources(cm_fh, name, scope, indent)

    write_simd_part(cm_fh, name, scope, indent)

    write_android_part(cm_fh, name, scopes[0], indent)

    if is_qml_plugin:
        write_qml_plugin_qml_files(cm_fh, name, scopes[0], indent)

    ignored_keys_report = write_ignored_keys(scopes[0], spaces(indent))
    if ignored_keys_report:
        cm_fh.write(ignored_keys_report)

    # Scopes:
    if len(scopes) == 1:
        return

    write_scope_header(cm_fh, indent=indent)

    for c in scopes[1:]:
        c.reset_visited_keys()
        write_android_part(cm_fh, name, c, indent=indent)
        write_extend_target(cm_fh, name, c, indent=indent)
        ignored_keys_report = write_ignored_keys(c, spaces(indent))
        if ignored_keys_report:
            cm_fh.write(ignored_keys_report)


def write_module(cm_fh: typing.IO[str], scope: Scope, *,
                 indent: int = 0) -> None:
    module_name = scope.TARGET
    if not module_name.startswith('Qt'):
        print('XXXXXX Module name {} does not start with Qt!'.format(module_name))

    extra = []

    # A module should be static when 'static' is in CONFIG
    # or when option(host_build) is used, as described in qt_module.prf.
    is_static = 'static' in scope.get('CONFIG') or 'host_build' in scope.get('_OPTION')

    if is_static:
        extra.append('STATIC')
    if 'internal_module' in scope.get('CONFIG'):
        extra.append('INTERNAL_MODULE')
    if 'no_module_headers' in scope.get('CONFIG'):
        extra.append('NO_MODULE_HEADERS')
    if 'minimal_syncqt' in scope.get('CONFIG'):
        extra.append('NO_SYNC_QT')
    if 'no_private_module' in scope.get('CONFIG'):
        extra.append('NO_PRIVATE_MODULE')
    if 'header_module' in scope.get('CONFIG'):
        extra.append('HEADER_MODULE')

    module_config = scope.get("MODULE_CONFIG")
    if len(module_config):
        extra.append('QMAKE_MODULE_CONFIG {}'.format(" ".join(module_config)))

    module_plugin_types = scope.get_files('MODULE_PLUGIN_TYPES')
    if module_plugin_types:
        extra.append('PLUGIN_TYPES {}'.format(" ".join(module_plugin_types)))

    write_main_part(cm_fh, module_name[2:], 'Module', 'add_qt_module', scope,
                    extra_lines=extra, indent=indent,
                    known_libraries={}, extra_keys=[])

    if 'qt_tracepoints' in scope.get('CONFIG'):
        tracepoints = scope.get_files('TRACEPOINT_PROVIDER')
        cm_fh.write('\n\n{}qt_create_tracepoints({} {})\n'
                    .format(spaces(indent), module_name[2:], ' '.join(tracepoints)))


def write_tool(cm_fh: typing.IO[str], scope: Scope, *,
               indent: int = 0) -> None:
    tool_name = scope.TARGET

    extra = ['BOOTSTRAP'] if 'force_bootstrap' in scope.get('CONFIG') else []

    write_main_part(cm_fh, tool_name, 'Tool', 'add_qt_tool', scope,
                    indent=indent, known_libraries={'Qt::Core', },
                    extra_lines=extra, extra_keys=['CONFIG'])


def write_test(cm_fh: typing.IO[str], scope: Scope,
               gui: bool = False, *, indent: int = 0) -> None:
    test_name = scope.TARGET
    assert test_name

    extra = ['GUI',] if gui else []
    libraries = {'Qt::Core', 'Qt::Test'}

    if 'qmltestcase' in scope.get('CONFIG'):
        libraries.add('Qt::QmlTest')
        extra.append('QMLTEST')
        importpath = scope.get('IMPORTPATH')
        if importpath:
            qml_importpath = scope.expandString(importpath)
            if qml_importpath:
                extra.append('QML_IMPORTPATH "{}"'.format(qml_importpath))

    write_main_part(cm_fh, test_name, 'Test', 'add_qt_test', scope,
                    indent=indent, known_libraries=libraries,
                    extra_lines=extra, extra_keys=[])


def write_binary(cm_fh: typing.IO[str], scope: Scope,
                 gui: bool = False, *, indent: int = 0) -> None:
    binary_name = scope.TARGET
    assert binary_name

    is_qt_test_helper = 'qt_test_helper' in scope.get('_LOADED')

    extra = ['GUI'] if gui and not is_qt_test_helper else []
    cmake_function_call = 'add_qt_executable'

    if is_qt_test_helper:
        binary_name += '_helper'
        cmake_function_call = 'add_qt_test_helper'

    target_path = scope.get_string('target.path')
    if target_path:
        target_path = target_path.replace('$$[QT_INSTALL_EXAMPLES]', '${INSTALL_EXAMPLESDIR}')
        extra.append('OUTPUT_DIRECTORY "{}"'.format(target_path))
        if 'target' in scope.get('INSTALLS'):
            extra.append('INSTALL_DIRECTORY "{}"'.format(target_path))

    write_main_part(cm_fh, binary_name, 'Binary', cmake_function_call, scope,
                    extra_lines=extra, indent=indent,
                    known_libraries={'Qt::Core', }, extra_keys=['target.path', 'INSTALLS'])


def write_find_package_section(cm_fh: typing.IO[str],
                               public_libs: typing.List[str],
                               private_libs: typing.List[str], *, indent: int = 0):
    packages = []  # type: typing.List[LibraryMapping]
    all_libs = public_libs + private_libs

    for l in all_libs:
        info = find_library_info_for_target(l)
        if info and info not in packages:
            packages.append(info)

    ind = spaces(indent)

    for p in packages:
        cm_fh.write(generate_find_package_info(p, use_qt_find_package=False, indent=indent))

    if packages:
        cm_fh.write('\n')


def write_example(cm_fh: typing.IO[str], scope: Scope,
                  gui: bool = False, *, indent: int = 0) -> None:
    binary_name = scope.TARGET
    assert binary_name

    cm_fh.write('cmake_minimum_required(VERSION 3.14)\n' +
                'project({} LANGUAGES CXX)\n\n'.format(binary_name) +
                'set(CMAKE_INCLUDE_CURRENT_DIR ON)\n\n' +
                'set(CMAKE_AUTOMOC ON)\n' +
                'set(CMAKE_AUTORCC ON)\n' +
                'set(CMAKE_AUTOUIC ON)\n\n' +
                'set(INSTALL_EXAMPLEDIR "examples")\n\n')

    (public_libs, private_libs) = extract_cmake_libraries(scope)
    write_find_package_section(cm_fh, public_libs, private_libs, indent=indent)

    add_executable = 'add_{}executable({}'.format("qt_gui_" if gui else "", binary_name)

    write_all_source_file_lists(cm_fh, scope, add_executable, indent=0)

    cm_fh.write(')\n')

    write_include_paths(cm_fh, scope, 'target_include_directories({} PUBLIC'.format(binary_name),
                        indent=0, footer=')')
    write_defines(cm_fh, scope, 'target_compile_definitions({} PUBLIC'.format(binary_name),
                  indent=0, footer=')')
    write_list(cm_fh, private_libs, '', indent=indent,
               header='target_link_libraries({} PRIVATE\n'.format(binary_name), footer=')')
    write_list(cm_fh, public_libs, '', indent=indent,
               header='target_link_libraries({} PUBLIC\n'.format(binary_name), footer=')')
    write_compile_options(cm_fh, scope, 'target_compile_options({}'.format(binary_name),
                          indent=0, footer=')')

    write_resources(cm_fh, binary_name, scope, indent=indent, is_example=True)

    cm_fh.write('\ninstall(TARGETS {}\n'.format(binary_name) +
                '    RUNTIME DESTINATION "${INSTALL_EXAMPLEDIR}"\n' +
                '    BUNDLE DESTINATION "${INSTALL_EXAMPLEDIR}"\n' +
                '    LIBRARY DESTINATION "${INSTALL_EXAMPLEDIR}"\n' +
                ')\n')
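

# Writes a plugin target: add_qt_plugin() by default, or add_qml_module() when
# the scope loaded qml_plugin, forwarding PLUGIN_TYPE and PLUGIN_CLASS_NAME as
# extra keywords.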
def write_plugin(cm_fh, scope, *, indent: int = 0):
    plugin_name = scope.TARGET
    assert plugin_name

    extra = []

    plugin_type = scope.get_string('PLUGIN_TYPE')
    is_qml_plugin = any('qml_plugin' == s for s in scope.get('_LOADED'))
    plugin_function_name = 'add_qt_plugin'
    if plugin_type:
        extra.append('TYPE {}'.format(plugin_type))
    elif is_qml_plugin:
        plugin_function_name = 'add_qml_module'
        write_qml_plugin(cm_fh, plugin_name, scope, indent=indent, extra_lines=extra)

    plugin_class_name = scope.get_string('PLUGIN_CLASS_NAME')
    if plugin_class_name:
        extra.append('CLASS_NAME {}'.format(plugin_class_name))

    write_main_part(cm_fh, plugin_name, 'Plugin', plugin_function_name, scope,
                    indent=indent, extra_lines=extra, known_libraries={}, extra_keys=[])
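

# Appends the QML-module-specific keywords (CPP_PLUGIN, TARGET_PATH, URI,
# VERSION, QML_PLUGINDUMP_DEPENDENCIES) to extra_lines for the enclosing
# add_qml_module() call.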
def write_qml_plugin(cm_fh: typing.IO[str],
                     target: str,
                     scope: Scope, *,
                     extra_lines: typing.List[str] = [],
                     indent: int = 0,
                     **kwargs: typing.Any):
    # Collect other args if available
    indent += 2
    scope_config = scope.get('CONFIG')
    is_embedding_qml_files = False

    sources = scope.get_files('SOURCES')
    if len(sources) != 0:
        extra_lines.append('CPP_PLUGIN')

    target_path = scope.get_string('TARGETPATH')
    if target_path:
        uri = target_path.replace('/', '.')
        import_name = scope.get_string('IMPORT_NAME')
        # Catch special cases such as foo.QtQuick.2.bar, which when converted
        # into a target path via cmake will result in foo/QtQuick/2/bar, which is
        # not what we want. So we supply the target path override.
        target_path_from_uri = uri.replace('.', '/')
        if target_path != target_path_from_uri:
            extra_lines.append('TARGET_PATH "{}"'.format(target_path))
        if import_name:
            extra_lines.append('URI "{}"'.format(import_name))
        else:
            uri = re.sub('\\.\\d+', '', uri)
            extra_lines.append('URI "{}"'.format(uri))

    import_version = scope.get_string('IMPORT_VERSION')
    if import_version:
        import_version = import_version.replace("$$QT_MINOR_VERSION", "${CMAKE_PROJECT_VERSION_MINOR}")
        extra_lines.append('VERSION "{}"'.format(import_version))

    plugindump_dep = scope.get_string('QML_PLUGINDUMP_DEPENDENCIES')

    if plugindump_dep:
        extra_lines.append('QML_PLUGINDUMP_DEPENDENCIES "{}"'.format(plugindump_dep))
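

# Writes the qml_files list plus the add_qt_resource() and
# qt_install_qml_files() calls that embed and install a QML module's .qml files.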
def write_qml_plugin_qml_files(cm_fh: typing.IO[str],
                               target: str,
                               scope: Scope,
                               indent: int = 0):
    qml_files = scope.get_files('QML_FILES', use_vpath=True)
    if qml_files:
        # Quote file paths in case there are spaces.
        qml_files = ['"{}"'.format(f) for f in qml_files]

        cm_fh.write('\n{}set(qml_files\n{}{}\n)\n'.format(
            spaces(indent),
            spaces(indent + 1),
            '\n{}'.format(spaces(indent + 1)).join(qml_files)))

        target_path = scope.get_string('TARGETPATH', inherit=True)
        target_path_mangled = target_path.replace('/', '_')
        target_path_mangled = target_path_mangled.replace('.', '_')
        resource_name = 'qmake_' + target_path_mangled
        cm_fh.write('\n{}add_qt_resource({} {}\n{}FILES\n{}${{qml_files}}\n)\n'.format(
            spaces(indent),
            target,
            resource_name,
            spaces(indent + 1),
            spaces(indent + 2)))

        cm_fh.write('\nqt_install_qml_files({}\n    FILES ${{qml_files}}\n)\n\n'.format(
            target))
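

# Dispatches an 'app' or 'lib' template scope to the matching writer (plugin,
# module, tool, test, example or plain binary) and finally emits add_qt_docs()
# for any QMAKE_DOCS entries.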
def handle_app_or_lib(scope: Scope, cm_fh: typing.IO[str], *,
                      indent: int = 0, is_example: bool = False) -> None:
    assert scope.TEMPLATE in ('app', 'lib')

    config = scope.get('CONFIG')
    is_lib = scope.TEMPLATE == 'lib'
    is_qml_plugin = any('qml_plugin' == s for s in scope.get('_LOADED'))
    is_plugin = any('qt_plugin' == s for s in scope.get('_LOADED')) or is_qml_plugin or 'plugin' in config

    if is_plugin:
        assert not is_example
        write_plugin(cm_fh, scope, indent=indent)
    elif is_lib or 'qt_module' in scope.get('_LOADED'):
        assert not is_example
        write_module(cm_fh, scope, indent=indent)
    elif 'qt_tool' in scope.get('_LOADED'):
        assert not is_example
        write_tool(cm_fh, scope, indent=indent)
    else:
        gui = all(val not in config for val in ['console', 'cmdline'])
        if 'testcase' in config \
                or 'testlib' in config \
                or 'qmltestcase' in config:
            assert not is_example
            write_test(cm_fh, scope, gui, indent=indent)
        else:
            if is_example:
                write_example(cm_fh, scope, gui, indent=indent)
            else:
                write_binary(cm_fh, scope, gui, indent=indent)

    ind = spaces(indent)
    write_source_file_list(cm_fh, scope, '',
                           ['QMAKE_DOCS',],
                           indent,
                           header='add_qt_docs(\n',
                           footer=')\n')
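

# Writes the top-level CMakeLists.txt of a repository (e.g. qtdeclarative):
# cmake_minimum_required(), project(), the Qt6 find_package() calls and
# qt_build_repo().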
def handle_top_level_repo_project(scope: Scope, cm_fh: typing.IO[str]):
    # qtdeclarative
    project_file_name = os.path.splitext(os.path.basename(scope.file_absolute_path))[0]

    # declarative
    file_name_without_qt_prefix = project_file_name[2:]

    # Qt::Declarative
    qt_lib = map_qt_library(file_name_without_qt_prefix)

    # Found a mapping, adjust name.
    if qt_lib != file_name_without_qt_prefix:
        # QtDeclarative
        qt_lib = re.sub(r':', r'', qt_lib)

        # Declarative
        qt_lib_no_prefix = qt_lib[2:]
    else:
        qt_lib += "_FIXME"
        qt_lib_no_prefix = qt_lib

    content = """cmake_minimum_required(VERSION {})

project({}
    VERSION 6.0.0
    DESCRIPTION "Qt {} Libraries"
    HOMEPAGE_URL "https://qt.io/"
    LANGUAGES CXX C
)

find_package(Qt6 ${{PROJECT_VERSION}} CONFIG REQUIRED COMPONENTS BuildInternals Core SET_ME_TO_SOMETHING_USEFUL)
find_package(Qt6 ${{PROJECT_VERSION}} CONFIG OPTIONAL_COMPONENTS SET_ME_TO_SOMETHING_USEFUL)

qt_build_repo()
""".format(cmake_version_string, qt_lib, qt_lib_no_prefix)

    cm_fh.write('{}'.format(content))
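

# Returns the single top-level .pro file living next to the repository's
# .qmake.conf, or None if none is found.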
def find_top_level_repo_project_file(project_file_path: str = '') -> typing.Optional[str]:
    qmake_conf_path = find_qmake_conf(project_file_path)
    qmake_dir = os.path.dirname(qmake_conf_path)

    # Hope to a programming god that there's only one .pro file at the
    # top level directory of repository.
    glob_result = glob.glob(os.path.join(qmake_dir, '*.pro'))
    if len(glob_result) > 0:
        return glob_result[0]
    return None
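

# Writes the CMakeLists.txt for a repository's top-level tests directory,
# including the guard that allows it to be configured as a standalone tests
# build.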
def handle_top_level_repo_tests_project(scope: Scope, cm_fh: typing.IO[str]):
    top_level_project_path = find_top_level_repo_project_file(scope.file_absolute_path)
    if top_level_project_path:
        # qtdeclarative
        file_name = os.path.splitext(os.path.basename(top_level_project_path))[0]

        # declarative
        file_name_without_qt = file_name[2:]

        # Qt::Declarative
        qt_lib = map_qt_library(file_name_without_qt)

        # Found a mapping, adjust name.
        if qt_lib != file_name_without_qt:
            # QtDeclarative
            qt_lib = re.sub(r':', r'', qt_lib) + "Tests"
        else:
            qt_lib += "Tests_FIXME"
    else:
        qt_lib = "Tests_FIXME"

    content = """if(NOT TARGET Qt::Test)
    cmake_minimum_required(VERSION {})
    project({} VERSION 6.0.0 LANGUAGES C CXX)
    find_package(Qt6 ${{PROJECT_VERSION}} REQUIRED COMPONENTS BuildInternals Core SET_ME_TO_SOMETHING_USEFUL)
    find_package(Qt6 ${{PROJECT_VERSION}} OPTIONAL_COMPONENTS SET_ME_TO_SOMETHING_USEFUL)
    qt_set_up_standalone_tests_build()
endif()

qt_build_tests()
""".format(cmake_version_string, qt_lib)

    cm_fh.write('{}'.format(content))
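

# Converts one parsed scope into CMake code. The output is rendered into a
# string buffer first so that a top-level examples project can be wrapped in
# qt_examples_build_begin()/qt_examples_build_end() before being written out.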
def cmakeify_scope(scope: Scope, cm_fh: typing.IO[str], *,
                   indent: int = 0, is_example: bool = False) -> None:
    template = scope.TEMPLATE

    temp_buffer = io.StringIO()

    # Handle top level repo project in a special way.
    if is_top_level_repo_project(scope.file_absolute_path):
        handle_top_level_repo_project(scope, temp_buffer)
    # Same for top-level tests.
    elif is_top_level_repo_tests_project(scope.file_absolute_path):
        handle_top_level_repo_tests_project(scope, temp_buffer)
    elif template == 'subdirs':
        handle_subdir(scope, temp_buffer, indent=indent, is_example=is_example)
    elif template in ('app', 'lib'):
        handle_app_or_lib(scope, temp_buffer, indent=indent, is_example=is_example)
    else:
        print('    XXXX: {}: Template type {} not yet supported.'
              .format(scope.file, template))

    buffer_value = temp_buffer.getvalue()

    if is_top_level_repo_examples_project(scope.file_absolute_path):
        # Wrap top level examples project with some commands which
        # are necessary to build examples as part of the overall
        # build.
        buffer_value = """qt_examples_build_begin()

{}
qt_examples_build_end()
""".format(buffer_value)

    cm_fh.write(buffer_value)
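

# Generates CMakeLists.gen.txt for the given scope; the file is later copied
# over CMakeLists.txt unless special-case handling decides otherwise.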
def generate_new_cmakelists(scope: Scope, *, is_example: bool = False) -> None:
    print('Generating CMakeLists.gen.txt')
    with open(scope.generated_cmake_lists_path, 'w') as cm_fh:
        assert scope.file
        cm_fh.write('# Generated from {}.\n\n'
                    .format(os.path.basename(scope.file)))

        is_example_heuristic = is_example_project(scope.file_absolute_path)
        final_is_example_decision = is_example or is_example_heuristic
        cmakeify_scope(scope, cm_fh, is_example=final_is_example_decision)
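

# Recursively parses every file listed in _INCLUDED (.pri includes) and merges
# the resulting scopes back into their parent scope.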
def do_include(scope: Scope, *, debug: bool = False) -> None:
    for c in scope.children:
        do_include(c)

    for include_file in scope.get_files('_INCLUDED', is_include=True):
        if not include_file:
            continue
        if not os.path.isfile(include_file):
            print('    XXXX: Failed to include {}.'.format(include_file))
            continue

        include_result = parseProFile(include_file, debug=debug)
        include_scope \
            = Scope.FromDict(None, include_file,
                             include_result.asDict().get('statements'),
                             '', scope.basedir)  # This scope will be merged into scope!

        do_include(include_scope)

        scope.merge(include_scope)
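

# Replaces the existing CMakeLists.txt with the freshly generated file and,
# unless temporary files are kept, removes CMakeLists.gen.txt afterwards.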
def copy_generated_file_to_final_location(scope: Scope, keep_temporary_files=False) -> None:
    print('Copying {} to {}'.format(scope.generated_cmake_lists_path,
                                    scope.original_cmake_lists_path))
    copyfile(scope.generated_cmake_lists_path, scope.original_cmake_lists_path)
    if not keep_temporary_files:
        os.remove(scope.generated_cmake_lists_path)
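

# Filters out projects that must stay qmake-based: the CMake auto tests and the
# qmake testdata projects are never converted.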
def should_convert_project(project_file_path: str = '') -> bool:
    qmake_conf_path = find_qmake_conf(project_file_path)
    qmake_conf_dir_path = os.path.dirname(qmake_conf_path)

    project_relative_path = os.path.relpath(project_file_path, qmake_conf_dir_path)

    # Skip cmake auto tests, they should not be converted.
    if project_relative_path.startswith('tests/auto/cmake'):
        return False

    # Skip qmake testdata projects.
    if project_relative_path.startswith('tests/auto/tools/qmake/testdata'):
        return False

    return True
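

# Entry point: for every .pro file passed on the command line, parse it,
# resolve its includes, generate CMakeLists.gen.txt and, optionally after
# special-case preservation, copy the result over CMakeLists.txt. A typical
# invocation looks roughly like this (the script name is an assumption; use
# whatever this file is called in your checkout):
#
#     ./pro2cmake.py path/to/project.pro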
def main() -> None:
    args = _parse_commandline()

    debug_parsing = args.debug_parser or args.debug

    backup_current_dir = os.getcwd()

    for file in args.files:
        new_current_dir = os.path.dirname(file)
        file_relative_path = os.path.basename(file)
        if new_current_dir:
            os.chdir(new_current_dir)

        project_file_absolute_path = os.path.abspath(file)
        if not should_convert_project(project_file_absolute_path):
            print('Skipping conversion of project: "{}"'.format(project_file_absolute_path))
            continue

        parseresult = parseProFile(file_relative_path, debug=debug_parsing)

        if args.debug_parse_result or args.debug:
            print('\n\n#### Parser result:')
            print(parseresult)
            print('\n#### End of parser result.\n')
        if args.debug_parse_dictionary or args.debug:
            print('\n\n#### Parser result dictionary:')
            print(parseresult.asDict())
            print('\n#### End of parser result dictionary.\n')

        file_scope = Scope.FromDict(None, file_relative_path,
                                    parseresult.asDict().get('statements'))

        if args.debug_pro_structure or args.debug:
            print('\n\n#### .pro/.pri file structure:')
            file_scope.dump()
            print('\n#### End of .pro/.pri file structure.\n')

        do_include(file_scope, debug=debug_parsing)

        if args.debug_full_pro_structure or args.debug:
            print('\n\n#### Full .pro/.pri file structure:')
            file_scope.dump()
            print('\n#### End of full .pro/.pri file structure.\n')

        generate_new_cmakelists(file_scope, is_example=args.is_example)

        copy_generated_file = True
        if not args.skip_special_case_preservation:
            debug_special_case = args.debug_special_case_preservation or args.debug
            handler = SpecialCaseHandler(file_scope.original_cmake_lists_path,
                                         file_scope.generated_cmake_lists_path,
                                         file_scope.basedir,
                                         keep_temporary_files=args.keep_temporary_files,
                                         debug=debug_special_case)

            copy_generated_file = handler.handle_special_cases()

        if copy_generated_file:
            copy_generated_file_to_final_location(file_scope,
                                                  keep_temporary_files=args.keep_temporary_files)
        os.chdir(backup_current_dir)


if __name__ == '__main__':
    main()