spark: exploring the spark-with-git build - removing some project modules again

zinface 2024-07-08 21:04:23 +08:00
parent 03080aceff
commit 1807b73840
5 changed files with 3 additions and 551 deletions

View File

@@ -1,132 +0,0 @@
# spark_install_target <install-dir> <targets...>
# Install one or more build targets into the given directory.
# install-dir: destination directory A
# targets:     one or more targets A B C...
macro(spark_install_target INSTALL_TARGET_DIR INSTALL_TARGETS)
install(TARGETS
${INSTALL_TARGETS} ${ARGN}
DESTINATION ${INSTALL_TARGET_DIR})
endmacro(spark_install_target INSTALL_TARGET_DIR INSTALL_TARGETS)
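# Usage sketch (hypothetical target and path, not from this repo):
#   spark_install_target(/usr/bin my_app my_tool)
# expands to: install(TARGETS my_app my_tool DESTINATION /usr/bin)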
# spark_install_file <install-dir> <files...>
# Install one or more regular files into the given directory.
# install-dir: destination directory A
# files:       one or more files A B C...
macro(spark_install_file INSTALL_FILE_DIR INSTALL_FILE)
install(FILES
${INSTALL_FILE} ${ARGN}
DESTINATION ${INSTALL_FILE_DIR})
endmacro(spark_install_file INSTALL_FILE_DIR INSTALL_FILE)
# spark_install_program <install-dir> <programs...>
# Install one or more executable files into the given directory.
# install-dir: destination directory A
# programs:    one or more programs A B C...
macro(spark_install_program INSTALL_PROGRAM_DIR INSTALL_PROGRAM)
install(PROGRAMS
${INSTALL_PROGRAM} ${ARGN}
DESTINATION ${INSTALL_PROGRAM_DIR})
endmacro(spark_install_program INSTALL_PROGRAM_DIR INSTALL_PROGRAM)
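# Usage sketch (hypothetical files): FILES keeps default permissions,
# while PROGRAMS installs with execute bits set.
#   spark_install_file(/usr/share/applications my_app.desktop)
#   spark_install_program(/usr/bin scripts/my-helper.sh)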
# spark_install_directory <install-dir> <directory>
# Install a directory into the given directory.
# install-dir: destination directory A
# directory:   a directory A (installed as a whole) or a glob A/* (contents installed one by one)
macro(spark_install_directory INSTALL_DIRECTORY_DIR INSTALL_DIRECOTRY)
# If INSTALL_DIRECOTRY ends with a '*' glob:
# 1. strip the '*' and list the directory contents
# 2. install plain files via spark_install_program (or spark_install_file)
# 3. install subdirectories via install(DIRECTORY)
# message(FATAL_ERROR "${INSTALL_DIRECTORY_DIR}")
# string(FIND <string> <substring> <output_variable> [REVERSE])
string(FIND "${INSTALL_DIRECOTRY}" "*" INSTALL_DIRECTORY_FIND_INDEX)
# message(FATAL_ERROR "${INSTALL_DIRECTORY_FIND_INDEX}: ${INSTALL_DIRECTORY_DIR}")
# file(GLOB <variable>
# [LIST_DIRECTORIES true|false] [RELATIVE <path>] [CONFIGURE_DEPENDS]
# [<globbing-expressions>...])
if (NOT INSTALL_DIRECTORY_FIND_INDEX EQUAL -1)
# string(SUBSTRING <string> <begin> <length> <output_variable>)
string(SUBSTRING "${INSTALL_DIRECOTRY}" 0 ${INSTALL_DIRECTORY_FIND_INDEX} INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING)
# message(FATAL_ERROR "directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING}")
# file(GLOB <variable>
# [LIST_DIRECTORIES true|false] [RELATIVE <path>] [CONFIGURE_DEPENDS]
# [<globbing-expressions>...])
file(GLOB INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING}/*)
list(LENGTH INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST_LENGTH)
foreach(item IN LISTS INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST)
# message("-> ${item}")
if(IS_DIRECTORY ${item})
message("-> ${item} IS_DIRECTORY")
# spark_install_directory(${INSTALL_DIRECTORY_DIR} ${item})
install(DIRECTORY
${item}
DESTINATION ${INSTALL_DIRECTORY_DIR}
USE_SOURCE_PERMISSIONS)
else()
message("-> ${item} NOT IS_DIRECTORY")
spark_install_program(${INSTALL_DIRECTORY_DIR} ${item})
# spark_install_file(${INSTALL_DIRECTORY_DIR} ${item})
endif(IS_DIRECTORY ${item})
endforeach(item IN LISTS INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST)
# message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST}")
# message(FATAL_ERROR " directory: ${INSTALL_DIRECTORY_FIND_INDEX_SUBSTRING_FILE_GLOB_LIST_LENGTH}")
else()
# ISSUES: check this fallback branch carefully
# message(FATAL_ERROR "install ${INSTALL_DIRECTORY_DIR}")
install(DIRECTORY
${INSTALL_DIRECOTRY} ${ARGN}
DESTINATION ${INSTALL_DIRECTORY_DIR})
endif(NOT INSTALL_DIRECTORY_FIND_INDEX EQUAL -1)
endmacro(spark_install_directory INSTALL_DIRECTORY_DIR INSTALL_DIRECOTRY)
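# Usage sketch (hypothetical paths): without '*' the directory itself is
# installed; with 'assets/*' each entry is installed one by one as above.
#   spark_install_directory(/usr/share/my_app assets)
#   spark_install_directory(/usr/share/my_app assets/*)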
macro(spark_install_changelog CHANGE_LOG_FILE)
set(SOURCE_CHANGE_LOG_FILE ${CHANGE_LOG_FILE})
if (EXISTS ${SOURCE_CHANGE_LOG_FILE})
execute_process(COMMAND test -f ${SOURCE_CHANGE_LOG_FILE}
RESULT_VARIABLE changelog_test
)
execute_process(COMMAND which gzip
RESULT_VARIABLE gzip_test
)
if (NOT changelog_test EQUAL 0)
message(FATAL_ERROR "NOTE: 不是常规文件: ${SOURCE_CHANGE_LOG_FILE}")
endif(NOT changelog_test EQUAL 0)
if (NOT gzip_test EQUAL 0)
message(FATAL_ERROR "NOTE: 未安装 gzip, 无法压缩 changelog")
endif(NOT gzip_test EQUAL 0)
# Compress the changelog at build time and install it alongside debian/copyright.
add_custom_command(
OUTPUT "${CMAKE_BINARY_DIR}/changelog.gz"
COMMAND gzip -cn9 "${SOURCE_CHANGE_LOG_FILE}" > "${CMAKE_BINARY_DIR}/changelog.gz"
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
COMMENT "Compressing changelog"
)
add_custom_target(changelog ALL DEPENDS "${CMAKE_BINARY_DIR}/changelog.gz")
# include(GNUInstallDirs)
set(SPARK_INSTALL_CHANGE_LOG_DIR "/usr/share/doc/${PROJECT_NAME}/")
install(FILES
${CMAKE_BINARY_DIR}/changelog.gz
debian/copyright
DESTINATION ${SPARK_INSTALL_CHANGE_LOG_DIR}
)
else()
message(FATAL_ERROR "未找到: ${SOURCE_CHANGE_LOG_FILE}")
endif(EXISTS ${SOURCE_CHANGE_LOG_FILE})
endmacro(spark_install_changelog CHANGE_LOG_FILE)
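# Usage sketch (assuming a debian-style changelog in the source tree):
#   spark_install_changelog(${CMAKE_SOURCE_DIR}/debian/changelog)
# gzip -cn9 yields the reproducible changelog.gz that Debian tooling expects.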

View File

@@ -1,416 +0,0 @@
cmake_minimum_required(VERSION 3.5.1)
# spark_aux_source_directory <outvar> <invar> [SKIP]
# Collect C/C++ sources, headers and Qt .ui/.qrc files from directory <invar>
# into <outvar>; any third argument switches from overwrite to append.
macro(spark_aux_source_directory OUTVAR INVAR)
# iv: internal_variable
set(iv_args ${ARGN})
list(LENGTH iv_args iv_arglen)
file(GLOB iv_SOURCE_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.c ${INVAR}/*.cpp ${INVAR}/*.cc)
file(GLOB iv_HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.h ${INVAR}/*.hpp)
file(GLOB iv_QT_UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.ui ${INVAR}/*.qrc)
if(iv_arglen EQUAL 1)
list(APPEND ${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST})
else()
set(${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST})
endif(iv_arglen EQUAL 1)
unset(iv_args)
unset(iv_arglen)
unset(iv_SOURCE_LIST)
unset(iv_HEADER_LIST)
unset(iv_QT_UI_LIST)
endmacro(spark_aux_source_directory OUTVAR INVAR)
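# Usage sketch (hypothetical layout): the third argument selects append mode.
#   spark_aux_source_directory(APP_SOURCES src)               # overwrite APP_SOURCES
#   spark_aux_source_directory(APP_SOURCES src/widgets SKIP)  # append to it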
# spark_aux_source_directories <outvar> <invar> [dirs...]
# Run spark_aux_source_directory on <invar> and on every extra directory,
# appending all results to <outvar>.
macro(spark_aux_source_directories OUTVAR INVAR)
set(iv_aux_directories ${ARGN})
spark_aux_source_directory(${OUTVAR} ${INVAR})
foreach(iv_directory IN LISTS iv_aux_directories)
spark_aux_source_directory(${OUTVAR} ${iv_directory} SKIP)
endforeach(iv_directory IN LISTS iv_aux_directories)
unset(iv_aux_directories)
endmacro(spark_aux_source_directories OUTVAR INVAR)
# spark_add_library <lib_name> [files...]
# Create a library from the given files and generate a
# target_link_<lib_name> helper for linking consumers against it.
macro(spark_add_library _lib_name)
spark_debug_message("================ ${_lib_name} Library ================")
add_library(${_lib_name} ${ARGN})
set(SRCS ${ARGN})
foreach(item IN LISTS SRCS)
spark_debug_message(" -> ${item}")
endforeach(item IN LISTS SRCS)
function(target_link_${_lib_name} TARGET)
spark_debug_message("${_lib_name}")
target_link_libraries(${TARGET} ${_lib_name})
endfunction(target_link_${_lib_name} TARGET)
endmacro(spark_add_library _lib_name)
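# Usage sketch (hypothetical names): the generated helper links consumers.
#   spark_add_library(base base/base.cpp base/base.h)
#   add_executable(app main.cpp)
#   target_link_base(app)   # -> target_link_libraries(app base)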
# spark_add_library_path <lib_name> [SHARED|STATIC] <lib_path> [files/paths...]
# Create a library from the sources found under <lib_path>; generates a
# target_link_<lib_name> helper, and <lib_path> is also exposed as an
# include directory to consumers.
macro(spark_add_library_path _lib_name _lib_path)
# 0. Defaults: build a STATIC library unless a type is given.
set(${_lib_name}_TYPE)
set(${_lib_name}_TYPE_MESSAGE "STATIC(Default)")
set(${_lib_name}_ARGN ${ARGN})
# 1. If _lib_path is SHARED or STATIC, take it as the library type.
if(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC)
set(${_lib_name}_TYPE ${_lib_path})
set(${_lib_name}_TYPE_MESSAGE ${${_lib_name}_TYPE})
if(${ARGC} LESS 3)
message(FATAL_ERROR "Missing parameter, library path not specified.")
endif(${ARGC} LESS 3)
else()
# Otherwise _lib_path is an ordinary argument; push it back into ARGN.
list(APPEND ${_lib_name}_ARGN ${_lib_path})
endif(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC)
# 2. Normalize arguments: strip '+<suffix>' markers (spark_add_library_realpaths convention).
spark_debug_message("> Building: ${_lib_name}, type: ${${_lib_name}_TYPE_MESSAGE}")
set(${_lib_name}_ARGN_REF ${${_lib_name}_ARGN})
unset(${_lib_name}_ARGN)
foreach(_old IN LISTS ${_lib_name}_ARGN_REF)
set(_new ${_old})
string(FIND "${_old}" "+" _plus_index)
if(${_plus_index} GREATER 0)
string(SUBSTRING "${_old}" 0 ${_plus_index} _new)
spark_debug_message(" [CONVERT] ${_new} <- ${_old}")
endif(${_plus_index} GREATER 0)
list(APPEND ${_lib_name}_ARGN ${_new})
endforeach(_old IN LISTS ${_lib_name}_ARGN_REF)
# 3. Classify arguments into source files, directories and unknown items.
set(${_lib_name}_ARGN_SOURCES)
set(${_lib_name}_ARGN_APPEND_PATHS)
set(${_lib_name}_ARGN_UNKNOW)
foreach(item IN LISTS ${_lib_name}_ARGN)
spark_debug_message(" [ARGN] check:" ${item})
if(NOT EXISTS ${item})
set(item ${CMAKE_CURRENT_LIST_DIR}/${item})
endif()
if(EXISTS ${item})
# spark_debug_message(" exists: true")
file(REAL_PATH ${item} ${_lib_name}_ARGN_item)
if(IS_DIRECTORY ${${_lib_name}_ARGN_item})
list(APPEND ${_lib_name}_ARGN_APPEND_PATHS ${item})
else()
list(APPEND ${_lib_name}_ARGN_SOURCES ${item})
endif(IS_DIRECTORY ${${_lib_name}_ARGN_item})
else()
list(APPEND ${_lib_name}_ARGN_UNKNOW ${item})
spark_debug_message(" exists: false")
endif()
endforeach()
list(LENGTH ${_lib_name}_ARGN_SOURCES ${_lib_name}_ARGN_SOURCES_LENGTH)
list(LENGTH ${_lib_name}_ARGN_APPEND_PATHS ${_lib_name}_ARGN_APPEND_PATHS_LENGTH)
list(LENGTH ${_lib_name}_ARGN_UNKNOW ${_lib_name}_ARGN_UNKNOW_LENGTH)
spark_debug_message(" result: files(${${_lib_name}_ARGN_SOURCES_LENGTH}), paths(${${_lib_name}_ARGN_APPEND_PATHS_LENGTH}), unknow(${${_lib_name}_ARGN_UNKNOW_LENGTH})" ${item})
# 4. Collect every source (direct files plus directory contents) into any_files.
spark_debug_message(" files:")
set(any_files ${${_lib_name}_ARGN_SOURCES})
foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
spark_aux_source_directory(item_files ${item})
list(APPEND any_files ${item_files})
foreach(item_file IN LISTS item_files)
spark_debug_message(" ${item_file}")
endforeach(item_file IN LISTS item_files)
endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
# 4.1 Wrap Qt5/Qt6 .ui files into generated headers.
set(${_lib_name}_SOURCE_UIS)
set(ui_wrap_files)
foreach(item IN LISTS ${_lib_name}_ARGN_SOURCES any_files)
get_filename_component(ex "${item}" LAST_EXT)
if("${ex}" STREQUAL ".ui")
list(APPEND ${_lib_name}_SOURCE_UIS ${item})
endif("${ex}" STREQUAL ".ui")
endforeach(item IN LISTS ${_lib_name}_ARGN_SOURCES any_files)
if(SPARK_FIND_QT6)
qt_wrap_ui(ui_wrap_files ${${_lib_name}_SOURCE_UIS})
elseif(SPARK_FIND_QT5)
qt5_wrap_ui(ui_wrap_files ${${_lib_name}_SOURCE_UIS})
endif(SPARK_FIND_QT6)
# 5. Create the library target.
add_library(${_lib_name} ${${_lib_name}_TYPE}
${${_lib_name}_ARGN_SOURCES}
${any_files} ${ui_wrap_files})
# 6. Generate helper functions:
# target_link_<_lib_name> / target_include_<_lib_name>
# target_<_lib_name>_include / target_<_lib_name>_link
function(target_${_lib_name}_include _include)
spark_debug_message("添加引用: ${_lib_name} <- ${_include} ${${_lib_name}_INCLUDE_ARGN}")
target_include_directories(${_lib_name} PRIVATE ${_include})
endfunction(target_${_lib_name}_include _include)
function(target_${_lib_name}_link _library)
spark_debug_message("添加链接: ${_lib_name} <- ${_library} ${${_lib_name}_LINK_ARGN}")
target_link_libraries(${_lib_name} ${_library})
endfunction(target_${_lib_name}_link _library)
function(target_link_${_lib_name} TARGET)
spark_debug_message("链接引用: ${TARGET} <- ${_lib_name}")
target_include_directories(${TARGET} PRIVATE
"${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS})
target_link_libraries(${TARGET} ${_lib_name})
endfunction(target_link_${_lib_name} TARGET)
function(target_include_${_lib_name} TARGET)
spark_debug_message("引入引用: ${TARGET} <- ${_lib_name}")
target_include_directories(${TARGET} PUBLIC
"${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS})
endfunction(target_include_${_lib_name} TARGET)
target_include_directories(${_lib_name} PRIVATE
"${${_lib_name}_ARGN_APPEND_PATHS}")
# Debug: list the include paths appended above.
spark_debug_message(" ${_lib_name}_ARGN_APPEND_PATHS: ")
foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
string(REPLACE "${CMAKE_SOURCE_DIR}/" "" item_var "${item}")
spark_debug_message(" ${item_var}")
endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
# Note: an alternative "target_link_include_directories" approach was tried;
# when passing a LIST into a property it must be quoted as "${list}", and
# appending PUBLIC include dirs fills (appends to) INCLUDE_DIRECTORIES /
# INTERFACE_INCLUDE_DIRECTORIES; the commented code below is kept for reference.
# get_target_property(_lib_include_directories ${_lib_name} INCLUDE_DIRECTORIES)
# list(APPEND _lib_include_directories "${CMAKE_CURRENT_LIST_DIR}/${${_lib_name}_SOURCE_PATH}")
# spark_debug_message("----> ${CMAKE_CURRENT_LIST_DIR}/${${_lib_name}_SOURCE_PATH}")
# spark_debug_message("----> ${_lib_include_directories}")
# set_target_properties(${_lib_name} PROPERTIES
# INCLUDE_DIRECTORIES "${_lib_include_directories}"
# INTERFACE_INCLUDE_DIRECTORIES "${_lib_include_directories}"
# )
endmacro(spark_add_library_path _lib_name _lib_path)
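# Usage sketch (hypothetical paths): the first argument after the name may be
# SHARED or STATIC; the remaining arguments are files or source directories.
#   spark_add_library_path(bigimage SHARED src/bigimage)
#   spark_add_executable(viewer main.cpp)
#   target_link_bigimage(viewer)   # links and adds src/bigimage to includes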
# spark_add_shared_library <target> [files...]
# Create a SHARED library; generates target_link_<lib_name>.
macro(spark_add_shared_library _lib_name)
spark_add_library(${_lib_name} SHARED ${ARGN})
endmacro(spark_add_shared_library _lib_name)
# spark_add_shared_library_path <target> [files/paths...]
# Path-based variant of the above; generates target_link_<lib_name>.
macro(spark_add_shared_library_path _lib_name)
spark_add_library_path(${_lib_name} SHARED ${ARGN})
endmacro(spark_add_shared_library_path _lib_name)
# spark_add_executable <exec_name> [files...]
# Create an executable from Qt-style sources (*.h/*.cpp/*.qrc/*.qm/...).
macro(spark_add_executable _exec_name)
set(${_exec_name}_TYPE_MESSAGE "executable")
spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
add_executable(${_exec_name} ${ARGN})
endmacro(spark_add_executable _exec_name)
# spark_add_executable_path <target> <path> [files/paths...]
# Create an executable from the sources found under <path>.
macro(spark_add_executable_path _exec_name _exec_path)
spark_add_executable(${_exec_name})
# 0. Defaults (kept commented for reference):
# set(${_exec_name}_TYPE)
# set(${_exec_name}_TYPE_MESSAGE "executable")
set(${_exec_name}_ARGN ${ARGN})
# 1. Normalize arguments: strip '+<suffix>' markers (spark_add_executable_realpaths convention).
# spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
set(${_exec_name}_ARGN_REF ${${_exec_name}_ARGN})
unset(${_exec_name}_ARGN)
foreach(_old IN LISTS ${_exec_name}_ARGN_REF)
set(_new ${_old})
string(FIND "${_old}" "+" _plus_index)
if(${_plus_index} GREATER 0)
string(SUBSTRING "${_old}" 0 ${_plus_index} _new)
spark_debug_message(" [CONVERT] ${_new} <- ${_old}")
endif(${_plus_index} GREATER 0)
list(APPEND ${_exec_name}_ARGN ${_new})
endforeach(_old IN LISTS ${_exec_name}_ARGN_REF)
# 2. Classify arguments into source files, directories and unknown items.
# spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
set(${_exec_name}_ARGN_SOURCES)
set(${_exec_name}_ARGN_APPEND_PATHS ${_exec_path})
set(${_exec_name}_ARGN_UNKNOW)
foreach(item IN LISTS ${_exec_name}_ARGN)
spark_debug_message(" [ARGN] check:" ${item})
if(NOT EXISTS ${item})
set(item ${CMAKE_CURRENT_LIST_DIR}/${item})
endif()
if(EXISTS ${item})
# spark_debug_message(" exists: true")
file(REAL_PATH ${item} ${_exec_name}_ARGN_item)
if(IS_DIRECTORY ${${_exec_name}_ARGN_item})
list(APPEND ${_exec_name}_ARGN_APPEND_PATHS ${item})
else()
list(APPEND ${_exec_name}_ARGN_SOURCES ${item})
endif(IS_DIRECTORY ${${_exec_name}_ARGN_item})
else()
list(APPEND ${_exec_name}_ARGN_UNKNOW ${item})
spark_debug_message(" exists: false")
endif()
endforeach()
list(LENGTH ${_exec_name}_ARGN_SOURCES ${_exec_name}_ARGN_SOURCES_LENGTH)
list(LENGTH ${_exec_name}_ARGN_APPEND_PATHS ${_exec_name}_ARGN_APPEND_PATHS_LENGTH)
list(LENGTH ${_exec_name}_ARGN_UNKNOW ${_exec_name}_ARGN_UNKNOW_LENGTH)
spark_debug_message(" result: files(${${_exec_name}_ARGN_SOURCES_LENGTH}), paths(${${_exec_name}_ARGN_APPEND_PATHS_LENGTH}), unknow(${${_exec_name}_ARGN_UNKNOW_LENGTH})" ${item})
# 3. Collect every source (direct files plus directory contents) into any_files.
spark_debug_message(" files:")
set(any_files ${${_exec_name}_ARGN_SOURCES})
foreach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS)
spark_aux_source_directory(item_files ${item})
list(APPEND any_files ${item_files})
foreach(item_file IN LISTS item_files)
spark_debug_message(" ${item_file}")
endforeach(item_file IN LISTS item_files)
endforeach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS)
# 3.1 Wrap Qt5/Qt6 .ui files into generated headers.
set(${_exec_name}_SOURCE_UIS)
set(ui_wrap_files)
foreach(item IN LISTS ${_exec_name}_ARGN_SOURCES any_files)
get_filename_component(ex "${item}" LAST_EXT)
if("${ex}" STREQUAL ".ui")
list(APPEND ${_exec_name}_SOURCE_UIS ${item})
endif("${ex}" STREQUAL ".ui")
endforeach(item IN LISTS ${_exec_name}_ARGN_SOURCES any_files)
if(SPARK_FIND_QT6)
qt_wrap_ui(ui_wrap_files ${${_exec_name}_SOURCE_UIS})
elseif(SPARK_FIND_QT5)
qt5_wrap_ui(ui_wrap_files ${${_exec_name}_SOURCE_UIS})
endif(SPARK_FIND_QT6)
# 4. Attach the collected sources to the target (add_executable ran earlier):
# add_executable(${_exec_name}
# ${${_exec_name}_ARGN_SOURCES}
# ${any_files})
target_sources(${_exec_name} PRIVATE
${${_exec_name}_ARGN_SOURCES}
${any_files} ${ui_wrap_files})
# 5. Generate helper functions:
# target_<_exec_name>_include / target_<_exec_name>_link
function(target_${_exec_name}_include _include)
spark_debug_message("添加引用: ${_exec_name} <- ${_include} ${${_exec_name}_INCLUDE_ARGN}")
target_include_directories(${_exec_name} PRIVATE ${_include})
endfunction(target_${_exec_name}_include _include)
function(target_${_exec_name}_link _library)
spark_debug_message("添加链接: ${_exec_name} <- ${_library} ${${_exec_name}_LINK_ARGN}")
target_link_libraries(${_exec_name} ${_library})
endfunction(target_${_exec_name}_link _library)
target_include_directories(${_exec_name} PRIVATE
${_exec_path})
spark_debug_message(" include: ${_exec_path}\n")
endmacro(spark_add_executable_path _exec_name _exec_path)
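# Usage sketch (hypothetical layout): sources are globbed from src/app, the
# extra .qrc is compiled in, and the path becomes a private include dir.
#   spark_add_executable_path(app src/app resources/app.qrc)
#   target_link_qt(app)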
# spark_find_library <prefix> <pkg-config modules...>
# Locate libraries via pkg-config and generate a target_link_<prefix> helper.
macro(spark_find_library _prefix)
find_package(PkgConfig REQUIRED)
# e.g. libnotify
pkg_check_modules(${_prefix} ${ARGN})
function(target_link_${_prefix} TARGET)
target_include_directories(${TARGET} PUBLIC
${${_prefix}_INCLUDE_DIRS})
target_link_libraries(${TARGET}
${${_prefix}_LIBRARIES})
endfunction(target_link_${_prefix} TARGET)
endmacro(spark_find_library _prefix)
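# Usage sketch (assuming libnotify is discoverable via pkg-config):
#   spark_find_library(notify libnotify)
#   target_link_notify(app)   # adds the include dirs and links the libraries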
macro(target_link_qt)
if(SPARK_FIND_QT6)
target_link_qt6(${ARGN})
elseif(SPARK_FIND_QT5)
target_link_qt5(${ARGN})
endif(SPARK_FIND_QT6)
endmacro(target_link_qt)
# spark_add_executable_paths <prefix> [paths...]
# Create one executable per path, named <prefix>-<item> with '/' replaced by '-';
# each path's *.qrc files are globbed non-recursively (GLOB, not GLOB_RECURSE),
# iterating the paths like python3's `for item in items:`.
macro(spark_add_executable_paths _prefix_path)
set(PATHS ${ARGN})
foreach(item IN LISTS PATHS)
file(GLOB QRCS "${item}/*.qrc")
spark_debug_message(">>> add_executable: " "${_prefix_path}-${item} ${item} + ${QRCS}")
string(REPLACE "/" "-" new_item "${item}")
spark_add_executable_path(${_prefix_path}-${new_item} ${item} ${QRCS})
target_link_qt(${_prefix_path}-${new_item})
endforeach(item IN LISTS PATHS)
endmacro(spark_add_executable_paths _prefix_path)
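# Usage sketch (hypothetical dirs): builds demo-examples-a and demo-examples-b,
# each from its directory plus any top-level *.qrc found there.
#   spark_add_executable_paths(demo examples/a examples/b)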
# spark_add_link <name> [libraries...]
# Generate a function target_link_<name> that links the given libraries
# into the target passed to it.
# _name: suffix of the generated function name target_link_<name>
# ARGN:  libraries forwarded to target_link_libraries
macro(spark_add_link _name)
function(target_link_${_name} _link)
spark_debug_message("> Linking: ${_link}")
spark_debug_message(" <- ${ARGN}\n")
target_link_libraries(${_link}
${ARGN})
endfunction(target_link_${_name} _link)
endmacro(spark_add_link _name)
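# Usage sketch (hypothetical Qt5 setup): this is how a target_link_qt5 helper,
# as dispatched to by target_link_qt above, could be declared.
#   spark_add_link(qt5 Qt5::Core Qt5::Widgets)
#   target_link_qt5(app)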

View File

@@ -28,7 +28,7 @@
# CMAKE_HOST_SYSTEM_NAME STREQUAL "Linux" ??
# or use Linux.cmake ?
if(CMAKE_HOST_UNIX)
-include(cmake/SparkInstallMacrosConfig.cmake)
+spark_include(cmake/SparkInstallMacrosConfig.cmake)
# use the Linux layout
set(LINUX_APPLICATION_DIR /usr/share/${PROJECT_NAME})

View File

@@ -43,7 +43,7 @@ if(USE_LINUX_UOS)
# use the Linux layout
set(LINUX_UOS_APP_HOME_DIR ${UOS_APP_HOME_DIR})
-include(cmake/SparkInstallMacrosConfig.cmake)
+spark_include(cmake/SparkInstallMacrosConfig.cmake)
# ------------------ ------------------ #
# 1. UOS /opt/apps/ layout

View File

@@ -24,7 +24,7 @@
if(CMAKE_HOST_WIN32)
-include(cmake/SparkInstallMacrosConfig.cmake)
+spark_include(cmake/SparkInstallMacrosConfig.cmake)
# ------------------ INSTALL PLUGIN CONFIG ------------------ #
# ------------------ INSTALL PLUGIN CONFIG ------------------ #
# ------------------ INSTALL PLUGIN CONFIG ------------------ #