spark: full-scale build template update

Author: zinface
Date: 2023-12-19 00:56:50 +08:00
Parent: 599fb3a5cd
Commit: 1da49440b6
18 changed files with 781 additions and 266 deletions

(Binary image changed: 4.8 KiB before, 8.6 KiB after; preview not shown.)


@ -24,52 +24,76 @@
# default.desktop / default.png are the file names used inside the AppImage
# (staged by the copy-desktop-appimage helper target)
# if ()
set(APPIMAGE_OUTPUT "${CMAKE_BINARY_DIR}/appimage")
set(APPIMAGE_ICON "${APPIMAGE_OUTPUT}/default.png")
set(APPIMAGE_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
# set(LINUXDEPLOYQT)
# set(APPIMAGETOOL)
# Note:
# LINUXDEPLOYQT  - path to the linuxdeployqt executable
# APPIMAGETOOL   - path to the appimagetool executable
function(execute_linuxdeploy _PATH)
execute_process(COMMAND ${LINUXDEPLOYQT}
WORKING_DIRECTORY "${APPIMAGE_OUTPUT}"
)
endfunction(execute_linuxdeploy _PATH)
option(USE_APPIMAGE_NEW_GLIBC "Allow a newer glibc version to be used while packing" ON)
function(target_linuxdeploy)
add_custom_target(linuxdeploy pwd
BYPRODUCTS appimage
COMMAND cp ../${PROJECT_NAME} .
COMMAND "${LINUXDEPLOYQT}" ${PROJECT_NAME} -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip|| true
COMMAND cp ../spark-appimage.desktop default.desktop
COMMAND cp ../spark-appimage.png default.png
WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
endfunction(target_linuxdeploy)
set(APPIMAGE_OUTPUT "${CMAKE_BINARY_DIR}/appimage")
set(APPIMAGE_OUTPUT_ICON "${APPIMAGE_OUTPUT}/default.png")
set(APPIMAGE_OUTPUT_DESTKOP "${APPIMAGE_OUTPUT}/default.desktop")
# 1. Provide the icon to be used by the AppImage
function(add_appimage_icon _icon)
if(CMAKE_VERSION VERSION_LESS 3.21)
message("> cmake version is less than 3.21")
configure_file(${_icon} ${APPIMAGE_OUTPUT_ICON} COPYONLY)
else()
file(MAKE_DIRECTORY ${APPIMAGE_OUTPUT})
file(COPY_FILE ${_icon} ${APPIMAGE_OUTPUT_ICON})
endif(CMAKE_VERSION VERSION_LESS 3.21)
endfunction(add_appimage_icon _icon)
# 2. Generate the AppImage desktop file from the values configured via SparkDesktopMacros.cmake,
#    using the spark-appimage.desktop.in template
function(add_appimage_desktop)
configure_file(cmake/spark-appimage.desktop.in.txt
${APPIMAGE_OUTPUT_DESTKOP} @ONLY)
endfunction(add_appimage_desktop)
function(target_linuxdeploy _target)
if(USE_APPIMAGE_NEW_GLIBC)
message("Use New glibc")
add_custom_target(linuxdeploy pwd
BYPRODUCTS appimage
COMMAND "${LINUXDEPLOYQT}" $<TARGET_FILE:${_target}> -appimage -unsupported-allow-new-glibc -verbose=3 -no-strip || true
WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
else()
message("Un Use New glibc")
add_custom_target(linuxdeploy pwd
BYPRODUCTS appimage
COMMAND "${LINUXDEPLOYQT}" $<TARGET_FILE:${_target}> -appimage -verbose=3 -no-strip || true
WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
endif(USE_APPIMAGE_NEW_GLIBC)
endfunction(target_linuxdeploy _target)
function(target_appimage)
add_custom_target(copy-desktop-appimage
COMMAND cp ../spark-appimage.desktop default.desktop
COMMAND cp ../spark-appimage.png default.png
WORKING_DIRECTORY "${APPIMAGE_OUTPUT}")
add_custom_target(appimage pwd
COMMAND ${APPIMAGETOOL} ${APPIMAGE_OUTPUT}
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
DEPENDS copy-desktop-appimage)
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}")
endfunction(target_appimage)
function(add_appimage)
# 3. Add the AppImage build for a given target (linuxdeployqt collects the libraries, appimagetool packs the AppImage)
function(add_appimage_target _target)
# check linuxdeploy
if(NOT DEFINED LINUXDEPLOYQT)
message("AppImage> Not Found LINUXDEPLOYQT Variable!")
return()
endif(NOT DEFINED LINUXDEPLOYQT)
if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
if(CMAKE_VERSION VERSION_LESS 3.19)
message("> cmake version is less than 3.19")
message(WARNING "!Relative paths are not supported!")
if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(LINUXDEPLOYQT_REAL_PATH ${LINUXDEPLOYQT} REALPATH)
else()
message("> cmake version is less than 3.4")
message(WARNING "!Relative paths are not supported!")
endif(CMAKE_VERSION VERSION_GREATER 3.4)
else()
file(REAL_PATH ${LINUXDEPLOYQT} LINUXDEPLOYQT_REAL_PATH)
endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
endif(CMAKE_VERSION VERSION_LESS 3.19)
message("AppImage> Found LINUXDEPLOYQT Variable: ${LINUXDEPLOYQT_REAL_PATH}")
# check appimagetool
@ -77,47 +101,58 @@ function(add_appimage)
message("AppImage> Not Found APPIMAGETOOL Variable!")
return()
endif(NOT DEFINED APPIMAGETOOL)
if(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
if(CMAKE_VERSION VERSION_LESS 3.19)
# execute_process(COMMAND realpath ${APPIMAGETOOL} OUTPUT_VARIABLE APPIMAGETOOL_REAL_PATH)
message("> cmake version is less than 3.19")
message(WARNING "!Relative paths are not supported!")
if(CMAKE_VERSION VERSION_GREATER 3.4)
get_filename_component(APPIMAGETOOL_REAL_PATH ${APPIMAGETOOL} REALPATH)
else()
message("> cmake version is less than 3.4")
message(WARNING "!Relative paths are not supported!")
endif(CMAKE_VERSION VERSION_GREATER 3.4)
else()
file(REAL_PATH ${APPIMAGETOOL} APPIMAGETOOL_REAL_PATH)
endif(CMAKE_VERSION VERSION_LESS 3.19 AND NOT EXISTS ${LINUXDEPLOYQT})
message("AppImage> Found APPIMAGETOOL Variable: ${LINUXDEPLOYQT_REAL_PATH}")
endif(CMAKE_VERSION VERSION_LESS 3.19)
message("AppImage> Found APPIMAGETOOL Variable: ${APPIMAGETOOL}")
# do add_custome_target
make_directory(${APPIMAGE_OUTPUT})
target_linuxdeploy()
target_linuxdeploy(${_target})
target_appimage()
endfunction(add_appimage)
function(add_appimage_desktop)
configure_file(cmake/spark-appimage.desktop.in
${CMAKE_BINARY_DIR}/spark-appimage.desktop @ONLY)
endfunction(add_appimage_desktop)
# Emit the target binary into the AppImage staging directory
set_target_properties(${_target}
PROPERTIES
RUNTIME_OUTPUT_DIRECTORY "${APPIMAGE_OUTPUT}")
function(add_appimage_icon _ICON_PATH)
if(CMAKE_VERSION VERSION_LESS 3.21)
message("> cmake version is less than 3.21")
configure_file(${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png COPYONLY)
else()
file(COPY_FILE ${_ICON_PATH} ${CMAKE_BINARY_DIR}/spark-appimage.png)
endif(CMAKE_VERSION VERSION_LESS 3.21)
endfunction(add_appimage_icon _ICON_PATH)
# When -unsupported-allow-new-glibc is not used,
# the binary itself must be named AppRun inside the AppImage
if(NOT USE_APPIMAGE_NEW_GLIBC)
set_target_properties(${_target}
PROPERTIES
RUNTIME_OUTPUT_NAME "AppRun")
endif(NOT USE_APPIMAGE_NEW_GLIBC)
endfunction(add_appimage_target _target)
# glic>=2.27, -unsupported-allow-new-glibc (意思就是不再低版本发行版使用了)
# -unsupported-bundle-everything
# glic>=2.27, -unsupported-allow-new-glibc 使
# -unsupported-bundle-everything
#
# -unsupported-bundle-everything
# ld-linux.so glibc使
# ld-linux.so glibc
#
# 使
# -unsupported-allow-new-glibc
# linuxdeployqt Ubuntu LTS AppImage
# linuxdeployqt Ubuntu LTS
# AppImage
# ./linuxdeployqt-7-x86_64.AppImage / -appimage -unsupported-allow-new-glibc
# ./linuxdeployqt-7-x86_64.AppImage / -appimage -unsupported-bundle-everything
# linuxdeployqt 使
# ./linuxdeployqt-7-x86_64.AppImage
# / -appimage -unsupported-allow-new-glibc
# ./linuxdeployqt-7-x86_64.AppImage
# / -appimage -unsupported-bundle-everything
@ -126,7 +161,7 @@ endfunction(add_appimage_icon _ICON_PATH)
# include(cmake/SparkAppimageConfig.cmake)   # Spark AppImage module
# add_appimage_icon(assets/spark.png)        # icon packed into the AppImage
# add_appimage_desktop()                     # default desktop file in the AppImage
#     (uses the values configured by the Spark desktop build; requires spark-desktop)
# add_appimage()                             # add the AppImage packing targets
# add_appimage_target(${PROJECT_NAME})       # add the AppImage packing targets for a given target

# 2. Driving the AppImage build from a Makefile --
#    provide absolute tool paths, then run the linuxdeployqt and genrate-appimage steps in turn
@ -150,3 +185,8 @@ endfunction(add_appimage_icon _ICON_PATH)
# genrate-appimage:
# cd build && cmake .. -DLINUXDEPLOYQT=$(LINUXDEPLOYQT) -DAPPIMAGETOOL=$(APPIMAGETOOL)
# cd build && make appimage
# NOTE:
# If needed, export LD_LIBRARY_PATH=<> first so that linuxdeployqt can resolve the libraries
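
Putting the pieces together, a minimal sketch of the CMake side (the icon path and target name are illustrative; LINUXDEPLOYQT and APPIMAGETOOL are passed on the cmake command line as the notes above describe):

include(cmake/SparkAppimageConfig.cmake)    # Spark AppImage module
add_appimage_icon(assets/spark.png)         # staged as appimage/default.png
add_appimage_desktop()                      # rendered from cmake/spark-appimage.desktop.in.txt
add_appimage_target(${PROJECT_NAME})        # adds the linuxdeploy and appimage custom targets
# then: make linuxdeploy && make appimage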


@ -0,0 +1,8 @@
cmake_minimum_required(VERSION 3.5.1)
# Usage: make builddeps
add_custom_target(builddeps
COMMAND "${CMAKE_COMMAND}" "--graphviz=graphviz/builddeps.dot" .
COMMAND dot -Tpng graphviz/builddeps.dot -o builddeps.png
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
)
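
A usage sketch (assumes Graphviz's dot is on PATH, as the target above requires):

# cmake -B build .
# cmake --build build --target builddeps
# -> writes graphviz/builddeps.dot and builddeps.png inside the build tree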


@ -1,7 +1,7 @@
cmake_minimum_required(VERSION 3.0.0)
# function(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
# endfunction(add_deb_package PACKAGE_NAME PACKAGE_VERSION PACKAGE_MAINTAINER PACKAGE_EMAIL PACKAGE_SHORT_DESCRIPTION PACKAGE_LONG_DESCRIPTION)
# if(add_deb_package VALUE) set(Package ${VALUE} PARENT_SCOPE) endif(add_deb_package VALUE)
@ -119,7 +119,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
else()
set(CPACK_DEBIAN_PACKAGE_VERSION "${_IN_VAL}" PARENT_SCOPE)
endif(_IN_VAL STREQUAL "auto")
message("--> 软件版本: ${_IN_VAL}")
endif(_Version EQUAL "0")
@ -137,7 +137,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
find_str("${_IN_KEY}" "Architecture" _Architecture)
if(_Architecture EQUAL "0")
set(CPACK_DEBIAN_PACKAGE_ARCHITECTURE "${_IN_VAL}" PARENT_SCOPE)
if(_IN_VAL STREQUAL "auto")
execute_process(
COMMAND dpkg --print-architecture
@ -148,7 +148,7 @@ function(set_package_vars _IN_KEY _IN_VAL)
endif(_IN_VAL STREQUAL "auto")
message("--> 软件架构: ${_IN_VAL}")
endif(_Architecture EQUAL "0")
find_str("${_IN_KEY}" "Priority" _Priority)
if(_Priority EQUAL "0")
set(CPACK_DEBIAN_PACKAGE_PRIORITY "${_IN_VAL}" PARENT_SCOPE)
@ -179,6 +179,12 @@ function(set_package_vars _IN_KEY _IN_VAL)
message("--> 软件建议: ${_IN_VAL}")
endif(_Recommends EQUAL "0")
find_str("${_IN_KEY}" "Conflicts" _Conflicts)
if(_Conflicts EQUAL "0")
set(CPACK_DEBIAN_PACKAGE_CONFLICTS "${_IN_VAL}" PARENT_SCOPE)
message("--> 软件冲突: ${_IN_VAL}")
endif(_Conflicts EQUAL "0")
endfunction(set_package_vars _IN_KEY _IN_VAL)
# Define a custom function (add_package_descript) that parses the package descript file
@ -194,7 +200,7 @@ function(add_package_descript IN_DES)
message(FATAL_ERROR "!! Not Found Path: ${PACKAGE_DES_PATH}")
return()
endif(EXISTS ${IN_DES})
file(READ ${PACKAGE_DES_PATH} DES_CONTENT)
trim_str("${DES_CONTENT}" DES_CONTENT)
@ -244,7 +250,12 @@ function(add_package_descript IN_DES)
set(PREV_DES_LINE "")
while(NOT PREV_DES_LINE STREQUAL DES_LINE)
if(NOT PREV_DES_LINE STREQUAL "")
set(Descrition "${Descrition}\n${DES_LINE}")
if ("${CMAKE_VERSION}" VERSION_LESS "3.15")
set(Descrition "${Descrition}\n${DES_LINE}")
else()
string(STRIP "${DES_LINE}" STRIP_DES_LINE)
set(Descrition "${Descrition}\n${STRIP_DES_LINE}")
endif("${CMAKE_VERSION}" VERSION_LESS "3.15")
endif(NOT PREV_DES_LINE STREQUAL "")
set(PREV_DES_LINE "${DES_LINE}")
sub_next(${DES_CONTENT} NEXT_INDEX DES_LINE DES_CONTENT)
@ -284,16 +295,28 @@ function(add_package_descript IN_DES)
endif("${OSDVer}" STREQUAL "true")
##################### deb file name #####################
set(_Package "${CPACK_DEBIAN_PACKAGE_NAME}")
set(_Version "${CPACK_DEBIAN_PACKAGE_VERSION}")
set(_Architecture "${CPACK_DEBIAN_PACKAGE_ARCHITECTURE}")
set(_DebFileName
"${_Package}_${_Version}_${_Architecture}${PACKAGE_SUFFIX}.deb"
)
set(CPACK_DEBIAN_FILE_NAME ${_DebFileName})
# Note: distribution-suffix handling for spark-deb-package builds
if(NOT "${PACKAGE_SUFFIX}" STREQUAL "")
# eg: remove '_' of '_Debian'
string(SUBSTRING "${PACKAGE_SUFFIX}" 1 -1 DISTRIBUTION)
if ("${CMAKE_VERSION}" VERSION_LESS "3.15")
set(CPACK_DEBIAN_PACKAGE_DESCRIPTION "${Descrition}\n .\n Build for ${DISTRIBUTION} through spark-deb-build.")
else()
set(CPACK_DEBIAN_PACKAGE_DESCRIPTION ${Descrition} "\n.\nBuild for ${DISTRIBUTION} through spark-deb-build.")
endif("${CMAKE_VERSION}" VERSION_LESS "3.15")
endif(NOT "${PACKAGE_SUFFIX}" STREQUAL "")
# set(CPACK_DEBIAN_PACKAGE_NAME "${Package}")
# set(CPACK_DEBIAN_PACKAGE_VERSION "${Version}")
@ -326,7 +349,7 @@ endfunction(add_package_descript IN_DES)
# CPACK_DEBIAN_FILE_NAME - n
# CPACK_DEBIAN_PACKAGE_NAME - y
# CPACK_DEBIAN_PACKAGE_VERSION - y
# CPACK_DEBIAN_PACKAGE_ARCHITECTURE - y(auto) -> dpkg --print-architecture
# CPACK_DEBIAN_PACKAGE_DEPENDS - y
# CPACK_DEBIAN_PACKAGE_PRIORITY - y
# CPACK_DEBIAN_PACKAGE_MAINTAINER - y
@ -339,4 +362,5 @@ endfunction(add_package_descript IN_DES)
# set(ARCHITECTURE "arm64")
# endif()
# string(TIMESTAMP BUILD_TIME "%Y%m%d")


@ -1,17 +1,19 @@
# SparkDesktopMacros.cmake
macro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
set(APP_NAME ${_APP_NAME})
set(APP_NAME_ZH_CN ${_APP_NAME_ZH_CN})
set(APP_COMMENT ${_APP_COMMENT})
set(APP_TYPE ${_APP_TYPE})
set(APP_EXECUTE_PATH ${_APP_EXECUTE_PATH})
set(APP_EXECUTE_ICON_PATH ${_APP_EXECUTE_ICON_PATH})
set(APP_CATEGORIES ${_APP_CATEGORIES})
configure_file(cmake/spark-desktop.desktop.in
${CMAKE_BINARY_DIR}/${_APP_NAME}.desktop
macro(spark_desktop_macros)
set(APP_NAME ${ARGV0})
set(APP_NAME_ZH_CN ${ARGV1})
set(APP_COMMENT ${ARGV2})
set(APP_TYPE ${ARGV3})
set(APP_EXECUTE_PATH ${ARGV4})
set(APP_EXECUTE_ICON_PATH ${ARGV5})
set(APP_CATEGORIES ${ARGV6})
set(APP_MIME_TYPE ${ARGV7})
configure_file(cmake/spark-desktop.desktop.in.txt
${CMAKE_BINARY_DIR}/${ARGV0}.desktop
)
set(SPARK_DESKTOP_FILE ${CMAKE_BINARY_DIR}/${_APP_NAME}.desktop)
endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _APP_EXECUTE_PATH _APP_EXECUTE_ICON_PATH _APP_CATEGORIES)
set(SPARK_DESKTOP_FILE ${CMAKE_BINARY_DIR}/${ARGV0}.desktop)
endmacro(spark_desktop_macros)
# include(cmake/SparkDesktopMacros.cmake)
#   App name:    Name=
@ -22,15 +24,27 @@ endmacro(spark_desktop_macros _APP_NAME _APP_NAME_ZH_CN _APP_COMMENT _APP_TYPE _
#   Type:        Type=
#   Exec:        Exec=
#   Icon:        Icon=
#   Categories:  Categories=
#   MIME type:   MimeType=
# )
# TODO INSTALL (automatically install the desktop file, e.g. into /usr/share/applications)
# install(FILES ${APP_NAME}.desktop
# DESTINATION /usr/share/applications
# )
#
# install(FILES ${SPARK_DESKTOP_FILE}
# DESTINATION /usr/share/applications
# )
# configure_file
# configure_file(<input> <output>
# [NO_SOURCE_PERMISSIONS | USE_SOURCE_PERMISSIONS |
# FILE_PERMISSIONS <permissions>...]
# [COPYONLY] [ESCAPE_QUOTES] [@ONLY]
# [NEWLINE_STYLE [UNIX|DOS|WIN32|LF|CRLF] ])
# install(FILES ${APP_NAME}.desktop
# install(FILES ${SPARK_DESKTOP_FILE}.desktop
# DESTINATION /usr/share/applications
# )
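
A hypothetical invocation of the macro (every value below is illustrative, and cmake/spark-desktop.desktop.in.txt must exist):

spark_desktop_macros(
    spark-demo                      # APP_NAME              -> Name=
    "Spark Demo"                    # APP_NAME_ZH_CN        -> Name[zh_CN]=
    "A demonstration application"   # APP_COMMENT           -> Comment=
    Application                     # APP_TYPE              -> Type=
    /usr/bin/spark-demo             # APP_EXECUTE_PATH      -> Exec=
    spark-demo                      # APP_EXECUTE_ICON_PATH -> Icon=
    Utility                         # APP_CATEGORIES        -> Categories=
    "text/plain;"                   # APP_MIME_TYPE         -> MimeType=
)
# afterwards SPARK_DESKTOP_FILE points to ${CMAKE_BINARY_DIR}/spark-demo.desktop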


@ -1,16 +1,22 @@
cmake_minimum_required(VERSION 3.5.1)
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
# set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTOUIC ON)
set(CMAKE_AUTORCC ON)
# set(CMAKE_BUILD_TYPE "Debug")
option(SPARK_DEBUG_MESSAGE "CMake Spark Module Debug Message." OFF)
set(SPAKK_DEBUG_LOGFILE "${CMAKE_BINARY_DIR}/spark_debug.log" CACHE STRING "Spark Build Debug logfile." FORCE)
file(WRITE ${SPAKK_DEBUG_LOGFILE})
macro(spark_debug_message)
if(SPARK_DEBUG_MESSAGE)
message(${ARGN})
set(SPARK_ONECE_LOG ${ARGN})
message("[SPARK_MESSAGE]: " ${SPARK_ONECE_LOG})
file(APPEND ${SPAKK_DEBUG_LOGFILE} ${SPARK_ONECE_LOG} "\n")
unset(SPARK_ONECE_LOG)
endif(SPARK_DEBUG_MESSAGE)
endmacro(spark_debug_message)
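
For example (a sketch; the option is normally toggled on the command line):

# cmake -B build -DSPARK_DEBUG_MESSAGE=ON .
spark_debug_message("checking Qt at: " "${CMAKE_PREFIX_PATH}")
# prints "[SPARK_MESSAGE]: checking Qt at: ..." and appends the same line to
# ${CMAKE_BINARY_DIR}/spark_debug.log (the cache variable is spelled SPAKK_DEBUG_LOGFILE)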


@ -0,0 +1,11 @@
cmake_minimum_required(VERSION 3.5.1)
# include(SparkFindQt5Config.cmake)
find_package(Dtk COMPONENTS Core Widget Gui)
function(target_link_dtk NAME)
target_link_libraries(${NAME}
${DtkCore_LIBRARIES}
${DtkWidget_LIBRARIES}
${DtkGui_LIBRARIES})
endfunction(target_link_dtk NAME)
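
A usage sketch (the target name is hypothetical; include the module after project() so find_package can run):

include(cmake/SparkFindDtkConfig.cmake)   # runs the find_package(Dtk ...) above
add_executable(dtk-demo main.cpp)
target_link_dtk(dtk-demo)                 # links DtkCore, DtkWidget and DtkGui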


@ -0,0 +1,7 @@
cmake_minimum_required(VERSION 3.5.1)
# spark_find_library(notify libnotify)
# function(target_link_${_prefix} TARGET)
# target_link_libraries(${TARGET} ${_prefix})
# endfunction(target_link_${_prefix} TARGET)


@ -2,6 +2,48 @@ cmake_minimum_required(VERSION 3.5.1)
# macro: spark_aux_source_directory outvar invar [skip]
#   collects *.c/*.cpp, *.h/*.hpp and *.ui/*.qrc sources under invar;
#   with an extra argument (e.g. SKIP) the result is appended to outvar instead of replacing it
macro(spark_aux_source_directory OUTVAR INVAR)
# iv: internal_variable
set(iv_args ${ARGN})
list(LENGTH iv_args iv_arglen)
file(GLOB iv_SOURCE_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.c ${INVAR}/*.cpp)
file(GLOB iv_HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.h ${INVAR}/*.hpp)
file(GLOB iv_QT_UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${INVAR}/*.ui ${INVAR}/*.qrc)
if(iv_arglen EQUAL 1)
list(APPEND ${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST})
else()
set(${OUTVAR} ${iv_SOURCE_LIST} ${iv_HEADER_LIST} ${iv_QT_UI_LIST})
endif(iv_arglen EQUAL 1)
unset(iv_args)
unset(iv_arglen)
unset(iv_SOURCE_LIST)
unset(iv_HEADER_LIST)
unset(iv_QT_UI_LIST)
endmacro(spark_aux_source_directory OUTVAR INVAR)
# spark_aux_source_directories outvar invar [...]
#   collects sources from several directories;
#   the first uses spark_aux_source_directory directly, the rest are appended
macro(spark_aux_source_directories OUTVAR INVAR)
set(iv_aux_directories ${ARGN})
spark_aux_source_directory(${OUTVAR} ${INVAR})
foreach(iv_directory IN LISTS iv_aux_directories)
spark_aux_source_directory(${OUTVAR} ${iv_directory} SKIP)
endforeach(iv_directory IN LISTS iv_aux_directories)
unset(iv_aux_directories)
endmacro(spark_aux_source_directories OUTVAR INVAR)
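
For instance (directory names are illustrative):

# the first call fills DEMO_SOURCES; the extra directories are appended via SKIP
spark_aux_source_directories(DEMO_SOURCES src src/widgets src/dialogs)
add_executable(demo ${DEMO_SOURCES})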
# spark_add_library <lib_name> [files]...
#   builds a library from the given source files
#   and generates a target_link_<lib_name> helper function
@ -27,44 +69,124 @@ endmacro(spark_add_library _lib_name)
#   builds a library from the sources found under <lib_path>
macro(spark_add_library_path _lib_name _lib_path)
set(${_lib_name}_SOURCE_PATH ${_lib_path})
# 0. Library type bookkeeping
set(${_lib_name}_TYPE)
if(${${_lib_name}_SOURCE_PATH} STREQUAL SHARED OR ${${_lib_name}_SOURCE_PATH} STREQUAL STATIC)
set(${_lib_name}_SOURCE_PATH ${ARGV2})
set(${_lib_name}_TYPE_MESSAGE "STATIC(Default)")
set(${_lib_name}_ARGN ${ARGN})
# 1. Check whether _lib_path is the library type SHARED or STATIC
if(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC)
set(${_lib_name}_TYPE ${_lib_path})
spark_debug_message("_lib_path: ${${_lib_name}_SOURCE_PATH}(${ARGV2})[${${_lib_name}_TYPE}]")
set(${_lib_name}_TYPE_MESSAGE ${${_lib_name}_TYPE})
if(${ARGC} LESS 3)
message(FATAL_ERROR "Missing parameter, library path not specified.")
endif(${ARGC} LESS 3)
endif(${${_lib_name}_SOURCE_PATH} STREQUAL SHARED OR ${${_lib_name}_SOURCE_PATH} STREQUAL STATIC)
else()
# otherwise _lib_path is a source path: push it onto the ARGN list
list(APPEND ${_lib_name}_ARGN ${_lib_path})
endif(${_lib_path} STREQUAL SHARED OR ${_lib_path} STREQUAL STATIC)
aux_source_directory(${${_lib_name}_SOURCE_PATH} ${_lib_name}_SOURCES)
# 1. Strip '+' dependency suffixes handed over by spark_add_library_realpaths
spark_debug_message("> Building: ${_lib_name}, type: ${${_lib_name}_TYPE_MESSAGE}")
set(${_lib_name}_ARGN_REF ${${_lib_name}_ARGN})
unset(${_lib_name}_ARGN)
foreach(_old IN LISTS ${_lib_name}_ARGN_REF)
set(_new ${_old})
string(FIND "${_old}" "+" _plus_index)
if(${_plus_index} GREATER 0)
string(SUBSTRING "${_old}" 0 ${_plus_index} _new)
spark_debug_message(" [CONVERT] ${_new} <- ${_old}")
endif(${_plus_index} GREATER 0)
list(APPEND ${_lib_name}_ARGN ${_new})
endforeach(_old IN LISTS ${_lib_name}_ARGN_REF)
# message("================ spark_add_library_path: ${_lib_name} ================")
file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${${_lib_name}_SOURCE_PATH}/*.ui)
add_library(${_lib_name} ${${_lib_name}_TYPE} ${${_lib_name}_SOURCES} ${UI_LIST})
spark_debug_message("${_lib_name}_SOURCES: ${${_lib_name}_SOURCES}, ${${_lib_name}_SOURCE_PATH}")
foreach(item IN LISTS ${_lib_name}_SOURCES)
spark_debug_message(" -> ${item}")
endforeach(item IN LISTS ${_lib_name}_SOURCES)
# 2. Classify the arguments into existing files, directories, and unknown items
set(${_lib_name}_ARGN_SOURCES)
set(${_lib_name}_ARGN_APPEND_PATHS)
set(${_lib_name}_ARGN_UNKNOW)
foreach(item IN LISTS ${_lib_name}_ARGN)
spark_debug_message(" [ARGN] check:" ${item})
if(NOT EXISTS ${item})
set(item ${CMAKE_CURRENT_LIST_DIR}/${item})
endif()
if(EXISTS ${item})
# spark_debug_message(" exists: true")
file(REAL_PATH ${item} ${_lib_name}_ARGN_item)
if(IS_DIRECTORY ${${_lib_name}_ARGN_item})
list(APPEND ${_lib_name}_ARGN_APPEND_PATHS ${item})
else()
list(APPEND ${_lib_name}_ARGN_SOURCES ${item})
endif(IS_DIRECTORY ${${_lib_name}_ARGN_item})
else()
list(APPEND ${_lib_name}_ARGN_UNKNOW ${item})
spark_debug_message(" exists: false")
endif()
endforeach()
list(LENGTH ${_lib_name}_ARGN_SOURCES ${_lib_name}_ARGN_SOURCES_LENGTH)
list(LENGTH ${_lib_name}_ARGN_APPEND_PATHS ${_lib_name}_ARGN_APPEND_PATHS_LENGTH)
list(LENGTH ${_lib_name}_ARGN_UNKNOW ${_lib_name}_ARGN_UNKNOW_LENGTH)
spark_debug_message(" result: files(${${_lib_name}_ARGN_SOURCES_LENGTH}), paths(${${_lib_name}_ARGN_APPEND_PATHS_LENGTH}), unknow(${${_lib_name}_ARGN_UNKNOW_LENGTH})" ${item})
# 3. Gather the sources from every directory into any_files
spark_debug_message(" files:")
set(any_files ${${_lib_name}_ARGN_SOURCES})
foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
spark_aux_source_directory(item_files ${item})
list(APPEND any_files ${item_files})
foreach(item_file IN LISTS item_files)
spark_debug_message(" ${item_file}")
endforeach(item_file IN LISTS item_files)
endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
# 4. Create the library target
add_library(${_lib_name} ${${_lib_name}_TYPE}
${${_lib_name}_ARGN_SOURCES}
${any_files})
# 5. Generate helper functions for this library:
#    target_link_<_lib_name>, target_include_<_lib_name>,
#    target_<_lib_name>_include, target_<_lib_name>_link
function(target_${_lib_name}_include _include)
spark_debug_message("添加引用: ${_lib_name} <- ${_include} ${${_lib_name}_INCLUDE_ARGN}")
target_include_directories(${_lib_name} PRIVATE ${_include})
endfunction(target_${_lib_name}_include _include)
function(target_${_lib_name}_link _library)
spark_debug_message("添加链接: ${_lib_name} <- ${_library} ${${_lib_name}_LINK_ARGN}")
target_link_libraries(${_lib_name} ${_library})
endfunction(target_${_lib_name}_link _library)
function(target_link_${_lib_name} TARGET)
# spark_debug_message("target_link_${_lib_name}")
spark_debug_message(" -> (include): ${${_lib_name}_SOURCE_PATH}")
target_include_directories(${TARGET} PUBLIC "${${_lib_name}_SOURCE_PATH}")
spark_debug_message("链接引用: ${TARGET} <- ${_lib_name}")
target_include_directories(${TARGET} PRIVATE
"${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS})
target_link_libraries(${TARGET} ${_lib_name})
endfunction(target_link_${_lib_name} TARGET)
function(target_include_${_lib_name} TARGET)
# spark_debug_message("target_link_${_lib_name}")
spark_debug_message(" -> (include): ${${_lib_name}_SOURCE_PATH}")
target_include_directories(${TARGET} PUBLIC "${${_lib_name}_SOURCE_PATH}")
# target_link_libraries(${TARGET} ${_lib_name})
spark_debug_message("引入引用: ${TARGET} <- ${_lib_name}")
target_include_directories(${TARGET} PUBLIC
"${${_lib_name}_SOURCE_PATH}" ${${_lib_name}_ARGN_APPEND_PATHS})
endfunction(target_include_${_lib_name} TARGET)
# file(GLOB HEADER_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${${_lib_name}_SOURCE_PATH}/*.h)
target_include_directories(${_lib_name} PUBLIC "${${_lib_name}_SOURCE_PATH}")
target_include_directories(${_lib_name} PRIVATE
"${${_lib_name}_ARGN_APPEND_PATHS}")
# includes
spark_debug_message(" ${_lib_name}_ARGN_APPEND_PATHS: ")
foreach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
string(REPLACE "${CMAKE_SOURCE_DIR}/" "" item_var "${item}")
spark_debug_message(" ${item_var}")
endforeach(item IN LISTS ${_lib_name}_ARGN_APPEND_PATHS)
# target_link_include_directories
# NOTE: a LIST must be wrapped in quotes when expanded here
@ -88,29 +210,105 @@ endmacro(spark_add_library_path _lib_name _lib_path)
# Build an executable from Qt-style sources (*.h/*.cpp/*.qrc/*.qm/...)
macro(spark_add_executable _exec_name)
spark_debug_message("================ ${_exec_name} Executable ================")
set(${_exec_name}_TYPE_MESSAGE "executable program")
spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
add_executable(${_exec_name} ${ARGN})
endmacro(spark_add_executable _exec_name)
# spark_add_executable_path <target> <path> [files ... paths]
#
macro(spark_add_executable_path _exec_name _exec_path)
aux_source_directory(${_exec_path} ${_exec_name}_SOURCES)
spark_add_executable(${_exec_name})
spark_debug_message("================ ${_exec_name} Executable ================")
file(GLOB UI_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${_exec_path}/*.ui)
add_executable(${_exec_name} ${${_exec_name}_SOURCES} ${ARGN} ${UI_LIST})
foreach(item IN LISTS ${_exec_name}_SOURCES)
spark_debug_message(" -> ${item}")
endforeach(item IN LISTS ${_exec_name}_SOURCES)
# 0. Type bookkeeping (not needed for executables)
# set(${_exec_name}_TYPE)
# set(${_exec_name}_TYPE_MESSAGE "executable program")
set(${_exec_name}_ARGN ${ARGN})
# function(target_link_${_exec_name} TARGET)
# spark_debug_message("target_link_${_lib_name}")
spark_debug_message(" -> (include): ${_exec_path}")
target_include_directories(${_exec_name} PUBLIC "${_exec_path}")
# target_link_libraries(${TARGET} ${_lib_name})
# endfunction(target_link_${_exec_name} TARGET)
# target_link_${_exec_name}(${_exec_name})
# 1. Strip '+' dependency suffixes handed over by spark_add_executable_realpaths
# spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
set(${_exec_name}_ARGN_REF ${${_exec_name}_ARGN})
unset(${_exec_name}_ARGN)
foreach(_old IN LISTS ${_exec_name}_ARGN_REF)
set(_new ${_old})
string(FIND "${_old}" "+" _plus_index)
if(${_plus_index} GREATER 0)
string(SUBSTRING "${_old}" 0 ${_plus_index} _new)
spark_debug_message(" [CONVERT] ${_new} <- ${_old}")
endif(${_plus_index} GREATER 0)
list(APPEND ${_exec_name}_ARGN ${_new})
endforeach(_old IN LISTS ${_exec_name}_ARGN_REF)
# 1. Classify the arguments into existing files, directories, and unknown items
# spark_debug_message("> Building: ${_exec_name}, type: ${${_exec_name}_TYPE_MESSAGE}")
set(${_exec_name}_ARGN_SOURCES)
set(${_exec_name}_ARGN_APPEND_PATHS ${_exec_path})
set(${_exec_name}_ARGN_UNKNOW)
foreach(item IN LISTS ${_exec_name}_ARGN)
spark_debug_message(" [ARGN] check:" ${item})
if(NOT EXISTS ${item})
set(item ${CMAKE_CURRENT_LIST_DIR}/${item})
endif()
if(EXISTS ${item})
# spark_debug_message(" exists: true")
file(REAL_PATH ${item} ${_exec_name}_ARGN_item)
if(IS_DIRECTORY ${${_exec_name}_ARGN_item})
list(APPEND ${_exec_name}_ARGN_APPEND_PATHS ${item})
else()
list(APPEND ${_exec_name}_ARGN_SOURCES ${item})
endif(IS_DIRECTORY ${${_exec_name}_ARGN_item})
else()
list(APPEND ${_exec_name}_ARGN_UNKNOW ${item})
spark_debug_message(" exists: false")
endif()
endforeach()
list(LENGTH ${_exec_name}_ARGN_SOURCES ${_exec_name}_ARGN_SOURCES_LENGTH)
list(LENGTH ${_exec_name}_ARGN_APPEND_PATHS ${_exec_name}_ARGN_APPEND_PATHS_LENGTH)
list(LENGTH ${_exec_name}_ARGN_UNKNOW ${_exec_name}_ARGN_UNKNOW_LENGTH)
spark_debug_message(" result: files(${${_exec_name}_ARGN_SOURCES_LENGTH}), paths(${${_exec_name}_ARGN_APPEND_PATHS_LENGTH}), unknow(${${_exec_name}_ARGN_UNKNOW_LENGTH})" ${item})
# 2. Gather the sources from every directory into any_files
spark_debug_message(" files:")
set(any_files ${${_exec_name}_ARGN_SOURCES})
foreach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS)
spark_aux_source_directory(item_files ${item})
list(APPEND any_files ${item_files})
foreach(item_file IN LISTS item_files)
spark_debug_message(" ${item_file}")
endforeach(item_file IN LISTS item_files)
endforeach(item IN LISTS ${_exec_name}_ARGN_APPEND_PATHS)
# 3. Attach the collected sources to the target
# add_executable(${_exec_name}
# ${${_exec_name}_ARGN_SOURCES}
# ${any_files})
target_sources(${_exec_name} PRIVATE
${${_exec_name}_ARGN_SOURCES}
${any_files})
# 4. Generate helper functions:
#    target_<_exec_name>_include, target_<_exec_name>_link
function(target_${_exec_name}_include _include)
spark_debug_message("添加引用: ${_exec_name} <- ${_include} ${${_exec_name}_INCLUDE_ARGN}")
target_include_directories(${_exec_name} PRIVATE ${_include})
endfunction(target_${_exec_name}_include _include)
function(target_${_exec_name}_link _library)
spark_debug_message("添加链接: ${_exec_name} <- ${_library} ${${_exec_name}_LINK_ARGN}")
target_link_libraries(${_exec_name} ${_library})
endfunction(target_${_exec_name}_link _library)
target_include_directories(${_exec_name} PRIVATE
${_exec_path})
spark_debug_message(" include: ${_exec_path}\n")
endmacro(spark_add_executable_path _exec_name _exec_path)
# spark_find_library
@ -152,10 +350,11 @@ endmacro(spark_add_executable_paths _prefix_path)
# ARGN: the libraries to link,
#   used through the generated target_link_<name> function
# _name: determines the generated function name: target_link_<_name>
macro(spark_add_link _IN_NAME)
function(target_link_${_IN_NAME} _NAME)
spark_debug_message("LINK ${_NAME} ${ARGN}")
target_link_libraries(${_NAME}
macro(spark_add_link _name)
function(target_link_${_name} _link)
spark_debug_message("> Linking: ${_link}")
spark_debug_message(" <- ${ARGN}\n")
target_link_libraries(${_link}
${ARGN})
endfunction(target_link_${_IN_NAME} _NAME)
endmacro(spark_add_link _IN_NAME)
endfunction(target_link_${_name} _link)
endmacro(spark_add_link _name)
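
For example, this is how a target_link_qt5-style helper could be produced (the library list is illustrative):

spark_add_link(qt5 Qt5::Core Qt5::Gui Qt5::Widgets)
# any target can now use the generated helper:
target_link_qt5(demo)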


@ -4,171 +4,164 @@
function(find_plus INVAL OUTVAL)
string(FIND "${INVAL}" "+" plus_index)
set(${OUTVAL} ${plus_index} PARENT_SCOPE)
# if(plus_index LESS 0)
# set(${OUTVAL} -1 PARENT_SCOPE)
# else()
# set(${OUTVAL} ${plus_index} PARENT_SCOPE)
# endif(plus_index LESS 0)
endfunction(find_plus INVAL OUTVAL)
# find_plus("FF" FFFF)
# message("--> FFFF ${FFFF}") # --> FFFF -1
# find_plus("F+F" FFFF)
# message("--> FFFF ${FFFF}") # --> FFFF 1
# find_plus("+F+F" FFFF)
# message("--> FFFF ${FFFF}") # --> FFFF 0
function(find_plus_v INVAL OUTVAL)
string(FIND "${${INVAL}}" "+" plus_index)
set(${OUTVAL} ${plus_index} PARENT_SCOPE)
endfunction(find_plus_v INVAL OUTVAL)
# set(FFF)
# list(APPEND FFFF )
# list(APPEND FFFF "F")
# list(APPEND FFFF "FA")
# message("--> FFFF: ${FFFF}") # --> FFFF: F;FA
function(find_colon INVAL OUTVAL)
string(FIND "${INVAL}" ":" colon_index)
set(${OUTVAL} ${colon_index} PARENT_SCOPE)
endfunction(find_colon INVAL OUTVAL)
# set(FFFFS "")
# list(APPEND FFFFS ${FFFF})
# message("--> FFFFS: ${FFFFS}") # --> FFFFS: F;FA
function(find_colon_v INVAL OUTVAL)
string(FIND "${${INVAL}}" ":" colon_index)
set(${OUTVAL} ${colon_index} PARENT_SCOPE)
endfunction(find_colon_v INVAL OUTVAL)
# set(FFFF "+AA+BB+CC+DD")
# string(REPLACE "+" ";" FFFFL "${FFFF}")
# list(LENGTH FFFFL FFFFLEN)
# message("--> FFFFL: ${FFFFL} --> ${FFFFLEN}") # --> FFFFL: F;
function(find_dir INVAL OUTVAL)
string(FIND "${INVAL}" "/" _STR ${ARGN})
set(${OUTVAL} ${_STR} PARENT_SCOPE)
endfunction(find_dir INVAL OUTVAL)
# plus_list
# Turn a "+AAA+BBB+CCC" string into a list:
# replacing "+" with ";" in a string is enough to use it as a CMake list
function(plus_list INVAL OUTVAL OUTVALLEN)
    # set(${OUTVAL} "..." PARENT_SCOPE)
    # set(${OUTVALLEN} 0 PARENT_SCOPE)

    set(_tmps "")   # collected items

    # position of the first "+"
    find_plus(${INVAL} RIGHT_PLUS)

    string(LENGTH "${INVAL}" INVALLEN)
    spark_debug_message("--> input INVAL: --> content: ${INVAL}")
    spark_debug_message("--> input INVAL: --> length: ${INVALLEN}")
    spark_debug_message("--> input INVAL: --> '+' position: ${RIGHT_PLUS}")

    # no "+" present
    if(RIGHT_PLUS LESS 0)
        spark_debug_message("--> input INVAL: --> no new '+' position to compute")
        list(APPEND _tmps ${INVAL})
    else()
        math(EXPR _PLUSINDEX "${RIGHT_PLUS}+1")
        spark_debug_message("--> input INVAL: --> '+' found --> shifted right: ${_PLUSINDEX}")

        string(SUBSTRING "${INVAL}" ${_PLUSINDEX} ${INVALLEN} NewVal)
        spark_debug_message("--> input INVAL: --> '+' found --> shifted right: ${_PLUSINDEX} -> content: ${NewVal}")
        # string(REPLACE "+" ";" _tmps "${NewVal}")
        # list(LENGTH FFFFL FFFFLEN)
        # spark_debug_message("--> new '+' position: ${_PLUSINDEX} --> remaining NewVal: ${NewVal}")

        # find_plus(${NewVal} _NextPlus)
        # if(_NextPlus LESS 0)
        #     list(APPEND _tmps ${NewVal})
        #     spark_debug_message("--> append at new '+' position: ${_PLUSINDEX} --> remainder")
        # else()
        #     spark_debug_message("--> append at new '+' position: ${_PLUSINDEX} --> remainder")
        #     # recurse into the remainder
        #     # plus_list(${NewVal} NewValS )
        #     # foreach(item)
        #     #     list(APPEND _tmps ${item})
        #     # endforeach(item)
        # endif(_NextPlus LESS 0)
    endif(RIGHT_PLUS LESS 0)

    set(${OUTVAL} ${_tmps} PARENT_SCOPE)
    list(LENGTH _tmps _tmps_len)
    set(${OUTVALLEN} ${_tmps_len} PARENT_SCOPE)
endfunction(plus_list INVAL OUTVAL OUTVALLEN)

# plus_list("+AAA+BBB+CCC+DDD" FFF FFLEN)
# spark_debug_message("--------> ${FFF}: -> ${FFLEN}")

function(find_dir_v INVAL OUTVAL)
    string(FIND "${${INVAL}}" "/" _STR ${ARGN})
    set(${OUTVAL} ${_STR} PARENT_SCOPE)
endfunction(find_dir_v INVAL OUTVAL)

# substring helpers: take the part left/right of an index;
# the _v variants take variable names instead of values
function(str_left INVAL INDEX OUTVAL)
    set(LEFT_INDEX ${INDEX})
    string(SUBSTRING "${INVAL}" 0 ${LEFT_INDEX} _LEFT_V)
    set(${OUTVAL} ${_LEFT_V} PARENT_SCOPE)
endfunction(str_left INVAL INDEX OUTVAL)

function(str_right INVAL INDEX OUTVAL)
    math(EXPR RIGHT_INDEX ${INDEX}+1)
    string(SUBSTRING "${INVAL}" ${RIGHT_INDEX} -1 _RIGHT_V)
    set(${OUTVAL} ${_RIGHT_V} PARENT_SCOPE)
endfunction(str_right INVAL INDEX OUTVAL)

function(str_left_v INVAL INDEX OUTVAL)
    set(LEFT_INDEX ${${INDEX}})
    string(SUBSTRING "${${INVAL}}" 0 ${LEFT_INDEX} _LEFT_V)
    set(${OUTVAL} ${_LEFT_V} PARENT_SCOPE)
endfunction(str_left_v INVAL INDEX OUTVAL)

function(str_right_v INVAL INDEX OUTVAL)
    math(EXPR RIGHT_INDEX ${${INDEX}}+1)
    string(SUBSTRING "${${INVAL}}" ${RIGHT_INDEX} -1 _RIGHT_V)
    set(${OUTVAL} ${_RIGHT_V} PARENT_SCOPE)
endfunction(str_right_v INVAL INDEX OUTVAL)

# take the part between ':' and the first '+' after it
function(find_colon_plus INVAL OUTVAL)
    find_colon(${INVAL} COLON_INDEX)
    str_right(${INVAL} ${COLON_INDEX} COLON_RIGHT)
    find_plus_v(COLON_RIGHT PLUS_INDEX)
    str_left_v(COLON_RIGHT PLUS_INDEX COLON_RIGHT_LEFT_PLUS)
    set(${OUTVAL} ${COLON_RIGHT_LEFT_PLUS} PARENT_SCOPE)
endfunction(find_colon_plus INVAL OUTVAL)

function(find_colon_plus_v INVAL OUTVAL)
    find_colon_v(${INVAL} COLON_INDEX)
    str_right_v(${INVAL} COLON_INDEX COLON_RIGHT)
    find_plus_v(COLON_RIGHT PLUS_INDEX)
    str_left_v(COLON_RIGHT PLUS_INDEX COLON_RIGHT_LEFT_PLUS)
    set(${OUTVAL} ${COLON_RIGHT_LEFT_PLUS} PARENT_SCOPE)
endfunction(find_colon_plus_v INVAL OUTVAL)

function(find_dir_plus INVAL OUTVAL)
    # t:*/*+d
    #     ^
    find_dir("${INVAL}" SLASH_INDEX REVERSE)
    str_right("${INVAL}" ${SLASH_INDEX} SLASH_RIGHT)
    find_plus_v(SLASH_RIGHT PLUS_INDEX)
    str_left_v(SLASH_RIGHT PLUS_INDEX SLASH_RIGHT_LEFT_PLUS)
    set(${OUTVAL} ${SLASH_RIGHT_LEFT_PLUS} PARENT_SCOPE)
endfunction(find_dir_plus INVAL OUTVAL)

function(find_dir_plus_v INVAL OUTVAL)
    # t:*/*+d
    #     ^
    find_dir("${${INVAL}}" SLASH_INDEX REVERSE)
    str_right("${${INVAL}}" ${SLASH_INDEX} SLASH_RIGHT)
    find_plus_v(SLASH_RIGHT PLUS_INDEX)
    str_left_v(SLASH_RIGHT PLUS_INDEX SLASH_RIGHT_LEFT_PLUS)
    set(${OUTVAL} ${SLASH_RIGHT_LEFT_PLUS} PARENT_SCOPE)
endfunction(find_dir_plus_v INVAL OUTVAL)
# spark_add_library_source <target> ...
#   record extra source files for <target>;
#   they are consumed later by spark_add_library_path via <target>_ADD_SOURCE
macro(spark_add_library_source target)
set(${target}_ADD_SOURCE ${ARGN})
endmacro(spark_add_library_source target)
# Dispatch to target_link_qt5 or target_link_qt6
macro(_handle_spark_target_link_qt_macro _target)
if(SPARK_FIND_QT5)
target_link_qt5(${_target})
endif(SPARK_FIND_QT5)
if(SPARK_FIND_QT6)
target_link_qt6(${_target})
endif(SPARK_FIND_QT6)
endmacro(_handle_spark_target_link_qt_macro _target)
# spark_add_library_realpaths [dir/name | dir/name+dep1+dep2 ...]
#   build libraries from directory paths
#   plain form: the last path component names the target
#   dependency form: +A+B links the new target against A and B
macro(spark_add_library_realpaths)
    spark_debug_message("---> building from the given items <---")
    # spark_debug_message("--> src/unclassified/ItemDelegates/NdStyledItemDelegate")
    # string(FIND <string> <substring> <output_variable> [REVERSE])
    # string(SUBSTRING <string> <begin> <length> <output_variable>)
    # math(EXPR value "100 * 0xA" OUTPUT_FORMAT DECIMAL) # value is set to "1000"

    set(REALPATHS ${ARGN})
    foreach(REALPATH IN LISTS REALPATHS)
        spark_debug_message("---> input path: ${REALPATH} <---")
        string(LENGTH "${REALPATH}" REALPATH_LENGTH)
        spark_debug_message("---> input path length: ${REALPATH_LENGTH}")
        string(FIND "${REALPATH}" "/" LASTINDEX REVERSE)
        spark_debug_message("---> last '/' position: ${LASTINDEX}")
        math(EXPR LASTINDEX "${LASTINDEX}+1")
        spark_debug_message("---> last '/' shifted right: ${LASTINDEX}")
        string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALPATH_LENGTH} REALNAME_Dependency)

        # '+' marks the start of the dependency list
        find_plus(${REALPATH} RIGHT_PLUS)

        # RIGHT_PLUS is -1 when there is no '+' (0 would mean a leading '+')
        if(RIGHT_PLUS LESS 0)
            set(REALNAME "${REALNAME_Dependency}")
            spark_debug_message("---> part after last '/': ${REALNAME} <-- no '+' dependencies")
            spark_debug_message("---> building ${REALNAME} -> ${REALNAME} ${REALPATH}")
            spark_add_library_path(${REALNAME} ${REALPATH})
            if(SPARK_FIND_QT5)
                target_link_qt5(${REALNAME})
            endif(SPARK_FIND_QT5)
            if(SPARK_FIND_QT6)
                target_link_qt6(${REALNAME})
            endif(SPARK_FIND_QT6)
        else()
            spark_debug_message("---> part after last '/': ${REALNAME_Dependency} <-- has '+' dependencies")
            spark_debug_message("---> '+' position: ${RIGHT_PLUS}")
            # the name sits between the last '/' and the first '+':
            #   src/unclassified/widgets/DocTypeListView+JsonDeploy
            #       ^(LASTINDEX)         ^(RIGHT_PLUS)
            # RIGHT_PLUS - LASTINDEX is the length of DocTypeListView
            math(EXPR REALNAME_LENGTH "${RIGHT_PLUS}-${LASTINDEX}")
            # set(REALNAME "")
            string(SUBSTRING "${REALPATH}" 0 ${RIGHT_PLUS} _REALPATH_DIR)
            string(SUBSTRING "${REALPATH}" ${LASTINDEX} ${REALNAME_LENGTH} REALNAME)
            spark_debug_message("---> library name: ${REALNAME}")
            string(SUBSTRING "${REALPATH}" ${RIGHT_PLUS} ${REALPATH_LENGTH} Dependency)
            spark_debug_message("---> library name: ${REALNAME} --> '+' part: ${Dependency}")
            # plus_list(${Dependency} dependencies dependencies_len)
            string(REPLACE "+" ";" dependencies "${Dependency}")
            spark_debug_message("---> library name: ${REALNAME} --> '+' part: ${Dependency} --> list: ${dependencies} <--")
            spark_debug_message("---> building ${REALNAME} -> ${REALNAME} ${_REALPATH_DIR}")
            spark_add_library_path(${REALNAME} ${_REALPATH_DIR})
            # target_link_qt5(${REALNAME})  # no longer used directly
            target_include_directories(${REALNAME} PUBLIC ${_REALPATH_DIR})
            target_link_libraries(${REALNAME} ${dependencies})
        endif(RIGHT_PLUS LESS 0)

        # # colon rule: not handled here
        # find_colon(${REALPATH} COLON_INDEX)
        # last '/' separates directory and name
        find_dir_v(REALPATH SLASH_INDEX REVERSE)
        # first '+' starts the dependency list
        find_plus_v(REALPATH PLUS_INDEX)

        # PLUS_INDEX is -1 when there is no '+'
        if(PLUS_INDEX LESS 0)
            # no '+' dependencies
            set(dir ${REALPATH})
            str_right_v(REALPATH SLASH_INDEX target)
            spark_add_library_path(${target}
                ${dir}
                ${${target}_ADD_SOURCE}
            )
            # expose the directory publicly when built via spark_add_library_realpaths
            target_include_directories(${target} PUBLIC ${dir})
            _handle_spark_target_link_qt_macro(${target})
        else()
            # split off the '+' dependency part into target_depends_str
            str_right_v(REALPATH PLUS_INDEX target_depends_str)
            string(REPLACE "+" ";" target_depends "${target_depends_str}")
            find_dir_plus_v(REALPATH target)
            str_left_v(REALPATH PLUS_INDEX dir)
            spark_add_library_path(${target}
                ${dir}
                ${${target}_ADD_SOURCE}
            )
            spark_debug_message("    [INCLUDE_DIRS]: ${dir} ${dir}/.. \n")
            target_include_directories(${target} PUBLIC ${dir} ${dir}/..)
            target_link_libraries(${target} ${target_depends})
        endif(PLUS_INDEX LESS 0)
    endforeach(REALPATH IN LISTS REALPATHS)
endmacro(spark_add_library_realpaths)
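
A sketch of both forms (paths and the demo target are illustrative; DocTypeListView and MaintainerInfoView follow the examples used in the comments above):

spark_add_library_realpaths(
    src/unclassified/widgets/DocTypeListView        # plain: builds target DocTypeListView
    src/widgets/MaintainerInfoView+DocTypeListView  # '+': also links DocTypeListView
)
target_link_DocTypeListView(demo)                   # generated helper function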
@ -188,6 +181,7 @@ macro(spark_aux_source_paths AUX_VAR)
endmacro(spark_aux_source_paths AUX_VAR)
# spark_file_glob
#   implemented with file(GLOB)
#
macro(spark_file_glob FGLOB_VAR)
set(${FGLOB_VAR} "")
@ -235,3 +229,107 @@ macro(spark_add_source_paths SOURCE_VAR)
endforeach(ui_src IN LISTS UI_SRCS)
endforeach(source_path IN LISTS ${SOURCE_VAR}_PATHS)
endmacro(spark_add_source_paths SOURCE_VAR)
# spark_add_library_file_glob
#
macro(spark_add_library_file_glob _lib_name)
spark_file_glob(${_lib_name}_SOURCES ${ARGN})
spark_add_library(${_lib_name} ${${_lib_name}_SOURCES})
endmacro(spark_add_library_file_glob _lib_name)
# spark_add_executable_source <target> ...
#   record extra source files for <target>;
#   they are consumed later via <target>_ADD_SOURCE
macro(spark_add_executable_source target)
set(${target}_ADD_SOURCE ${ARGN})
endmacro(spark_add_executable_source target)
# helper for spark_add_executable_realpaths: choose the right add call when dir is empty
macro(_handle_spark_add_executable_realpaths_if_dir_empty_macro)
if("${dir}" STREQUAL "")
spark_add_executable(${target}
${${target}_ADD_SOURCE}
)
else()
spark_add_executable_path(${target}
${dir}
${${target}_ADD_SOURCE}
)
endif("${dir}" STREQUAL "")
endmacro(_handle_spark_add_executable_realpaths_if_dir_empty_macro)
# spark_add_executable_realpaths [name:dir | name:dir+dep1+dep2 ...]
#   build executables from 'target:path' items
#   plain form: <target>:<dir>
#   dependency form: <target>:<dir>+A+B
macro(spark_add_executable_realpaths)
set(REALPATHS ${ARGN})
foreach(REALPATH IN LISTS REALPATHS)
# ':' separates the target name from the directory
find_colon(${REALPATH} COLON_INDEX)
if(COLON_INDEX LESS 0)
# nothing to do
else()
# the part left of ':' is the target name
# string(SUBSTRING "${REALPATH}" 0 ${COLON_INDEX} REALTARGET)
find_colon_v(REALPATH COLON_INDEX)
str_left_v(REALPATH COLON_INDEX target)
str_right_v(REALPATH COLON_INDEX COLON_REMAIN)
# message(FATAL_ERROR "building: ${target}")  # debug aid
endif(COLON_INDEX LESS 0)
# the first '+' starts the dependency list
find_plus_v(REALPATH PLUS_INDEX)
if(PLUS_INDEX LESS 0)
# no '+' dependencies
set(dir ${COLON_REMAIN})
# spark_add_executable_path(${target}
# ${dir}
# ${${target}_ADD_SOURCE}
# )
_handle_spark_add_executable_realpaths_if_dir_empty_macro()
_handle_spark_target_link_qt_macro(${target})
else()
# split off the '+' dependency part into target_depends_str
str_right_v(REALPATH PLUS_INDEX target_depends_str)
string(REPLACE "+" ";" target_depends "${target_depends_str}")
# dir sits between ':' and the first '+'
find_colon_plus_v(REALPATH dir)
# spark_add_executable_path(${target}
# ${dir}
# ${${target}_ADD_SOURCE}
# )
_handle_spark_add_executable_realpaths_if_dir_empty_macro()
target_include_directories(${target} PUBLIC ${dir} ${dir}/..)
target_link_libraries(${target} ${target_depends})
endif(PLUS_INDEX LESS 0)
endforeach(REALPATH IN LISTS REALPATHS)
endmacro(spark_add_executable_realpaths)
# Executable naming-rule notes:
# 1. plain directory path
#    src/widgets/DocTypeListView
#                ^ the last component would name the target
#
# 2. directory path with '+' dependency markers
#    src/widgets/MaintainerInfoView+DocTypeListView+...
#                ^ MaintainerInfoView is the target
#                                  ^ each '+' introduces a dependency
#
# 1. Build from a directory path (doesn't work: an executable target is rarely a single directory)
# 2. Build from a file path (might work)
# 3. Build from a bare file name (unclear)
# 4. Build from a naming rule (target:dir:dir+depend+depend...)
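
A sketch of rule 4, which the macro above implements (all names are illustrative):

spark_add_executable_realpaths(
    demo:src/demo                    # <target>:<dir>
    tool:src/tool+DocTypeListView    # <target>:<dir>+<dependency>
)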


@ -0,0 +1,48 @@
cmake_minimum_required(VERSION 3.5.1)
# translator_qt5 _qmvar [... *.ts]
macro(translator_qt5 _qmvar)
# set(SPARK_TRANSLATIONS_ARGN ${ARGN})
# file(GLOB SPARK_TRANSLATIONS ${SPARK_TRANSLATIONS_ARGN})
# qt5_add_translation(SPARK_QM_TRANSLATIONS
# ${SPARK_TRANSLATIONS})
set(${_qmvar}_ARNG ${ARGN})
file(GLOB ${_qmvar}_TS_FILES ${${_qmvar}_ARNG})
find_package(Qt5LinguistTools)
qt5_add_translation(${_qmvar}
${${_qmvar}_TS_FILES})
set(SPARK_QM_TRANSLATIONS ${_qmvar})
# NOTE: the qm list in SPARK_QM_TRANSLATIONS / ${_qmvar} must be passed to
#   add_executable, otherwise the ts files are never compiled.
# qt5_create_translation would refresh the ts files,
#   but a make clean would then delete the ts sources!
# qt5_add_translation only compiles existing ts files to qm,
#   which is why it is used here.
endmacro(translator_qt5 _qmvar)
# translator_qt6 _qmvar [... *.ts]
macro(translator_qt6 _qmvar)
# todo
endmacro(translator_qt6 _qmvar)
# Dispatch to translator_qt5 or translator_qt6
macro(_handle_spark_translator_qt_macro _outvar)
if(SPARK_FIND_QT5)
translator_qt5(${_outvar} ${ARGN})
endif(SPARK_FIND_QT5)
if(SPARK_FIND_QT6)
translator_qt6(${_outvar} ${ARGN})
endif(SPARK_FIND_QT6)
endmacro(_handle_spark_translator_qt_macro _outvar)
# translator_qt _qmvar [... *.ts | match]
macro(translator_qt)
_handle_spark_translator_qt_macro(${ARGN})
endmacro(translator_qt)
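
A usage sketch (assumes SPARK_FIND_QT5 is set and a translations/ directory of ts files exists):

translator_qt(APP_QM_FILES translations/*.ts)
# the produced qm list must reach add_executable, e.g.:
spark_add_executable(demo main.cpp ${APP_QM_FILES})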

cmake/linuxdeployqt-help (new file, 48 lines)

@ -0,0 +1,48 @@
linuxdeployqt (commit 5fa79fa), build 36 built on 2022-08-21 12:36:03 UTC
WARNING: Not checking glibc on the host system.
The resulting AppDir or AppImage may not run on older systems.
This mode is unsupported and discouraged.
For more information, please see
https://github.com/probonopd/linuxdeployqt/issues/340
Usage: linuxdeployqt <app-binary|desktop file> [options]
Options:
-always-overwrite : Copy files even if the target file exists.
-appimage : Create an AppImage (implies -bundle-non-qt-libs).
-bundle-non-qt-libs : Also bundle non-core, non-Qt libraries.
-exclude-libs=<list> : List of libraries which should be excluded,
separated by comma.
-ignore-glob=<glob> : Glob pattern relative to appdir to ignore when
searching for libraries.
-executable=<path> : Let the given executable use the deployed libraries
too
-extra-plugins=<list> : List of extra plugins which should be deployed,
separated by comma.
-no-copy-copyright-files : Skip deployment of copyright files.
-no-plugins : Skip plugin deployment.
-no-strip : Don't run 'strip' on the binaries.
-no-translations : Skip deployment of translations.
-qmake=<path> : The qmake executable to use.
-qmldir=<path> : Scan for QML imports in the given path.
-qmlimport=<path> : Add the given path to QML module search locations.
-show-exclude-libs : Print exclude libraries list.
-verbose=<0-3> : 0 = no output, 1 = error/warning (default),
2 = normal, 3 = debug.
-updateinformation=<update string> : Embed update information STRING; if zsyncmake is installed, generate zsync file
-qtlibinfix=<infix> : Adapt the .so search if your Qt distribution has infix.
-version : Print version statement and exit.
linuxdeployqt takes an application as input and makes it
self-contained by copying in the Qt libraries and plugins that
the application uses.
By default it deploys the Qt instance that qmake on the $PATH points to.
The '-qmake' option can be used to point to the qmake executable
to be used instead.
Plugins related to a Qt library are copied in with the library.
See the "Deploying Applications on Linux" topic in the
documentation for more information about deployment on Linux.


@ -43,6 +43,6 @@ if(USE_LINUX_APPIMAGE)
include(cmake/SparkAppimageConfig.cmake)   # Spark AppImage module
add_appimage_icon(assets/spark.png)        # icon packed into the AppImage
add_appimage_desktop()                     # default desktop file in the AppImage (uses the values configured by the Spark desktop build; requires spark-desktop)
add_appimage()                             # add the AppImage packing targets
add_appimage_target(${PROJECT_NAME})       # add the AppImage packing targets for the project target
endif(USE_LINUX_APPIMAGE)


@ -9,8 +9,8 @@ option(USE_LINUX_DEBIAN "Generate a deb package for Linux" OFF)
if(USE_LINUX_DEBIAN)
find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
add_package_descript(cmake/package-deb.descript)
find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
add_package_descript(cmake/spark-deb-package.descript)
endif(USE_LINUX_DEBIAN)


@ -66,7 +66,7 @@ if(USE_LINUX_UOS)
# set(PACKAGE_SUFFIX "_onlyUos")
# 2. Reuse the debian packaging flow to build the deb
find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
add_package_descript(cmake/package-deb.descript)
find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
add_package_descript(cmake/spark-deb-package.descript)
endif(USE_LINUX_UOS)


@ -0,0 +1,9 @@
[Desktop Entry]
Name=@APP_NAME@
Name[zh_CN]=@APP_NAME_ZH_CN@
Exec=AppRun %F
Icon=default
Comment=@APP_COMMENT@
Terminal=true
Type=Application
Categories=@APP_CATEGORIES@;


@ -1,6 +1,6 @@
# Comment lines (usage)
# find_package(DebPackage PATHS ${CMAKE_SOURCE_DIR})
# add_package_descript(cmake/package-deb.descript)
# find_package(SparkDebPackage PATHS ${CMAKE_SOURCE_DIR})
# add_package_descript(cmake/spark-deb-package.descript)
# Name of the packaged output file
# FileName: to be decided
@ -40,7 +40,9 @@ Maintainer: Ndd open source team <757210198@qq.com>
# Package homepage
Homepage: https://gitee.com/cxasm/notepad--
# Package recommends
Recommends:
# Package conflicts
Conflicts:
# Package description
Descrition: Notepad-- is a simple, cross-platform text editor developed in China.
 Notepad-- is a simple, cross-platform text editor and an alternative to notepad++.


@ -9,4 +9,5 @@ Icon=@APP_EXECUTE_ICON_PATH@
Categories=@APP_CATEGORIES@
MimeType=text/english;text/plain;text/x-makefile;text/x-c++hdr;text/x-c++src;text/x-chdr;text/x-csrc;text/x-java;text/x-moc;text/x-pascal;text/x-tcl;text/x-tex;application/x-shellscript;text/x-patch;text/x-adasrc;text/x-chdr;text/x-csrc;text/css;application/x-desktop;text/x-patch;text/x-fortran;text/html;text/x-java;text/x-tex;text/x-makefile;text/x-objcsrc;text/x-pascal;application/x-perl;application/x-perl;application/x-php;text/vnd.wap.wml;text/x-python;application/x-ruby;text/sgml;application/xml;model/vrml;image/svg+xml;application/json;
# Generated from the DesktopGenerater component of the z-Tools toolkit