Commit 021e3ee1 authored by Tien-Thinh Nguyen

Re-organize code following a common template

parent 82404287


archives
*.html
build/ext
build/log
.project
build/ausf/build
################################################################################
# Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The OpenAirInterface Software Alliance licenses this file to You under
# the OAI Public License, Version 1.1 (the "License"); you may not use this file
# except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.openairinterface.org/?page_id=698
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
# For more information about the OpenAirInterface (OAI) Software Alliance:
# contact@openairinterface.org
################################################################################
cmake_minimum_required (VERSION 3.0.2)
project(oai-cn)
# Override options for AUSF
set ( PACKAGE_NAME "AUSF" )
set ( STATIC_LINKING False )
#############################################
# Base directories, compatible with legacy OAI building
################################################
set (OPENAIRCN_DIR $ENV{OPENAIRCN_DIR})
set (BUILD_TOP_DIR ${OPENAIRCN_DIR}/build)
set (SRC_TOP_DIR $ENV{OPENAIRCN_DIR}/src)
include(${CMAKE_CURRENT_SOURCE_DIR}/../../src/oai_ausf/CMakeLists.txt)
ADD_SUBDIRECTORY(${CMAKE_CURRENT_SOURCE_DIR}/../../src/ausf_app ${CMAKE_CURRENT_BINARY_DIR}/ausf_app)
# Analogue of the standard CMake module with the same name, which appeared in CMake 2.8.3.
#
# Used for compatibility with older versions of cmake.
function(cmake_parse_arguments prefix options one_value_keywords multi_value_keywords)
set(all_keywords ${options} ${one_value_keywords} ${multi_value_keywords})
# Clear variables before parsing.
foreach(arg ${one_value_keywords} ${multi_value_keywords})
set(cpa_${arg})
endforeach()
foreach(arg ${options})
set(cpa_${arg} FALSE)
endforeach()
set(cpa_UNPARSED_ARGUMENTS)
set(current_keyword)
# Classification for current_keyword:
# 'ONE' or 'MULTI'.
set(current_keyword_type)
# now iterate over all arguments and fill the result variables
foreach(arg ${ARGN})
list(FIND all_keywords ${arg} keyword_index)
if(keyword_index EQUAL -1)
if(current_keyword)
if(current_keyword_type STREQUAL "ONE")
set(cpa_${current_keyword} ${arg})
set(current_keyword)
else(current_keyword_type STREQUAL "ONE")
list(APPEND cpa_${current_keyword} ${arg})
endif(current_keyword_type STREQUAL "ONE")
else(current_keyword)
list(APPEND cpa_UNPARSED_ARGUMENTS ${arg})
endif(current_keyword)
else(keyword_index EQUAL -1)
if(current_keyword AND current_keyword_type STREQUAL "ONE")
message(SEND_ERROR "Value is expected for one-value-keyword ${current_keyword}")
endif(current_keyword AND current_keyword_type STREQUAL "ONE")
list(FIND options ${arg} option_index)
if(option_index EQUAL -1)
set(current_keyword ${arg})
list(FIND one_value_keywords ${arg} one_value_index)
if(one_value_index EQUAL -1)
set(current_keyword_type "MULTI")
else(one_value_index EQUAL -1)
set(current_keyword_type "ONE")
endif(one_value_index EQUAL -1)
else(option_index EQUAL -1)
set(current_keyword)
set(cpa_${arg} TRUE)
endif(option_index EQUAL -1)
endif(keyword_index EQUAL -1)
endforeach(arg ${ARGN})
if(current_keyword AND current_keyword_type STREQUAL "ONE")
message(SEND_ERROR "Value is expected for one-value-keyword ${current_keyword}")
endif(current_keyword AND current_keyword_type STREQUAL "ONE")
# propagate the result variables to the caller:
foreach(keyword ${all_keywords} UNPARSED_ARGUMENTS)
set(${prefix}_${keyword} ${cpa_${keyword}} PARENT_SCOPE)
endforeach(keyword ${all_keywords})
endfunction(cmake_parse_arguments prefix options one_value_keywords multi_value_keywords)
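# Example usage of the compatibility function above (a minimal sketch; the
# function name and keywords are illustrative, not part of this module):
#
#   function(add_component name)
#     cmake_parse_arguments(AC "SHARED" "DESTINATION" "SOURCES" ${ARGN})
#     # AC_SHARED is TRUE/FALSE, AC_DESTINATION holds a single value,
#     # AC_SOURCES holds a list and AC_UNPARSED_ARGUMENTS the remainder.
#     message(STATUS "${name}: sources=${AC_SOURCES} dest=${AC_DESTINATION}")
#   endfunction()
#
#   add_component(foo SHARED DESTINATION lib SOURCES a.c b.c)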
# - Find mysqlclient
# Find the native MySQL includes and library
#
# MYSQL_INCLUDE_DIR - where to find mysql.h, etc.
# MYSQL_LIBRARIES - List of libraries when using MySQL.
# MYSQL_FOUND - True if MySQL found.
IF (MYSQL_INCLUDE_DIR)
# Already in cache, be silent
SET(MYSQL_FIND_QUIETLY TRUE)
ENDIF (MYSQL_INCLUDE_DIR)
FIND_PATH(MYSQL_INCLUDE_DIR mysql.h
/usr/local/include/mysql
/usr/include/mysql
)
SET(MYSQL_NAMES mysqlclient mysqlclient_r)
FIND_LIBRARY(MYSQL_LIBRARY
NAMES ${MYSQL_NAMES}
PATHS /usr/lib /usr/local/lib
PATH_SUFFIXES mysql
)
IF (MYSQL_INCLUDE_DIR AND MYSQL_LIBRARY)
SET(MYSQL_FOUND TRUE)
SET( MYSQL_LIBRARIES ${MYSQL_LIBRARY} )
ELSE (MYSQL_INCLUDE_DIR AND MYSQL_LIBRARY)
SET(MYSQL_FOUND FALSE)
SET( MYSQL_LIBRARIES )
ENDIF (MYSQL_INCLUDE_DIR AND MYSQL_LIBRARY)
IF (MYSQL_FOUND)
IF (NOT MYSQL_FIND_QUIETLY)
MESSAGE(STATUS "Found MySQL: ${MYSQL_LIBRARY}")
ENDIF (NOT MYSQL_FIND_QUIETLY)
ELSE (MYSQL_FOUND)
IF (MYSQL_FIND_REQUIRED)
MESSAGE(STATUS "Looked for MySQL libraries named ${MYSQL_NAMES}.")
MESSAGE(FATAL_ERROR "Could NOT find MySQL library")
ENDIF (MYSQL_FIND_REQUIRED)
ENDIF (MYSQL_FOUND)
MARK_AS_ADVANCED(
MYSQL_LIBRARY
MYSQL_INCLUDE_DIR
)
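# Typical use of this module (a minimal sketch; the target name is
# illustrative, and the module is assumed to be reachable on
# CMAKE_MODULE_PATH under its file name):
#
#   include(FindMySQL)
#   if(MYSQL_FOUND)
#     include_directories(${MYSQL_INCLUDE_DIR})
#     target_link_libraries(my_app ${MYSQL_LIBRARIES})
#   endif()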
# - Look for GNU Bison, the parser generator
# Based off a news post from Andy Cedilnik at Kitware
# Defines the following:
# BISON_EXECUTABLE - path to the bison executable
# BISON_FILE - parse a file with bison
# BISON_PREFIX_OUTPUTS - Set to true to make BISON_FILE produce prefixed
# symbols in the generated output based on filename.
# So for ${filename}.y, you'll get ${filename}parse(), etc.
# instead of yyparse().
# BISON_GENERATE_DEFINES - Set to true to make BISON_FILE output the matching
# .h file for a .c file. You want this if you're using
# flex.
IF(NOT DEFINED BISON_PREFIX_OUTPUTS)
SET(BISON_PREFIX_OUTPUTS FALSE)
ENDIF(NOT DEFINED BISON_PREFIX_OUTPUTS)
IF(NOT DEFINED BISON_GENERATE_DEFINES)
SET(BISON_GENERATE_DEFINES FALSE)
ENDIF(NOT DEFINED BISON_GENERATE_DEFINES)
IF(NOT BISON_EXECUTABLE)
MESSAGE(STATUS "Looking for bison")
FIND_PROGRAM(BISON_EXECUTABLE bison)
IF(BISON_EXECUTABLE)
MESSAGE(STATUS "Looking for bison -- ${BISON_EXECUTABLE}")
ENDIF(BISON_EXECUTABLE)
ENDIF(NOT BISON_EXECUTABLE)
IF(BISON_EXECUTABLE)
MACRO(BISON_FILE FILENAME)
GET_FILENAME_COMPONENT(PATH "${FILENAME}" PATH)
IF("${PATH}" STREQUAL "")
SET(PATH_OPT "")
ELSE("${PATH}" STREQUAL "")
SET(PATH_OPT "/${PATH}")
ENDIF("${PATH}" STREQUAL "")
GET_FILENAME_COMPONENT(HEAD "${FILENAME}" NAME_WE)
IF(NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}")
FILE(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}")
ENDIF(NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}")
IF(BISON_PREFIX_OUTPUTS)
SET(PREFIX "${HEAD}")
ELSE(BISON_PREFIX_OUTPUTS)
SET(PREFIX "yy")
ENDIF(BISON_PREFIX_OUTPUTS)
SET(OUTFILE "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}/${HEAD}.tab.c")
IF(BISON_GENERATE_DEFINES)
SET(HEADER "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}/${HEAD}.tab.h")
ADD_CUSTOM_COMMAND(
OUTPUT "${OUTFILE}" "${HEADER}"
COMMAND "${BISON_EXECUTABLE}"
ARGS "--name-prefix=${PREFIX}"
"--defines"
"--output-file=${OUTFILE}"
"${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME}"
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME}")
SET_SOURCE_FILES_PROPERTIES("${OUTFILE}" "${HEADER}" PROPERTIES GENERATED TRUE)
SET_SOURCE_FILES_PROPERTIES("${HEADER}" PROPERTIES HEADER_FILE_ONLY TRUE)
ELSE(BISON_GENERATE_DEFINES)
ADD_CUSTOM_COMMAND(
OUTPUT "${OUTFILE}"
COMMAND "${BISON_EXECUTABLE}"
ARGS "--name-prefix=${PREFIX}"
"--output-file=${OUTFILE}"
"${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME}"
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME}")
SET_SOURCE_FILES_PROPERTIES("${OUTFILE}" PROPERTIES GENERATED TRUE)
ENDIF(BISON_GENERATE_DEFINES)
ENDMACRO(BISON_FILE)
ENDIF(BISON_EXECUTABLE)
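# Example usage of BISON_FILE (a minimal sketch; filenames and the target
# name are illustrative):
#
#   set(BISON_GENERATE_DEFINES TRUE)
#   BISON_FILE(parser.y)
#   # Generates ${CMAKE_CURRENT_BINARY_DIR}/parser.tab.c (and parser.tab.h),
#   # which can then be listed among the target sources:
#   add_executable(my_parser main.c ${CMAKE_CURRENT_BINARY_DIR}/parser.tab.c)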
# - Look for GNU flex, the lexer generator.
# Defines the following:
# FLEX_EXECUTABLE - path to the flex executable
# FLEX_FILE - parse a file with flex
# FLEX_PREFIX_OUTPUTS - Set to true to make FLEX_FILE produce outputs of
# lex.${filename}.c, not lex.yy.c . Passes -P to flex.
IF(NOT DEFINED FLEX_PREFIX_OUTPUTS)
SET(FLEX_PREFIX_OUTPUTS FALSE)
ENDIF(NOT DEFINED FLEX_PREFIX_OUTPUTS)
IF(NOT FLEX_EXECUTABLE)
MESSAGE(STATUS "Looking for flex")
FIND_PROGRAM(FLEX_EXECUTABLE flex)
IF(FLEX_EXECUTABLE)
MESSAGE(STATUS "Looking for flex -- ${FLEX_EXECUTABLE}")
ENDIF(FLEX_EXECUTABLE)
ENDIF(NOT FLEX_EXECUTABLE)
IF(FLEX_EXECUTABLE)
MACRO(FLEX_FILE FILENAME)
GET_FILENAME_COMPONENT(PATH "${FILENAME}" PATH)
IF("${PATH}" STREQUAL "")
SET(PATH_OPT "")
ELSE("${PATH}" STREQUAL "")
SET(PATH_OPT "/${PATH}")
ENDIF("${PATH}" STREQUAL "")
IF(NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}")
FILE(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}")
ENDIF(NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}")
IF(FLEX_PREFIX_OUTPUTS)
GET_FILENAME_COMPONENT(PREFIX "${FILENAME}" NAME_WE)
ELSE(FLEX_PREFIX_OUTPUTS)
SET(PREFIX "yy")
ENDIF(FLEX_PREFIX_OUTPUTS)
SET(OUTFILE "${CMAKE_CURRENT_BINARY_DIR}${PATH_OPT}/lex.${PREFIX}.c")
ADD_CUSTOM_COMMAND(
OUTPUT "${OUTFILE}"
COMMAND "${FLEX_EXECUTABLE}"
ARGS "--prefix=${PREFIX}"
"--outfile=${OUTFILE}"
"${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME}"
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/${FILENAME}")
SET_SOURCE_FILES_PROPERTIES("${OUTFILE}" PROPERTIES GENERATED TRUE)
ENDMACRO(FLEX_FILE)
ENDIF(FLEX_EXECUTABLE)
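# Example usage of FLEX_FILE (a minimal sketch; filenames and the target
# name are illustrative):
#
#   FLEX_FILE(lexer.l)
#   # Generates ${CMAKE_CURRENT_BINARY_DIR}/lex.yy.c (or lex.lexer.c when
#   # FLEX_PREFIX_OUTPUTS is TRUE); add the generated file to the target:
#   add_executable(my_scanner main.c ${CMAKE_CURRENT_BINARY_DIR}/lex.yy.c)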
# - Try to find the CHECK libraries
# Once done this will define
#
# CHECK_FOUND - system has check
# CHECK_INCLUDE_DIR - the check include directory
# CHECK_LIBRARIES - check library
#
# This configuration file for finding libcheck is originally from
# the opensync project. It was originally downloaded from here:
# opensync.org/browser/branches/3rd-party-cmake-modules/modules/FindCheck.cmake
#
# Copyright (c) 2007 Daniel Gollub <dgollub@suse.de>
# Copyright (c) 2007 Bjoern Ricks <b.ricks@fh-osnabrueck.de>
#
# Redistribution and use is allowed according to the terms of the New
# BSD license.
# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
INCLUDE( FindPkgConfig )
# Take care about check.pc settings
PKG_SEARCH_MODULE( CHECK check )
# Look for CHECK include dir and libraries
IF( NOT CHECK_FOUND )
IF ( CHECK_INSTALL_DIR )
MESSAGE ( STATUS "Using override CHECK_INSTALL_DIR to find check" )
SET ( CHECK_INCLUDE_DIR "${CHECK_INSTALL_DIR}/include" )
SET ( CHECK_INCLUDE_DIRS "${CHECK_INCLUDE_DIR}" )
FIND_LIBRARY( CHECK_LIBRARY NAMES check PATHS "${CHECK_INSTALL_DIR}/lib" )
FIND_LIBRARY( COMPAT_LIBRARY NAMES compat PATHS "${CHECK_INSTALL_DIR}/lib" )
SET ( CHECK_LIBRARIES "${CHECK_LIBRARY}" "${COMPAT_LIBRARY}" )
ELSE ( CHECK_INSTALL_DIR )
FIND_PATH( CHECK_INCLUDE_DIR check.h )
FIND_LIBRARY( CHECK_LIBRARIES NAMES check )
ENDIF ( CHECK_INSTALL_DIR )
IF ( CHECK_INCLUDE_DIR AND CHECK_LIBRARIES )
SET( CHECK_FOUND 1 )
IF ( NOT Check_FIND_QUIETLY )
MESSAGE ( STATUS "Found CHECK: ${CHECK_LIBRARIES}" )
ENDIF ( NOT Check_FIND_QUIETLY )
ELSE ( CHECK_INCLUDE_DIR AND CHECK_LIBRARIES )
IF ( Check_FIND_REQUIRED )
MESSAGE( FATAL_ERROR "Could NOT find CHECK" )
ELSE ( Check_FIND_REQUIRED )
IF ( NOT Check_FIND_QUIETLY )
MESSAGE( STATUS "Could NOT find CHECK" )
ENDIF ( NOT Check_FIND_QUIETLY )
ENDIF ( Check_FIND_REQUIRED )
ENDIF ( CHECK_INCLUDE_DIR AND CHECK_LIBRARIES )
ENDIF( NOT CHECK_FOUND )
# Hide advanced variables from CMake GUIs
MARK_AS_ADVANCED( CHECK_INCLUDE_DIR CHECK_LIBRARIES )
# Find Libevent
# http://monkey.org/~provos/libevent/
#
# Once done, this will define:
#
# Event_FOUND - system has Event
# Event_INCLUDE_DIRS - the Event include directories
# Event_LIBRARIES - link these to use Event
#
if (EVENT_INCLUDE_DIR AND EVENT_LIBRARY)
# Already in cache, be silent
set(EVENT_FIND_QUIETLY TRUE)
endif (EVENT_INCLUDE_DIR AND EVENT_LIBRARY)
find_path(EVENT_INCLUDE_DIR event.h
PATHS /usr/include
PATH_SUFFIXES event
)
find_library(EVENT_LIBRARY
NAMES event
PATHS /usr/lib /usr/local/lib
)
set(EVENT_LIBRARIES ${EVENT_LIBRARY} )
add_definitions(-DLIBNET_LIL_ENDIAN)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(EVENT
DEFAULT_MSG
EVENT_INCLUDE_DIR
EVENT_LIBRARIES
)
mark_as_advanced(EVENT_INCLUDE_DIR EVENT_LIBRARY)
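# Example of consuming the results of this module (a minimal sketch; the
# target name is illustrative):
#
#   if(EVENT_FOUND)
#     include_directories(${EVENT_INCLUDE_DIR})
#     target_link_libraries(my_daemon ${EVENT_LIBRARIES})
#   endif()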
# - Find freediameter
# Find the native FreeDiameter includes and libraries
#
# FREEDIAMETER_FOUND - True if FreeDiameter found.
# FREEDIAMETER_INCLUDE_DIR - where to find freeDiameter/freeDiameter-host.h, etc.
# FREEDIAMETER_LIBRARIES - List of libraries when using freeDiameter.
# FREEDIAMETER_HSS_S6A_ENABLED - true if FreeDiameter enabled for S6A interface
if (FREEDIAMETER_INCLUDE_DIR AND FREEDIAMETER_LIBRARIES)
set(FREEDIAMETER_FIND_QUIETLY TRUE)
endif (FREEDIAMETER_INCLUDE_DIR AND FREEDIAMETER_LIBRARIES)
# Include dir
find_path(FREEDIAMETER_INCLUDE_DIR
NAMES
freeDiameter/freeDiameter-host.h
freeDiameter/libfdcore.h
freeDiameter/libfdproto.h
)
# Library
find_library(FREEDIAMETER_LIBRARY
NAMES fdcore
)
# handle the QUIETLY and REQUIRED arguments and set FREEDIAMETER_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(FREEDIAMETER DEFAULT_MSG FREEDIAMETER_LIBRARY FREEDIAMETER_INCLUDE_DIR)
IF(FREEDIAMETER_FOUND)
SET( FREEDIAMETER_LIBRARIES ${FREEDIAMETER_LIBRARY} )
ELSE(FREEDIAMETER_FOUND)
SET( FREEDIAMETER_LIBRARIES )
ENDIF(FREEDIAMETER_FOUND)
find_library(FREEDIAMETER_LIBRARY2
NAMES fdproto
)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(FREEDIAMETER2 DEFAULT_MSG FREEDIAMETER_LIBRARY2 FREEDIAMETER_INCLUDE_DIR)
IF(FREEDIAMETER2_FOUND)
SET( FREEDIAMETER_LIBRARIES ${FREEDIAMETER_LIBRARIES} ${FREEDIAMETER_LIBRARY2} )
ELSE(FREEDIAMETER2_FOUND)
SET( FREEDIAMETER_LIBRARIES )
ENDIF(FREEDIAMETER2_FOUND)
# Lastly make it so that the FREEDIAMETER_LIBRARY and FREEDIAMETER_INCLUDE_DIR variables
# only show up under the advanced options in the gui cmake applications.
MARK_AS_ADVANCED( FREEDIAMETER_LIBRARY FREEDIAMETER_INCLUDE_DIR )
# Now check if the library is patched for OPENAIR HSS S6A.
IF(FREEDIAMETER_FOUND)
IF( FREEDIAMETER_INCLUDE_DIR )
# Extract FD_PROJECT_VERSION_MAJOR, FD_PROJECT_VERSION_MINOR from freeDiameter-host.h
# Read the whole file:
#
FILE(READ "${FREEDIAMETER_INCLUDE_DIR}/freeDiameter/freeDiameter-host.h" _freeDiameter_HOST_H_CONTENTS)
STRING(REGEX REPLACE ".*#define FD_PROJECT_VERSION_MAJOR ([0-9]+).*" "\\1" FD_PROJECT_VERSION_MAJOR "${_freeDiameter_HOST_H_CONTENTS}")
STRING(REGEX REPLACE ".*#define FD_PROJECT_VERSION_MINOR ([0-9]+).*" "\\1" FD_PROJECT_VERSION_MINOR "${_freeDiameter_HOST_H_CONTENTS}")
IF(FD_PROJECT_VERSION_MAJOR GREATER 0)
MATH(EXPR FREEDIAMETER_VERSION_VALUE "${FD_PROJECT_VERSION_MAJOR} * 100 + ${FD_PROJECT_VERSION_MINOR} * 10")
SET(FREEDIAMETER_VERSION "${FREEDIAMETER_VERSION_VALUE}" CACHE INTERNAL "Freediameter release version")
MESSAGE(STATUS "freeDiameter version found ${FREEDIAMETER_VERSION}")
ENDIF(FD_PROJECT_VERSION_MAJOR GREATER 0)
ENDIF( FREEDIAMETER_INCLUDE_DIR )
GET_FILENAME_COMPONENT(FREEDIAMETER_PATH ${FREEDIAMETER_LIBRARY} PATH)
IF( NOT( "${FREEDIAMETER_VERSION_TEST_FOR}" STREQUAL "${FREEDIAMETER_LIBRARIES}" ))
INCLUDE (CheckLibraryExists)
MESSAGE(STATUS "Checking freeDiameter patched for S6A")
UNSET(FREEDIAMETER_HSS_S6A_ENABLED)
UNSET(FREEDIAMETER_HSS_S6A_ENABLED CACHE)
UNSET(DICT_S6A_FOUND)
UNSET(DICT_S6A_FOUND CACHE)
FIND_FILE(DICT_S6A_FOUND NAMES dict_s6a.fdx PATHS ${FREEDIAMETER_PATH} ${FREEDIAMETER_PATH}/freeDiameter)
IF(DICT_S6A_FOUND)
SET( FREEDIAMETER_HSS_S6A_ENABLED TRUE CACHE INTERNAL "dict_s6a.fdx Found")
ELSE(DICT_S6A_FOUND)
SET( FREEDIAMETER_HSS_S6A_ENABLED FALSE CACHE INTERNAL "dict_s6a.fdx not Found")
ENDIF(DICT_S6A_FOUND)
SET( FREEDIAMETER_VERSION_TEST_FOR ${FREEDIAMETER_LIBRARIES} CACHE INTERNAL "Version the test was made against" )
ENDIF (NOT( "${FREEDIAMETER_VERSION_TEST_FOR}" STREQUAL "${FREEDIAMETER_LIBRARIES}" ))
ENDIF(FREEDIAMETER_FOUND)
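# Example of consuming the results of this module (a minimal sketch; the
# target name is illustrative):
#
#   if(FREEDIAMETER_FOUND AND FREEDIAMETER_HSS_S6A_ENABLED)
#     include_directories(${FREEDIAMETER_INCLUDE_DIR})
#     target_link_libraries(my_hss ${FREEDIAMETER_LIBRARIES})
#   else()
#     message(FATAL_ERROR "freeDiameter with the S6A dictionary is required")
#   endif()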
# FindGCCXML
# ----------
find_program(GCCXML
NAMES gccxml
PATHS "/usr/bin"
"usr/local/bin"
)
mark_as_advanced(GCCXML)
# - Find gcrypt
# Find the native GCRYPT includes and library
#
# GCRYPT_FOUND - True if gcrypt found.
# GCRYPT_INCLUDE_DIR - where to find gcrypt.h, etc.
# GCRYPT_LIBRARIES - List of libraries when using gcrypt.
if (GCRYPT_INCLUDE_DIR AND GCRYPT_LIBRARIES)
set(GCRYPT_FIND_QUIETLY TRUE)
endif (GCRYPT_INCLUDE_DIR AND GCRYPT_LIBRARIES)
# Include dir
find_path(GCRYPT_INCLUDE_DIR
NAMES
gcrypt.h
)
# Library
find_library(GCRYPT_LIBRARY
NAMES gcrypt
)
# handle the QUIETLY and REQUIRED arguments and set GCRYPT_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(GCRYPT DEFAULT_MSG GCRYPT_LIBRARY GCRYPT_INCLUDE_DIR)
IF(GCRYPT_FOUND)
SET( GCRYPT_LIBRARIES ${GCRYPT_LIBRARY} )
ELSE(GCRYPT_FOUND)
SET( GCRYPT_LIBRARIES )
ENDIF(GCRYPT_FOUND)
# Lastly make it so that the GCRYPT_LIBRARY and GCRYPT_INCLUDE_DIR variables
# only show up under the advanced options in the gui cmake applications.
MARK_AS_ADVANCED( GCRYPT_LIBRARY GCRYPT_INCLUDE_DIR )
# - Find gnutls
# Find the native GNUTLS includes and library
#
# GNUTLS_FOUND - True if gnutls found.
# GNUTLS_INCLUDE_DIR - where to find gnutls.h, etc.
# GNUTLS_LIBRARIES - List of libraries when using gnutls.
# GNUTLS_VERSION_210 - true if GnuTLS version is >= 2.10.0 (does not require additional separate gcrypt initialization)
# GNUTLS_VERSION_212 - true if GnuTLS version is >= 2.12.0 (supports gnutls_transport_set_vec_push_function)
# GNUTLS_VERSION_300 - true if GnuTLS version is >= 3.00.0 (x509 verification functions changed)
# GNUTLS_VERSION_310 - true if GnuTLS version is >= 3.01.0 (stabilization branch with new APIs)
if (GNUTLS_INCLUDE_DIR AND GNUTLS_LIBRARIES)
set(GNUTLS_FIND_QUIETLY TRUE)
endif (GNUTLS_INCLUDE_DIR AND GNUTLS_LIBRARIES)
# Include dir
find_path(GNUTLS_INCLUDE_DIR
NAMES
gnutls.h
gnutls/gnutls.h
)
# Library
find_library(GNUTLS_LIBRARY
NAMES gnutls
)
# handle the QUIETLY and REQUIRED arguments and set GNUTLS_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(GNUTLS DEFAULT_MSG GNUTLS_LIBRARY GNUTLS_INCLUDE_DIR)
IF(GNUTLS_FOUND)
SET( GNUTLS_LIBRARIES ${GNUTLS_LIBRARY} )
ELSE(GNUTLS_FOUND)
SET( GNUTLS_LIBRARIES )
ENDIF(GNUTLS_FOUND)
# Lastly make it so that the GNUTLS_LIBRARY and GNUTLS_INCLUDE_DIR variables
# only show up under the advanced options in the gui cmake applications.
MARK_AS_ADVANCED( GNUTLS_LIBRARY GNUTLS_INCLUDE_DIR )
# Now check if the library is recent. gnutls_hash was added in 2.10.0.
# Also test library is even more recent. gnutls_x509_trust_list_verify_crt was added in 3.00.0.
IF(GNUTLS_FOUND)
IF( NOT( "${GNUTLS_VERSION_TEST_FOR}" STREQUAL "${GNUTLS_LIBRARY}" ))
INCLUDE (CheckLibraryExists)
MESSAGE(STATUS "Checking GNUTLS version")
UNSET(GNUTLS_VERSION_210)
UNSET(GNUTLS_VERSION_210 CACHE)
UNSET(GNUTLS_VERSION_212)
UNSET(GNUTLS_VERSION_212 CACHE)
UNSET(GNUTLS_VERSION_300)
UNSET(GNUTLS_VERSION_300 CACHE)
UNSET(GNUTLS_VERSION_310)
UNSET(GNUTLS_VERSION_310 CACHE)
GET_FILENAME_COMPONENT(GNUTLS_PATH ${GNUTLS_LIBRARY} PATH)
CHECK_LIBRARY_EXISTS(gnutls gnutls_hash ${GNUTLS_PATH} GNUTLS_VERSION_210)
CHECK_LIBRARY_EXISTS(gnutls gnutls_transport_set_vec_push_function ${GNUTLS_PATH} GNUTLS_VERSION_212)
CHECK_LIBRARY_EXISTS(gnutls gnutls_x509_trust_list_verify_crt ${GNUTLS_PATH} GNUTLS_VERSION_300)
CHECK_LIBRARY_EXISTS(gnutls gnutls_handshake_set_timeout ${GNUTLS_PATH} GNUTLS_VERSION_310)
SET( GNUTLS_VERSION_TEST_FOR ${GNUTLS_LIBRARY} CACHE INTERNAL "Version the test was made against" )
ENDIF (NOT( "${GNUTLS_VERSION_TEST_FOR}" STREQUAL "${GNUTLS_LIBRARY}" ))
ENDIF(GNUTLS_FOUND)
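# Example of consuming the library and the version flags set above
# (a minimal sketch; the target name is illustrative):
#
#   if(GNUTLS_FOUND)
#     include_directories(${GNUTLS_INCLUDE_DIR})
#     target_link_libraries(my_tls_app ${GNUTLS_LIBRARIES})
#     if(GNUTLS_VERSION_310)
#       add_definitions(-DGNUTLS_VERSION_310)
#     endif()
#   endif()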
# - Support for building kernel modules
# This module defines several variables which may be used when building
# kernel modules.
#
# The following variables are set here:
# Kbuild_VERSION_STRING - kernel version.
# Kbuild_ARCH - architecture.
# Kbuild_BUILD_DIR - directory for building the kernel and/or its components.
# Kbuild_VERSION_{MAJOR|MINOR|TWEAK} - kernel version components
# Kbuild_VERSION_STRING_CLASSIC - classic representation of version,
# where parts are delimited by dots.
#
# Cache variables which affect this package:
# CMAKE_SYSTEM_VERSION - change Kbuild_VERSION_STRING
# Also, setting CMAKE_SYSTEM_VERSION will be interpreted by cmake as cross-compiling.
#
# Cache variables (ADVANCED) which affect this package:
# ARCH - if not empty, change Kbuild_ARCH
# KBUILD_DIR - change Kbuild_BUILD_DIR
set(Kbuild_VERSION_STRING ${CMAKE_SYSTEM_VERSION})
# Form classic version view.
string(REGEX MATCH "([0-9]+)\\.([0-9]+)[.-]([0-9]+)"
_kernel_version_match
"${Kbuild_VERSION_STRING}"
)
if(NOT _kernel_version_match)
message(FATAL_ERROR "Kernel version has unexpected format: ${Kbuild_VERSION_STRING}")
endif(NOT _kernel_version_match)
set(Kbuild_VERSION_MAJOR ${CMAKE_MATCH_1})
set(Kbuild_VERSION_MINOR ${CMAKE_MATCH_2})
set(Kbuild_VERSION_TWEAK ${CMAKE_MATCH_3})
# Version string for compare
set(Kbuild_VERSION_STRING_CLASSIC "${Kbuild_VERSION_MAJOR}.${Kbuild_VERSION_MINOR}.${Kbuild_VERSION_TWEAK}")
set(KBUILD_DIR "/lib/modules/${Kbuild_VERSION_STRING}/build" CACHE PATH
"Directory for build linux kernel and/or its components."
)
mark_as_advanced(KBUILD_DIR)
set(Kbuild_BUILD_DIR "${KBUILD_DIR}")
set(ARCH "" CACHE STRING
"Architecture for build linux kernel components for, empty string means autodetect."
)
mark_as_advanced(ARCH)
if(NOT ARCH)
# Autodetect arch.
execute_process(COMMAND uname -m
RESULT_VARIABLE uname_m_result
OUTPUT_VARIABLE ARCH_DEFAULT
)
if(NOT uname_m_result EQUAL 0)
message("'uname -m' failed:")
message("${ARCH_DEFAULT}")
message(FATAL_ERROR "Failed to determine system architecture.")
endif(NOT uname_m_result EQUAL 0)
string(REGEX REPLACE "\n$" "" ARCH_DEFAULT "${ARCH_DEFAULT}")
# Pairs of pattern-replace for postprocess architecture string from
# 'uname -m'.
# Taken from ./Makefile of the kernel build.
set(_kbuild_arch_replacers
"i.86" "x86"
"x86_64" "x86"
"sun4u" "sparc64"
"arm.*" "arm"
"sa110" "arm"
"s390x" "s390"
"parisc64" "parisc"
"ppc.*" "powerpc"
"mips.*" "mips"
"sh[234].*" "sh"
"aarch64.*" "arm64"
)
set(_current_pattern)
foreach(p ${_kbuild_arch_replacers})
if(_current_pattern)
string(REGEX REPLACE "${_current_pattern}" "${p}" ARCH_DEFAULT "${ARCH_DEFAULT}")
set(_current_pattern)
else(_current_pattern)
set(_current_pattern "${p}")
endif(_current_pattern)
endforeach(p ${_kbuild_arch_replacers})
set(Kbuild_ARCH "${ARCH_DEFAULT}")
else(NOT ARCH)
# User-provided value is used.
set(Kbuild_ARCH "${ARCH}")
endif(NOT ARCH)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Kbuild
REQUIRED_VARS Kbuild_BUILD_DIR
VERSION_VAR Kbuild_VERSION_STRING_CLASSIC
)
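# Example of using the detected values (a minimal sketch; it assumes a Kbuild
# file already exists in ${CMAKE_CURRENT_BINARY_DIR}, and the target name is
# illustrative):
#
#   message(STATUS "Building against kernel ${Kbuild_VERSION_STRING_CLASSIC} (${Kbuild_ARCH})")
#   add_custom_target(my_kmodule ALL
#     COMMAND make -C ${Kbuild_BUILD_DIR} ARCH=${Kbuild_ARCH}
#             M=${CMAKE_CURRENT_BINARY_DIR} modules
#   )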
# - Try to find the LibXml2 xml processing library
# Once done this will define
#
# LIBXML2_FOUND - System has LibXml2
# LIBXML2_INCLUDE_DIR - The LibXml2 include directory
# LIBXML2_LIBRARIES - The libraries needed to use LibXml2
# LIBXML2_DEFINITIONS - Compiler switches required for using LibXml2
# LIBXML2_XMLLINT_EXECUTABLE - The XML checking tool xmllint coming with LibXml2
#=============================================================================
# Copyright 2006-2009 Kitware, Inc.
# Copyright 2006 Alexander Neundorf <neundorf@kde.org>
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)
# use pkg-config to get the directories and then use these values
# in the FIND_PATH() and FIND_LIBRARY() calls
FIND_PACKAGE(PkgConfig)
PKG_CHECK_MODULES(PC_LIBXML libxml-2.0)
SET(LIBXML2_DEFINITIONS ${PC_LIBXML_CFLAGS_OTHER})
FIND_PATH(LIBXML2_INCLUDE_DIR NAMES libxml/xpath.h
HINTS
${PC_LIBXML_INCLUDEDIR}
${PC_LIBXML_INCLUDE_DIRS}
PATH_SUFFIXES libxml2
)
FIND_LIBRARY(LIBXML2_LIBRARIES NAMES xml2 libxml2
HINTS
${PC_LIBXML_LIBDIR}
${PC_LIBXML_LIBRARY_DIRS}
)
FIND_PROGRAM(LIBXML2_XMLLINT_EXECUTABLE xmllint)
# for backwards compat. with KDE 4.0.x:
SET(XMLLINT_EXECUTABLE "${LIBXML2_XMLLINT_EXECUTABLE}")
# handle the QUIETLY and REQUIRED arguments and set LIBXML2_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(LibXml2 DEFAULT_MSG LIBXML2_LIBRARIES LIBXML2_INCLUDE_DIR)
MARK_AS_ADVANCED(LIBXML2_INCLUDE_DIR LIBXML2_LIBRARIES LIBXML2_XMLLINT_EXECUTABLE)
# - Find mysqlclient
#
# -*- cmake -*-
#
# Find the native MySQL includes and library
#
# MySQL_INCLUDE_DIR - where to find mysql.h, etc.
# MySQL_LIBRARIES - List of libraries when using MySQL.
# MySQL_FOUND - True if MySQL found.
IF (MySQL_INCLUDE_DIR AND MySQL_LIBRARY)
# Already in cache, be silent
SET(MySQL_FIND_QUIETLY TRUE)
ENDIF (MySQL_INCLUDE_DIR AND MySQL_LIBRARY)
# Include dir
FIND_PATH(MySQL_INCLUDE_DIR
NAMES mysql.h
PATH_SUFFIXES mysql
)
# Library
#SET(MySQL_NAMES mysqlclient mysqlclient_r)
SET(MySQL_NAMES mysqlclient)
FIND_LIBRARY(MySQL_LIBRARY
NAMES ${MySQL_NAMES}
PATHS /usr/lib /usr/local/lib
PATH_SUFFIXES mysql
)
IF (MySQL_INCLUDE_DIR AND MySQL_LIBRARY)
SET(MySQL_FOUND TRUE)
SET( MySQL_LIBRARIES ${MySQL_LIBRARY} )
ELSE (MySQL_INCLUDE_DIR AND MySQL_LIBRARY)
SET(MySQL_FOUND FALSE)
SET( MySQL_LIBRARIES )
ENDIF (MySQL_INCLUDE_DIR AND MySQL_LIBRARY)
IF (MySQL_FOUND)
IF (NOT MySQL_FIND_QUIETLY)
MESSAGE(STATUS "Found MySQL: ${MySQL_LIBRARY}")
ENDIF (NOT MySQL_FIND_QUIETLY)
ELSE (MySQL_FOUND)
IF (MySQL_FIND_REQUIRED)
MESSAGE(STATUS "Looked for MySQL libraries named ${MySQL_NAMES}.")
MESSAGE(FATAL_ERROR "Could NOT find MySQL library")
ENDIF (MySQL_FIND_REQUIRED)
ENDIF (MySQL_FOUND)
MARK_AS_ADVANCED(
MySQL_LIBRARY
MySQL_INCLUDE_DIR
)
# Find the native Nettle includes, library, and flags
#
# NETTLE_INCLUDE_DIR - where to find nettle.h, etc.
# NETTLE_LIBRARIES - List of libraries when using Nettle.
# NETTLE_FOUND - True if Nettle found.
IF (NETTLE_INCLUDE_DIR)
# Already in cache, be silent
SET(NETTLE_FIND_QUIETLY TRUE)
ENDIF (NETTLE_INCLUDE_DIR)
FIND_PATH(NETTLE_INCLUDE_DIR nettle/nettle-meta.h)
SET(NETTLE_NAMES nettle)
FIND_LIBRARY(NETTLE_LIBRARY NAMES ${NETTLE_NAMES} )
# handle the QUIETLY and REQUIRED arguments and set NETTLE_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(NETTLE DEFAULT_MSG NETTLE_LIBRARY NETTLE_INCLUDE_DIR)
IF(NETTLE_FOUND)
SET(NETTLE_LIBRARIES ${NETTLE_LIBRARY})
ELSE(NETTLE_FOUND)
SET(NETTLE_LIBRARIES )
ENDIF(NETTLE_FOUND)
MARK_AS_ADVANCED(NETTLE_LIBRARY NETTLE_INCLUDE_DIR)
# - Find PostgreSQL library
#
# This module defines:
# POSTGRESQL_FOUND - True if the package is found
# POSTGRESQL_INCLUDE_DIR - containing libpq-fe.h
# POSTGRESQL_LIBRARIES - Libraries to link to use PQ functions.
if (POSTGRESQL_INCLUDE_DIR AND POSTGRESQL_LIBRARIES)
set(POSTGRESQL_FIND_QUIETLY TRUE)
endif (POSTGRESQL_INCLUDE_DIR AND POSTGRESQL_LIBRARIES)
# Include dir
find_path(POSTGRESQL_INCLUDE_DIR
NAMES libpq-fe.h
PATH_SUFFIXES pgsql postgresql
)
# Library
find_library(POSTGRESQL_LIBRARY
NAMES pq
)
# handle the QUIETLY and REQUIRED arguments and set POSTGRESQL_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(POSTGRESQL DEFAULT_MSG POSTGRESQL_LIBRARY POSTGRESQL_INCLUDE_DIR)
IF(POSTGRESQL_FOUND)
SET( POSTGRESQL_LIBRARIES ${POSTGRESQL_LIBRARY} )
ELSE(POSTGRESQL_FOUND)
SET( POSTGRESQL_LIBRARIES )
ENDIF(POSTGRESQL_FOUND)
# Lastly make it so that the POSTGRESQL_LIBRARY and POSTGRESQL_INCLUDE_DIR variables
# only show up under the advanced options in the gui cmake applications.
MARK_AS_ADVANCED( POSTGRESQL_LIBRARY POSTGRESQL_INCLUDE_DIR )
# - Try to find SCTP library and headers
# Once done, this will define
#
# SCTP_FOUND - system has SCTP
# SCTP_INCLUDE_DIR - the SCTP include directories
# SCTP_LIBRARIES - link these to use SCTP
if (SCTP_INCLUDE_DIR AND SCTP_LIBRARIES)
set(SCTP_FIND_QUIETLY TRUE)
endif (SCTP_INCLUDE_DIR AND SCTP_LIBRARIES)
# Include dir
find_path(SCTP_INCLUDE_DIR
NAMES netinet/sctp.h
)
# Library
find_library(SCTP_LIBRARY
NAMES sctp
)
# Set the include dir variables and the libraries and let libfind_process do the rest.
# NOTE: Singular variables for this library, plural for libraries this lib depends on.
#set(SCTP_PROCESS_INCLUDES SCTP_INCLUDE_DIR)
#set(SCTP_PROCESS_LIBS SCTP_LIBRARY)
#libfind_process(SCTP)
# handle the QUIETLY and REQUIRED arguments and set SCTP_FOUND to TRUE if
# all listed variables are TRUE
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(SCTP DEFAULT_MSG SCTP_LIBRARY SCTP_INCLUDE_DIR)
# If we successfully found the sctp library then add the library to the
# SCTP_LIBRARIES cmake variable, otherwise set SCTP_LIBRARIES to nothing.
IF(SCTP_FOUND)
SET( SCTP_LIBRARIES ${SCTP_LIBRARY} )
ELSE(SCTP_FOUND)
SET( SCTP_LIBRARIES )
ENDIF(SCTP_FOUND)
# Lastly make it so that the SCTP_LIBRARY and SCTP_INCLUDE_DIR variables
# only show up under the advanced options in the gui cmake applications.
MARK_AS_ADVANCED( SCTP_LIBRARY SCTP_INCLUDE_DIR )
# Updated FindThreads.cmake that supports pthread-win32
# Downloaded from http://www.vtk.org/Bug/bug_view_advanced_page.php?bug_id=6399
# - This module determines the thread library of the system.
#
# The following variables are set
# CMAKE_THREAD_LIBS_INIT - the thread library
# CMAKE_USE_SPROC_INIT - are we using sproc?
# CMAKE_USE_WIN32_THREADS_INIT - using WIN32 threads?
# CMAKE_USE_PTHREADS_INIT - are we using pthreads
# CMAKE_HP_PTHREADS_INIT - are we using hp pthreads
#
# If use of pthreads-win32 is desired, the following variables
# can be set.
#
# THREADS_USE_PTHREADS_WIN32 -
# Setting this to true searches for the pthreads-win32
# port (since CMake 2.8.0)
#
# THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME
# C = no exceptions (default)
# (NOTE: This is the default scheme on most POSIX thread
# implementations and what you should probably be using)
# CE = C++ Exception Handling
# SE = Structure Exception Handling (MSVC only)
# (NOTE: Changing this option from the default may affect
# the portability of your application. See pthreads-win32
# documentation for more details.)
#
#======================================================
# Example usage where threading library
# is provided by the system:
#
# find_package(Threads REQUIRED)
# add_executable(foo foo.cc)
# target_link_libraries(foo ${CMAKE_THREAD_LIBS_INIT})
#
# Example usage if pthreads-win32 is desired on Windows
# or a system provided thread library:
#
# set(THREADS_USE_PTHREADS_WIN32 true)
# find_package(Threads REQUIRED)
# include_directories(${THREADS_PTHREADS_INCLUDE_DIR})
#
# add_executable(foo foo.cc)
# target_link_libraries(foo ${CMAKE_THREAD_LIBS_INIT})
#
INCLUDE (CheckIncludeFiles)
INCLUDE (CheckLibraryExists)
SET(Threads_FOUND FALSE)
IF(WIN32 AND NOT CYGWIN AND THREADS_USE_PTHREADS_WIN32)
SET(_Threads_ptwin32 true)
ENDIF()
# Do we have sproc?
IF(CMAKE_SYSTEM MATCHES IRIX)
CHECK_INCLUDE_FILES("sys/types.h;sys/prctl.h" CMAKE_HAVE_SPROC_H)
ENDIF()
IF(CMAKE_HAVE_SPROC_H)
# We have sproc
SET(CMAKE_USE_SPROC_INIT 1)
ELSEIF(_Threads_ptwin32)
IF(NOT DEFINED THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME)
# Assign the default scheme
SET(THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME "C")
ELSE()
# Validate the scheme specified by the user
IF(NOT THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME STREQUAL "C" AND
NOT THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME STREQUAL "CE" AND
NOT THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME STREQUAL "SE")
MESSAGE(FATAL_ERROR "See documentation for FindPthreads.cmake, only C, CE, and SE modes are allowed")
ENDIF()
IF(NOT MSVC AND THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME STREQUAL "SE")
MESSAGE(FATAL_ERROR "Structured Exception Handling is only allowed for MSVC")
ENDIF(NOT MSVC AND THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME STREQUAL "SE")
ENDIF()
FIND_PATH(THREADS_PTHREADS_INCLUDE_DIR pthread.h)
# Determine the library filename
IF(MSVC)
SET(_Threads_pthreads_libname
pthreadV${THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME}2)
ELSEIF(MINGW)
SET(_Threads_pthreads_libname
pthreadG${THREADS_PTHREADS_WIN32_EXCEPTION_SCHEME}2)
ELSE()
MESSAGE(FATAL_ERROR "This should never happen")
ENDIF()
# Use the include path to help find the library if possible
SET(_Threads_lib_paths "")
IF(THREADS_PTHREADS_INCLUDE_DIR)
GET_FILENAME_COMPONENT(_Threads_root_dir
${THREADS_PTHREADS_INCLUDE_DIR} PATH)
SET(_Threads_lib_paths ${_Threads_root_dir}/lib)
ENDIF()
FIND_LIBRARY(THREADS_PTHREADS_WIN32_LIBRARY
NAMES ${_Threads_pthreads_libname}
PATHS ${_Threads_lib_paths}
DOC "The Portable Threads Library for Win32"
NO_SYSTEM_PATH
)
IF(THREADS_PTHREADS_INCLUDE_DIR AND THREADS_PTHREADS_WIN32_LIBRARY)
MARK_AS_ADVANCED(THREADS_PTHREADS_INCLUDE_DIR)
SET(CMAKE_THREAD_LIBS_INIT ${THREADS_PTHREADS_WIN32_LIBRARY})
SET(CMAKE_HAVE_THREADS_LIBRARY 1)
SET(Threads_FOUND TRUE)
ENDIF()
MARK_AS_ADVANCED(THREADS_PTHREADS_WIN32_LIBRARY)
ELSE()
# Do we have pthreads?
CHECK_INCLUDE_FILES("pthread.h" CMAKE_HAVE_PTHREAD_H)
IF(CMAKE_HAVE_PTHREAD_H)
#
# We have pthread.h
# Let's check for the library now.
#
SET(CMAKE_HAVE_THREADS_LIBRARY)
IF(NOT THREADS_HAVE_PTHREAD_ARG)
# Do we have -lpthreads
CHECK_LIBRARY_EXISTS(pthreads pthread_create "" CMAKE_HAVE_PTHREADS_CREATE)
IF(CMAKE_HAVE_PTHREADS_CREATE)
SET(CMAKE_THREAD_LIBS_INIT "-lpthreads")
SET(CMAKE_HAVE_THREADS_LIBRARY 1)
SET(Threads_FOUND TRUE)
ENDIF()
# Ok, how about -lpthread
CHECK_LIBRARY_EXISTS(pthread pthread_create "" CMAKE_HAVE_PTHREAD_CREATE)
IF(CMAKE_HAVE_PTHREAD_CREATE)
SET(CMAKE_THREAD_LIBS_INIT "-lpthread")
SET(Threads_FOUND TRUE)
SET(CMAKE_HAVE_THREADS_LIBRARY 1)
ENDIF()
IF(CMAKE_SYSTEM MATCHES "SunOS.*")
# On sun also check for -lthread
CHECK_LIBRARY_EXISTS(thread thr_create "" CMAKE_HAVE_THR_CREATE)
IF(CMAKE_HAVE_THR_CREATE)
SET(CMAKE_THREAD_LIBS_INIT "-lthread")
SET(CMAKE_HAVE_THREADS_LIBRARY 1)
SET(Threads_FOUND TRUE)
ENDIF()
ENDIF(CMAKE_SYSTEM MATCHES "SunOS.*")
ENDIF(NOT THREADS_HAVE_PTHREAD_ARG)
IF(NOT CMAKE_HAVE_THREADS_LIBRARY)
# If we did not find -lpthreads, -lpthread, or -lthread, look for -pthread
IF("THREADS_HAVE_PTHREAD_ARG" MATCHES "^THREADS_HAVE_PTHREAD_ARG")
MESSAGE(STATUS "Check if compiler accepts -pthread")
TRY_RUN(THREADS_PTHREAD_ARG THREADS_HAVE_PTHREAD_ARG
${CMAKE_BINARY_DIR}
${CMAKE_ROOT}/Modules/CheckForPthreads.c
CMAKE_FLAGS -DLINK_LIBRARIES:STRING=-pthread
COMPILE_OUTPUT_VARIABLE OUTPUT)
IF(THREADS_HAVE_PTHREAD_ARG)
IF(THREADS_PTHREAD_ARG MATCHES "^2$")
SET(Threads_FOUND TRUE)
MESSAGE(STATUS "Check if compiler accepts -pthread - yes")
ELSE()
MESSAGE(STATUS "Check if compiler accepts -pthread - no")
FILE(APPEND
${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Determining if compiler accepts -pthread returned ${THREADS_PTHREAD_ARG} instead of 2. The compiler had the following output:\n${OUTPUT}\n\n")
ENDIF()
ELSE()
MESSAGE(STATUS "Check if compiler accepts -pthread - no")
FILE(APPEND
${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Determining if compiler accepts -pthread failed with the following output:\n${OUTPUT}\n\n")
ENDIF()
ENDIF("THREADS_HAVE_PTHREAD_ARG" MATCHES "^THREADS_HAVE_PTHREAD_ARG")
IF(THREADS_HAVE_PTHREAD_ARG)
SET(Threads_FOUND TRUE)
SET(CMAKE_THREAD_LIBS_INIT "-pthread")
ENDIF()
ENDIF(NOT CMAKE_HAVE_THREADS_LIBRARY)
ENDIF(CMAKE_HAVE_PTHREAD_H)
ENDIF()
IF(CMAKE_THREAD_LIBS_INIT)
SET(CMAKE_USE_PTHREADS_INIT 1)
SET(Threads_FOUND TRUE)
ENDIF()
IF(CMAKE_SYSTEM MATCHES "Windows"
AND NOT THREADS_USE_PTHREADS_WIN32)
SET(CMAKE_USE_WIN32_THREADS_INIT 1)
SET(Threads_FOUND TRUE)
ENDIF()
IF(CMAKE_USE_PTHREADS_INIT)
IF(CMAKE_SYSTEM MATCHES "HP-UX-*")
# Use libcma if it exists and can be used. It provides more
# symbols than the plain pthread library. CMA threads
# have actually been deprecated:
# http://docs.hp.com/en/B3920-90091/ch12s03.html#d0e11395
# http://docs.hp.com/en/947/d8.html
# but we need to maintain compatibility here.
# The CMAKE_HP_PTHREADS setting actually indicates whether CMA threads
# are available.
CHECK_LIBRARY_EXISTS(cma pthread_attr_create "" CMAKE_HAVE_HP_CMA)
IF(CMAKE_HAVE_HP_CMA)
SET(CMAKE_THREAD_LIBS_INIT "-lcma")
SET(CMAKE_HP_PTHREADS_INIT 1)
SET(Threads_FOUND TRUE)
ENDIF(CMAKE_HAVE_HP_CMA)
SET(CMAKE_USE_PTHREADS_INIT 1)
ENDIF()
IF(CMAKE_SYSTEM MATCHES "OSF1-V*")
SET(CMAKE_USE_PTHREADS_INIT 0)
SET(CMAKE_THREAD_LIBS_INIT )
ENDIF()
IF(CMAKE_SYSTEM MATCHES "CYGWIN_NT*")
SET(CMAKE_USE_PTHREADS_INIT 1)
SET(Threads_FOUND TRUE)
SET(CMAKE_THREAD_LIBS_INIT )
SET(CMAKE_USE_WIN32_THREADS_INIT 0)
ENDIF()
ENDIF(CMAKE_USE_PTHREADS_INIT)
INCLUDE(FindPackageHandleStandardArgs)
IF(_Threads_ptwin32)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Threads DEFAULT_MSG
THREADS_PTHREADS_WIN32_LIBRARY THREADS_PTHREADS_INCLUDE_DIR)
ELSE()
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Threads DEFAULT_MSG Threads_FOUND)
ENDIF()
src=${dir}
obj-m += ${name}.o
${name}-y += ${objs}
ccflags-y += ${module_cc_opt}
# Create a rule for obtaining one file by copying another one
function(rule_copy_file target_file source_file)
add_custom_command(OUTPUT ${target_file}
COMMAND cp -p ${source_file} ${target_file}
DEPENDS ${source_file}
)
endfunction(rule_copy_file target_file source_file)
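# Example usage (a minimal sketch; filenames and the target name are illustrative):
#
#   rule_copy_file("${CMAKE_CURRENT_BINARY_DIR}/module.conf"
#       "${CMAKE_CURRENT_SOURCE_DIR}/module.conf")
#   add_custom_target(copy_conf ALL
#       DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/module.conf")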
# rule_copy_source([source_dir] file ...)
#
# Create a rule for obtaining file(s) in the binary tree by copying them from the source tree.
#
# Files are given relative to ${source_dir}, if it is set, or
# relative to ${CMAKE_CURRENT_SOURCE_DIR}.
#
# Files will be copied into ${CMAKE_CURRENT_BINARY_DIR} with the same
# relative paths.
#
# ${source_dir} should be an absolute path (that is, start with '/').
# Otherwise the first argument is treated as the first file to copy.
function(rule_copy_source file)
string(REGEX MATCH "^/" is_abs_path ${file})
if(is_abs_path)
set(source_dir ${file})
set(files ${ARGN})
else(is_abs_path)
set(source_dir ${CMAKE_CURRENT_SOURCE_DIR})
set(files ${file} ${ARGN})
endif(is_abs_path)
foreach(file_real ${files})
rule_copy_file("${CMAKE_CURRENT_BINARY_DIR}/${file_real}"
${source_dir}/${file_real})
endforeach(file_real ${files})
endfunction(rule_copy_source file)
# to_abs_path(output_var path [...])
#
# Convert a relative file path to an absolute path:
# use the path in the source tree if the file already exists there,
# otherwise use the path in the binary tree.
# If the initial path is already absolute, return it as-is.
function(to_abs_path output_var)
set(result)
foreach(path ${ARGN})
string(REGEX MATCH "^/" _is_abs_path ${path})
if(_is_abs_path)
list(APPEND result ${path})
else(_is_abs_path)
file(GLOB to_abs_path_file
"${CMAKE_CURRENT_SOURCE_DIR}/${path}"
)
if(NOT to_abs_path_file)
set (to_abs_path_file "${CMAKE_CURRENT_BINARY_DIR}/${path}")
endif(NOT to_abs_path_file)
list(APPEND result ${to_abs_path_file})
endif(_is_abs_path)
endforeach(path ${ARGN})
set("${output_var}" ${result} PARENT_SCOPE)
endfunction(to_abs_path output_var path)
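# Example usage (a minimal sketch; filenames are illustrative):
#
#   to_abs_path(abs_sources main.c generated.c)
#   # 'main.c' resolves into the source tree if it exists there,
#   # 'generated.c' falls back to the binary tree.
#   message(STATUS "Absolute sources: ${abs_sources}")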
# is_path_inside_dir(output_var dir path)
#
# Set output_var to true if path is an absolute path inside the given directory.
# NOTE: the path should be absolute.
macro(is_path_inside_dir output_var dir path)
file(RELATIVE_PATH _rel_path ${dir} ${path})
string(REGEX MATCH "^\\.\\." _is_not_inside_dir ${_rel_path})
if(_is_not_inside_dir)
set(${output_var} "FALSE")
else(_is_not_inside_dir)
set(${output_var} "TRUE")
endif(_is_not_inside_dir)
endmacro(is_path_inside_dir output_var dir path)
# Write the given content to the file.
#
# If the file already exists and its content is the same as the given one,
# the file is not rewritten, so its write timestamp remains unchanged.
function(file_update filename content)
if(EXISTS "${filename}")
file(READ "${filename}" old_content)
if(old_content STREQUAL "${content}")
return()
endif(old_content STREQUAL "${content}")
endif(EXISTS "${filename}")
# File doesn't exist or its content differs.
file(WRITE "${filename}" "${content}")
endfunction(file_update filename content)
# Write the given content to the file in APPEND mode.
#
# For appending we use a position variable <pos>, which should be initialized
# to 0 for every new build process and which is updated every time
# this function is called.
# If the file already exists and its content at the given <pos> is the same as
# the written one, the file is not rewritten, so its write timestamp remains unchanged.
#
# Note that the file will not be rewritten if a new build process issues fewer
# APPEND actions than the one which created the file.
# A similar problem exists for file_update() and even for the built-in
# configure_file() command: the file will not be removed if a new build process
# does not call configure_file() for it.
function(file_update_append filename content POS)
set(pos "${${POS}}")
string(LENGTH "${content}" len)
# Update the output variable first.
# This allows using return() when nothing else needs to be done.
math(EXPR pos_new "${pos}+${len}")
set(${POS} "${pos_new}" PARENT_SCOPE)
if(EXISTS "${filename}")
file(READ "${filename}" old_content LIMIT "${len}" OFFSET "${pos}")
if(old_content STREQUAL "${content}")
return()
elseif(old_content STREQUAL "")
file(APPEND "${filename}" "${content}")
return()
endif(old_content STREQUAL "${content}")
else(EXISTS "${filename}")
if(NOT pos EQUAL 0)
message(FATAL_ERROR "Appending to non-zero position to non-existent file.")
endif(NOT pos EQUAL 0)
endif(EXISTS "${filename}")
# File doesn't exist or its content differs.
if(NOT pos EQUAL 0)
file(READ "${filename}" prefix LIMIT "${pos}")
else(NOT pos EQUAL 0)
set(prefix)
endif(NOT pos EQUAL 0)
file(WRITE "${filename}" "${prefix}${content}")
endfunction(file_update_append filename content POS)
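# Example usage of file_update_append() (a minimal sketch; the filename and
# content are illustrative):
#
#   set(kbuild_pos 0)
#   file_update_append("${CMAKE_CURRENT_BINARY_DIR}/Kbuild" "obj-m += foo.o\n" kbuild_pos)
#   file_update_append("${CMAKE_CURRENT_BINARY_DIR}/Kbuild" "foo-y += bar.o\n" kbuild_pos)
#   # The file is rewritten only when its content actually changes, so its
#   # timestamp stays stable across reconfigurations.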
# Common mechanism for outputting a status message
# when performing various checks.
#
# Typical usage:
#
# check_begin("Checking <...>")
# if(NOT <check-already-has-been-done>)
# check_try() # Here the message is printed
# # Perform the check (try_compile(), etc.)
# endif(NOT <check-already-has-been-done>)
# check_end("<check-result>") # Here the message is printed with the result.
#
# Should be called (unconditionally) when a new check is issued.
# Note that the given @status_msg is not printed at that step.
function(check_begin status_msg)
set(_check_status_msg ${status_msg} PARENT_SCOPE)
set(_check_has_tries PARENT_SCOPE)
endfunction(check_begin status_msg)
# Should be called before every real check, that is, one whose result does
# not come from comparing cache variables.
#
# The first call of this function triggers printing of the @status_msg
# passed to check_begin().
function(check_try)
if(NOT _check_has_tries)
message(STATUS "${_check_status_msg}")
set(_check_has_tries "1" PARENT_SCOPE)
endif(NOT _check_has_tries)
endfunction(check_try)
# Should be called when the check ends; @result_msg should be a short
# description of the check result.
# If any check_try() has been issued before,
# "@status_msg - @result_msg"
# will be printed.
# Otherwise,
# "@status_msg - [cached] @result_msg"
# will be printed, and it will be the only message for that check.
function(check_end result_msg)
if(NOT _check_has_tries)
message(STATUS "${_check_status_msg} [cached] - ${result_msg}")
else(NOT _check_has_tries)
message(STATUS "${_check_status_msg} - ${result_msg}")
endif(NOT _check_has_tries)
set(_check_status_msg PARENT_SCOPE)
set(_check_has_tries PARENT_SCOPE)
endfunction(check_end result_msg)
# set_bool_string(var true_string false_string value [CACHE ... | PARENT_SCOPE])
#
# Set a variable to one of two strings according to whether some value is true.
#
# If 'value' evaluates to true, 'var' will be set to 'true_string',
# otherwise 'var' will be set to 'false_string'.
# Like the standard set(), the macro accepts CACHE and PARENT_SCOPE modifiers.
#
# Useful for forming a meaningful value for a cache variable that holds the
# result of some operation.
# May also be used to form the message for check_end().
macro(set_bool_string var true_string false_string value)
# Macro parameters are not variables, so they cannot be tested
# using 'if(value)'.
# Using 'if(${value})' leads to warnings since CMake 2.6.4 when ${value}
# is a boolean constant
# (because until 2.6.4 'if(value)' always dereferenced the value).
# So copy 'value' into a local variable in order to test it.
set(_set_bool_string_value ${value})
if(_set_bool_string_value)
set(${var} "${true_string}" ${ARGN})
else()
set(${var} "${false_string}" ${ARGN})
endif()
endmacro(set_bool_string)
# set_zero_string(var zero_string nonzero_string value [CACHE ... | PARENT_SCOPE])
#
# Set a variable to one of two strings according to whether some value is zero.
#
# If 'value' is '0' (precisely), 'var' will be set to 'zero_string',
# otherwise 'var' will be set to 'nonzero_string'.
# Like the standard set(), the macro accepts CACHE and PARENT_SCOPE modifiers.
#
# Useful for forming a meaningful value for a cache variable that holds the
# result of execute_process().
# May also be used to form the message for check_end().
macro(set_zero_string var zero_string nonzero_string value)
set(_set_zero_string_value ${value})
if(_set_zero_string_value EQUAL "0")
set(${var} ${zero_string} ${ARGN})
else()
set(${var} ${nonzero_string} ${ARGN})
endif()
endmacro(set_zero_string)
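# Example usage of the check_*() helpers together with set_bool_string()
# (a minimal sketch; the program name is illustrative):
#
#   check_begin("Looking for the 'foo' program")
#   if(NOT DEFINED FOO_EXECUTABLE)
#     check_try()
#     find_program(FOO_EXECUTABLE foo)
#   endif()
#   set_bool_string(foo_result "found" "not found" "${FOO_EXECUTABLE}")
#   check_end("${foo_result}")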
# Declare variables for path prefixes for different types of files.
# NB: depends on 'multi_kernel' and 'uninstall_target'.
# fill_install_prefixes(<project_name> <project_prefix>
# [BASE_INSTALL_PREFIX <base_install_prefix>]
# [KERNEL]
# )
#
# Set up variables <project_prefix>_* to the install prefixes for
# different project components.
# Precisely, variables with the following suffixes are set:
# INSTALL_PREFIX_EXEC - executables
# INSTALL_PREFIX_READONLY - readonly files
# INSTALL_PREFIX_GLOBAL_CONF - global configuration files
# INSTALL_PREFIX_LIB - libraries
# INSTALL_INCLUDE_DIR - include directory (for compiler flags)
# INSTALL_PREFIX_INCLUDE - include files
# INSTALL_PREFIX_TEMP_SESSION - temporary files (exist until the system restarts)
# INSTALL_PREFIX_TEMP - temporary files (preserved even when the system restarts)
# INSTALL_PREFIX_STATE - files which describe the current state of the project.
# INSTALL_PREFIX_CACHE - cache files
# INSTALL_PREFIX_VAR - other modifiable files
# INSTALL_PREFIX_DOC - documentation files
# INSTALL_PREFIX_EXAMPLES - example files
#
# With the 'KERNEL' option enabled, paths for kernel-related files are also set:
# INSTALL_KINCLUDE_DIR - include directory used when building kernel components
# INSTALL_PREFIX_KINCLUDE - include files for the kernel.
#
# Additionally, with the 'KERNEL' option enabled, several variables are set to
# paths which contain a "%kernel%" pattern.
# These paths are intended for kernel-dependent files; to make the paths
# complete, one should replace the "%kernel%" substring with the version of the
# kernel.
# The following kernel-dependent paths are set (suffixes only):
# KERNEL_INSTALL_PREFIX_KMODULE - directory for installing kernel modules
# KERNEL_INSTALL_PREFIX_KSYMVERS - directory for installing kernel modules' symvers files.
# KERNEL_INSTALL_INCLUDE_KERNEL_DIR - include directory with kernel-dependent headers.
# KERNEL_INSTALL_PREFIX_INCLUDE_KERNEL - include files which depend on the kernel.
#
# If the 'BASE_INSTALL_PREFIX' option is given, all paths above are
# calculated using <base_install_prefix> as the base prefix.
# Otherwise, CMAKE_INSTALL_PREFIX is used for that purpose.
#
# Additionally,
# INSTALL_TYPE
# variable(suffix) is set to one of:
# - "GLOBAL_OPT" - install into "/opt",
# - "GLOBAL" - global installation except one into "/opt",
# - "LOCAL" - local installation.
function(fill_install_prefixes project_name project_prefix)
cmake_parse_arguments(fip "KERNEL" "BASE_INSTALL_PREFIX" "" ${ARGN})
if(fip_UNPARSED_ARGUMENTS)
list(GET fip_UNPARSED_ARGUMENTS 0 exceeded_arg)
message(SEND_ERROR "Exceeded argument: ${exceeded_arg}")
endif(fip_UNPARSED_ARGUMENTS)
if(NOT fip_BASE_INSTALL_PREFIX)
set(fip_BASE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
endif(NOT fip_BASE_INSTALL_PREFIX)
# Follow conventions about paths listed in
# devel-docs/general/path_conventions.txt
# in kedr-devel package.
# Determine type of installation
if(fip_BASE_INSTALL_PREFIX MATCHES "^/opt")
set(fip_INSTALL_TYPE "GLOBAL_OPT")
elseif(fip_BASE_INSTALL_PREFIX MATCHES "^/usr"
OR fip_BASE_INSTALL_PREFIX STREQUAL "/"
)
set(fip_INSTALL_TYPE "GLOBAL")
else()
set(fip_INSTALL_TYPE "LOCAL")
endif()
# 1
if(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
set(fip_INSTALL_PREFIX_EXEC "/opt/${project_name}/bin")
else(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
set(fip_INSTALL_PREFIX_EXEC "${fip_BASE_INSTALL_PREFIX}/bin")
endif(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
# 2
set(fip_INSTALL_PREFIX_EXEC_AUX
"${fip_BASE_INSTALL_PREFIX}/lib/${project_name}"
)
# 3
set(fip_INSTALL_PREFIX_READONLY
"${fip_BASE_INSTALL_PREFIX}/share/${project_name}"
)
# 4
set(fip_INSTALL_PREFIX_MANPAGE
"${fip_BASE_INSTALL_PREFIX}/share/man"
)
# 5
if(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
set(fip_INSTALL_PREFIX_GLOBAL_CONF "/etc/opt/${project_name}")
elseif(fip_INSTALL_TYPE STREQUAL "GLOBAL")
set(fip_INSTALL_PREFIX_GLOBAL_CONF "/etc/${project_name}")
else(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
set(fip_INSTALL_PREFIX_GLOBAL_CONF
"${fip_BASE_INSTALL_PREFIX}/etc/${project_name}"
)
endif(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
# 6
set(fip_INSTALL_PREFIX_LIB "${fip_BASE_INSTALL_PREFIX}/lib")
# 7
set(fip_INSTALL_PREFIX_LIB_AUX
"${fip_BASE_INSTALL_PREFIX}/lib/${project_name}"
)
# 8
set(fip_INSTALL_INCLUDE_DIR "${fip_BASE_INSTALL_PREFIX}/include")
set(fip_INSTALL_PREFIX_INCLUDE
"${fip_INSTALL_INCLUDE_DIR}/${project_name}"
)
# 9
set(fip_INSTALL_PREFIX_TEMP_SESSION "/tmp/${project_name}")
# 10
if(fip_INSTALL_TYPE MATCHES "GLOBAL")
set(fip_INSTALL_PREFIX_TEMP "/var/tmp/${project_name}")
else(fip_INSTALL_TYPE MATCHES "GLOBAL")
set(fip_INSTALL_PREFIX_TEMP
"${fip_BASE_INSTALL_PREFIX}/var/tmp/${project_name}"
)
endif(fip_INSTALL_TYPE MATCHES "GLOBAL")
# 11
if(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
set(fip_INSTALL_PREFIX_STATE
"/var/opt/${project_name}/lib/${project_name}"
)
elseif(fip_INSTALL_TYPE STREQUAL "GLOBAL")
set(fip_INSTALL_PREFIX_STATE "/var/lib/${project_name}")
else(fip_INSTALL_TYPE STREQUAL "GLOBAL")
set(fip_INSTALL_PREFIX_STATE
"${fip_BASE_INSTALL_PREFIX}/var/lib/${project_name}"
)
endif(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
# 12
if(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
set(fip_INSTALL_PREFIX_CACHE
"/var/opt/${project_name}/cache/${project_name}"
)
elseif(fip_INSTALL_TYPE STREQUAL "GLOBAL")
set(fip_INSTALL_PREFIX_CACHE "/var/cache/${project_name}")
else(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
set(fip_INSTALL_PREFIX_CACHE
"${fip_BASE_INSTALL_PREFIX}/var/cache/${project_name}"
)
endif(fip_INSTALL_TYPE STREQUAL "GLOBAL_OPT")
# 13
if(fip_INSTALL_TYPE MATCHES "GLOBAL")
set(fip_INSTALL_PREFIX_VAR "/var/opt/${project_name}")
else(fip_INSTALL_TYPE MATCHES "GLOBAL")
set(fip_INSTALL_PREFIX_VAR
"${fip_BASE_INSTALL_PREFIX}/var/${project_name}"
)
endif(fip_INSTALL_TYPE MATCHES "GLOBAL")
# 14
set(fip_INSTALL_PREFIX_DOC
"${fip_BASE_INSTALL_PREFIX}/share/doc/${project_name}"
)
# Set derivative install path and prefixes
# additional, 4
set(fip_INSTALL_PREFIX_EXAMPLES
"${fip_INSTALL_PREFIX_READONLY}/examples")
# Export symbols to the outer scope
foreach(suffix
INSTALL_TYPE
INSTALL_PREFIX_EXEC
INSTALL_PREFIX_EXEC_AUX
INSTALL_PREFIX_READONLY
INSTALL_PREFIX_MANPAGE
INSTALL_PREFIX_GLOBAL_CONF
INSTALL_PREFIX_LIB
INSTALL_PREFIX_LIB_AUX
INSTALL_INCLUDE_DIR
INSTALL_PREFIX_INCLUDE
INSTALL_PREFIX_TEMP_SESSION
INSTALL_PREFIX_TEMP
INSTALL_PREFIX_STATE
INSTALL_PREFIX_CACHE
INSTALL_PREFIX_VAR
INSTALL_PREFIX_DOC
INSTALL_PREFIX_EXAMPLES
)
set(${project_prefix}_${suffix} "${fip_${suffix}}" PARENT_SCOPE)
endforeach(suffix)
if(fip_KERNEL)
# Set derivative install path and prefixes
# additional, 1
if(fip_INSTALL_TYPE MATCHES GLOBAL)
set(fip_KERNEL_INSTALL_PREFIX_KMODULE
"/lib/modules/%kernel%/extra"
)
else(fip_INSTALL_TYPE MATCHES GLOBAL)
set(fip_KERNEL_INSTALL_PREFIX_KMODULE
"${fip_INSTALL_PREFIX_LIB}/modules/%kernel%/extra"
)
endif(fip_INSTALL_TYPE MATCHES GLOBAL)
# additional, 2
set(fip_KERNEL_INSTALL_PREFIX_KSYMVERS
"${fip_INSTALL_PREFIX_LIB}/modules/%kernel%/symvers"
)
# additional, 3
set(fip_INSTALL_KINCLUDE_DIR "${fip_INSTALL_INCLUDE_DIR}")
set(fip_INSTALL_PREFIX_KINCLUDE "${fip_INSTALL_PREFIX_INCLUDE}")
# Kernel include files which depend on the kernel version.
# This prefix is not listed in path conventions.
set(fip_KERNEL_INSTALL_INCLUDE_KERNEL_DIR
"${fip_BASE_INSTALL_PREFIX}/include-kernel/%kernel%"
)
set(fip_KERNEL_INSTALL_PREFIX_INCLUDE_KERNEL
"${fip_KERNEL_INSTALL_INCLUDE_KERNEL_DIR}/${project_name}-kernel"
)
# Export symbols to the outer scope
foreach(suffix
KERNEL_INSTALL_PREFIX_KMODULE
KERNEL_INSTALL_PREFIX_KSYMVERS
INSTALL_KINCLUDE_DIR
INSTALL_PREFIX_KINCLUDE
KERNEL_INSTALL_INCLUDE_KERNEL_DIR
KERNEL_INSTALL_PREFIX_INCLUDE_KERNEL
)
set(${project_prefix}_${suffix} "${fip_${suffix}}" PARENT_SCOPE)
endforeach(suffix)
endif(fip_KERNEL)
endfunction(fill_install_prefixes project_name project_prefix)
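# Example usage (a minimal sketch; the project name and prefix are illustrative):
#
#   fill_install_prefixes("mymodule" MYMODULE KERNEL)
#   message(STATUS "Executables install to ${MYMODULE_INSTALL_PREFIX_EXEC}")
#   message(STATUS "Kernel modules install to ${MYMODULE_KERNEL_INSTALL_PREFIX_KMODULE}")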
########################################################################
# Kernel-dependent paths.
#
# Some deliverables may depend on the Linux kernel.
#
# For the "one user installation for several kernels" paradigm to work,
# the installation directory for such deliverables should include a
# kernel-version part (like "3.10.2-generic").
#
# So, components installed by a user installation can determine at runtime
# which kernel-dependent deliverable should be used on the currently running system.
# They perform this selection using 'uname -r'.
#
# To define variables representing kernel-dependent directories,
# we use strings containing the "%kernel%" stem.
# kernel_path(kernel_version RESULT_VARIABLE pattern ...)
#
# Form a concrete path representation from kernel-dependent pattern(s).
# Replace occurrences of %kernel% in the pattern(s) with the given @kernel_version string.
# The result is stored in RESULT_VARIABLE.
#
# @kernel_version may be a concrete version of the kernel,
# or a variable reference in some language.
macro(kernel_path kernel_version RESULT_VARIABLE pattern)
string(REPLACE "%kernel%" "${kernel_version}" ${RESULT_VARIABLE} ${pattern} ${ARGN})
endmacro(kernel_path kernel_version RESULT_VARIABLE pattern)
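# Example usage (a minimal sketch; MYMODULE_KERNEL_INSTALL_PREFIX_KMODULE is
# the illustrative prefix from the fill_install_prefixes() example above and
# Kbuild_VERSION_STRING comes from the Kbuild module):
#
#   kernel_path("${Kbuild_VERSION_STRING}" module_install_dir
#       "${MYMODULE_KERNEL_INSTALL_PREFIX_KMODULE}")
#   # "%kernel%" in the pattern is replaced with the concrete kernel version.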
cmake_minimum_required(VERSION 3.1)
##
## PROJECT
## name and version
##
project(nlohmann_json VERSION 3.9.1 LANGUAGES CXX)
##
## MAIN_PROJECT CHECK
## determine if nlohmann_json is built as a subproject (using add_subdirectory) or if it is the main project
##
set(MAIN_PROJECT OFF)
if (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
set(MAIN_PROJECT ON)
endif()
##
## INCLUDE
##
##
include(ExternalProject)
##
## OPTIONS
##
if (POLICY CMP0077)
# Allow CMake 3.13+ to override options when using FetchContent / add_subdirectory.
cmake_policy(SET CMP0077 NEW)
endif ()
option(JSON_BuildTests "Build the unit tests when BUILD_TESTING is enabled." ${MAIN_PROJECT})
option(JSON_Install "Install CMake targets during install step." ${MAIN_PROJECT})
option(JSON_MultipleHeaders "Use non-amalgamated version of the library." OFF)
option(JSON_ImplicitConversions "Enable implicit conversions." ON)
##
## CONFIGURATION
##
include(GNUInstallDirs)
set(NLOHMANN_JSON_TARGET_NAME ${PROJECT_NAME})
set(NLOHMANN_JSON_CONFIG_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}" CACHE INTERNAL "")
set(NLOHMANN_JSON_INCLUDE_INSTALL_DIR "${CMAKE_INSTALL_INCLUDEDIR}")
set(NLOHMANN_JSON_TARGETS_EXPORT_NAME "${PROJECT_NAME}Targets")
set(NLOHMANN_JSON_CMAKE_CONFIG_TEMPLATE "cmake/config.cmake.in")
set(NLOHMANN_JSON_CMAKE_CONFIG_DIR "${CMAKE_CURRENT_BINARY_DIR}")
set(NLOHMANN_JSON_CMAKE_VERSION_CONFIG_FILE "${NLOHMANN_JSON_CMAKE_CONFIG_DIR}/${PROJECT_NAME}ConfigVersion.cmake")
set(NLOHMANN_JSON_CMAKE_PROJECT_CONFIG_FILE "${NLOHMANN_JSON_CMAKE_CONFIG_DIR}/${PROJECT_NAME}Config.cmake")
set(NLOHMANN_JSON_CMAKE_PROJECT_TARGETS_FILE "${NLOHMANN_JSON_CMAKE_CONFIG_DIR}/${PROJECT_NAME}Targets.cmake")
set(NLOHMANN_JSON_PKGCONFIG_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
if (JSON_MultipleHeaders)
set(NLOHMANN_JSON_INCLUDE_BUILD_DIR "${PROJECT_SOURCE_DIR}/include/")
message(STATUS "Using the multi-header code from ${NLOHMANN_JSON_INCLUDE_BUILD_DIR}")
else()
set(NLOHMANN_JSON_INCLUDE_BUILD_DIR "${PROJECT_SOURCE_DIR}/single_include/")
message(STATUS "Using the single-header code from ${NLOHMANN_JSON_INCLUDE_BUILD_DIR}")
endif()
if (NOT JSON_ImplicitConversions)
message(STATUS "Implicit conversions are disabled")
endif()
##
## TARGET
## create target and add include path
##
add_library(${NLOHMANN_JSON_TARGET_NAME} INTERFACE)
add_library(${PROJECT_NAME}::${NLOHMANN_JSON_TARGET_NAME} ALIAS ${NLOHMANN_JSON_TARGET_NAME})
if (${CMAKE_VERSION} VERSION_LESS "3.8.0")
target_compile_features(${NLOHMANN_JSON_TARGET_NAME} INTERFACE cxx_range_for)
else()
target_compile_features(${NLOHMANN_JSON_TARGET_NAME} INTERFACE cxx_std_11)
endif()
target_compile_definitions(
${NLOHMANN_JSON_TARGET_NAME}
INTERFACE
JSON_USE_IMPLICIT_CONVERSIONS=$<BOOL:${JSON_ImplicitConversions}>
)
target_include_directories(
${NLOHMANN_JSON_TARGET_NAME}
INTERFACE
$<BUILD_INTERFACE:${NLOHMANN_JSON_INCLUDE_BUILD_DIR}>
$<INSTALL_INTERFACE:include>
)
## add debug view definition file for msvc (natvis)
if (MSVC)
set(NLOHMANN_ADD_NATVIS TRUE)
set(NLOHMANN_NATVIS_FILE "nlohmann_json.natvis")
target_sources(
${NLOHMANN_JSON_TARGET_NAME}
INTERFACE
$<INSTALL_INTERFACE:${NLOHMANN_NATVIS_FILE}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/${NLOHMANN_NATVIS_FILE}>
)
endif()
# Install a pkg-config file, so other tools can find this.
CONFIGURE_FILE(
"${CMAKE_CURRENT_SOURCE_DIR}/cmake/pkg-config.pc.in"
"${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc"
)
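# (Consumer sketch, not exercised here: once the file is installed, the compile
# flags can be queried with `pkg-config --cflags nlohmann_json`.)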
##
## TESTS
## create and configure the unit test target
##
if (JSON_BuildTests)
include(CTest)
enable_testing()
add_subdirectory(test)
endif()
##
## INSTALL
## install header files, generate and install cmake config files for find_package()
##
include(CMakePackageConfigHelpers)
# use a custom package version config file instead of
# write_basic_package_version_file to ensure that it's architecture-independent
# https://github.com/nlohmann/json/issues/1697
configure_file(
"cmake/nlohmann_jsonConfigVersion.cmake.in"
${NLOHMANN_JSON_CMAKE_VERSION_CONFIG_FILE}
@ONLY
)
configure_file(
${NLOHMANN_JSON_CMAKE_CONFIG_TEMPLATE}
${NLOHMANN_JSON_CMAKE_PROJECT_CONFIG_FILE}
@ONLY
)
if(JSON_Install)
install(
DIRECTORY ${NLOHMANN_JSON_INCLUDE_BUILD_DIR}
DESTINATION ${NLOHMANN_JSON_INCLUDE_INSTALL_DIR}
)
install(
FILES ${NLOHMANN_JSON_CMAKE_PROJECT_CONFIG_FILE} ${NLOHMANN_JSON_CMAKE_VERSION_CONFIG_FILE}
DESTINATION ${NLOHMANN_JSON_CONFIG_INSTALL_DIR}
)
if (NLOHMANN_ADD_NATVIS)
install(
FILES ${NLOHMANN_NATVIS_FILE}
DESTINATION .
)
endif()
export(
TARGETS ${NLOHMANN_JSON_TARGET_NAME}
NAMESPACE ${PROJECT_NAME}::
FILE ${NLOHMANN_JSON_CMAKE_PROJECT_TARGETS_FILE}
)
install(
TARGETS ${NLOHMANN_JSON_TARGET_NAME}
EXPORT ${NLOHMANN_JSON_TARGETS_EXPORT_NAME}
INCLUDES DESTINATION ${NLOHMANN_JSON_INCLUDE_INSTALL_DIR}
)
install(
EXPORT ${NLOHMANN_JSON_TARGETS_EXPORT_NAME}
NAMESPACE ${PROJECT_NAME}::
DESTINATION ${NLOHMANN_JSON_CONFIG_INSTALL_DIR}
)
install(
FILES "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc"
DESTINATION ${NLOHMANN_JSON_PKGCONFIG_INSTALL_DIR}
)
endif()
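# Consumer sketch (illustrative only; "my_app" is a placeholder target): with the
# package installed, a downstream project can locate and link the library with
#   find_package(nlohmann_json 3.9.1 REQUIRED)
#   target_link_libraries(my_app PRIVATE nlohmann_json::nlohmann_json)
# which picks up the exported interface target and its include directories.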
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at mail@nlohmann.me. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
MIT License
Copyright (c) 2013-2021 Niels Lohmann
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
version: '{build}'
environment:
matrix:
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Debug
platform: x86
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
configuration: Debug
platform: x86
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 15 2017
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
configuration: Debug
platform: x86
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 16 2019
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
configuration: Debug
platform: x64
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 16 2019
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Debug
COMPILER: mingw
platform: x86
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Ninja
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Release
COMPILER: mingw
platform: x86
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Ninja
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Release
platform: x86
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Release
platform: x86
name: with_win_header
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
configuration: Release
platform: x86
CXX_FLAGS: "/permissive- /std:c++latest /utf-8"
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 15 2017
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
configuration: Release
platform: x86
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: "-DJSON_ImplicitConversions=OFF"
GENERATOR: Visual Studio 16 2019
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
configuration: Release
platform: x64
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 16 2019
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Release
platform: x64
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
configuration: Release
platform: x64
CXX_FLAGS: "/permissive- /std:c++latest /Zc:__cplusplus /utf-8 /F4000000"
LINKER_FLAGS: "/STACK:4000000"
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 15 2017
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
configuration: Release
platform: x64
CXX_FLAGS: ""
LINKER_FLAGS: ""
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 16 2019
init:
- cmake --version
- msbuild /version
install:
- if "%COMPILER%"=="mingw" appveyor DownloadFile https://github.com/ninja-build/ninja/releases/download/v1.6.0/ninja-win.zip -FileName ninja.zip
- if "%COMPILER%"=="mingw" 7z x ninja.zip -oC:\projects\deps\ninja > nul
- if "%COMPILER%"=="mingw" set PATH=C:\projects\deps\ninja;%PATH%
- if "%COMPILER%"=="mingw" set PATH=C:\mingw-w64\x86_64-7.3.0-posix-seh-rt_v5-rev0\mingw64\bin;%PATH%
- if "%COMPILER%"=="mingw" g++ --version
- if "%platform%"=="x86" set GENERATOR_PLATFORM=Win32
before_build:
# for the with_win_header build, inject an inclusion of Windows.h into the single-header library
- ps: if ($env:name -Eq "with_win_header") { $header_path = "single_include\nlohmann\json.hpp" }
- ps: if ($env:name -Eq "with_win_header") { "#include <Windows.h>`n" + (Get-Content $header_path | Out-String) | Set-Content $header_path }
- if "%GENERATOR%"=="Ninja" (cmake . -G "%GENERATOR%" -DCMAKE_BUILD_TYPE="%configuration%" -DCMAKE_CXX_FLAGS="%CXX_FLAGS%" -DCMAKE_EXE_LINKER_FLAGS="%LINKER_FLAGS%" -DCMAKE_IGNORE_PATH="C:/Program Files/Git/usr/bin" -DJSON_BuildTests=On "%CMAKE_OPTIONS%") else (cmake . -G "%GENERATOR%" -A "%GENERATOR_PLATFORM%" -DCMAKE_CXX_FLAGS="%CXX_FLAGS%" -DCMAKE_EXE_LINKER_FLAGS="%LINKER_FLAGS%" -DCMAKE_IGNORE_PATH="C:/Program Files/Git/usr/bin" -DJSON_BuildTests=On "%CMAKE_OPTIONS%")
build_script:
- cmake --build . --config "%configuration%"
test_script:
- if "%configuration%"=="Release" ctest -C "%configuration%" -V -j
# On Debug builds, skip the test-unicode tests,
# as they are extremely slow to run and cause
# occasional timeouts on AppVeyor.
# More info: https://github.com/nlohmann/json/pull/1570
- if "%configuration%"=="Debug" ctest --exclude-regex "test-unicode" -C "%configuration%" -V -j
cmake_minimum_required(VERSION 3.8)
project(JSON_Benchmarks LANGUAGES CXX)
# set compiler flags
if((CMAKE_CXX_COMPILER_ID MATCHES GNU) OR (CMAKE_CXX_COMPILER_ID MATCHES Clang))
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -flto -DNDEBUG -O3")
endif()
# configure Google Benchmarks
set(BENCHMARK_ENABLE_TESTING OFF CACHE INTERNAL "" FORCE)
add_subdirectory(thirdparty/benchmark)
# header directories
include_directories(thirdparty)
include_directories(${CMAKE_SOURCE_DIR}/../single_include)
# download test data
include(${CMAKE_SOURCE_DIR}/../cmake/download_test_data.cmake)
# benchmark binary
add_executable(json_benchmarks src/benchmarks.cpp)
target_compile_features(json_benchmarks PRIVATE cxx_std_11)
target_link_libraries(json_benchmarks benchmark ${CMAKE_THREAD_LIBS_INIT})
add_dependencies(json_benchmarks download_test_data)
target_include_directories(json_benchmarks PRIVATE ${CMAKE_BINARY_DIR}/include)
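# (Build-and-run sketch: after configuring this directory, building the
# json_benchmarks target and running the resulting binary executes all benchmarks;
# the download_test_data dependency above fetches the JSON files they parse.)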
#include "benchmark/benchmark.h"
#include <nlohmann/json.hpp>
#include <fstream>
#include <test_data.hpp>
using json = nlohmann::json;
//////////////////////////////////////////////////////////////////////////////
// parse JSON from file
//////////////////////////////////////////////////////////////////////////////
static void ParseFile(benchmark::State& state, const char* filename)
{
while (state.KeepRunning())
{
// pause the timer so that stream/value construction and destruction are not measured
state.PauseTiming();
auto* f = new std::ifstream(filename);
auto* j = new json();
state.ResumeTiming();
*j = json::parse(*f);
state.PauseTiming();
delete f;
delete j;
state.ResumeTiming();
}
// report throughput relative to the size of the parsed file
std::ifstream file(filename, std::ios::binary | std::ios::ate);
state.SetBytesProcessed(state.iterations() * file.tellg());
}
BENCHMARK_CAPTURE(ParseFile, jeopardy, TEST_DATA_DIRECTORY "/jeopardy/jeopardy.json");
BENCHMARK_CAPTURE(ParseFile, canada, TEST_DATA_DIRECTORY "/nativejson-benchmark/canada.json");
BENCHMARK_CAPTURE(ParseFile, citm_catalog, TEST_DATA_DIRECTORY "/nativejson-benchmark/citm_catalog.json");
BENCHMARK_CAPTURE(ParseFile, twitter, TEST_DATA_DIRECTORY "/nativejson-benchmark/twitter.json");
BENCHMARK_CAPTURE(ParseFile, floats, TEST_DATA_DIRECTORY "/regression/floats.json");
BENCHMARK_CAPTURE(ParseFile, signed_ints, TEST_DATA_DIRECTORY "/regression/signed_ints.json");
BENCHMARK_CAPTURE(ParseFile, unsigned_ints, TEST_DATA_DIRECTORY "/regression/unsigned_ints.json");
BENCHMARK_CAPTURE(ParseFile, small_signed_ints, TEST_DATA_DIRECTORY "/regression/small_signed_ints.json");
//////////////////////////////////////////////////////////////////////////////
// parse JSON from string
//////////////////////////////////////////////////////////////////////////////
static void ParseString(benchmark::State& state, const char* filename)
{
std::ifstream f(filename);
std::string str((std::istreambuf_iterator<char>(f)), std::istreambuf_iterator<char>());
while (state.KeepRunning())
{
state.PauseTiming();
auto* j = new json();
state.ResumeTiming();
*j = json::parse(str);
state.PauseTiming();
delete j;
state.ResumeTiming();
}
state.SetBytesProcessed(state.iterations() * str.size());
}
BENCHMARK_CAPTURE(ParseString, jeopardy, TEST_DATA_DIRECTORY "/jeopardy/jeopardy.json");
BENCHMARK_CAPTURE(ParseString, canada, TEST_DATA_DIRECTORY "/nativejson-benchmark/canada.json");
BENCHMARK_CAPTURE(ParseString, citm_catalog, TEST_DATA_DIRECTORY "/nativejson-benchmark/citm_catalog.json");
BENCHMARK_CAPTURE(ParseString, twitter, TEST_DATA_DIRECTORY "/nativejson-benchmark/twitter.json");
BENCHMARK_CAPTURE(ParseString, floats, TEST_DATA_DIRECTORY "/regression/floats.json");
BENCHMARK_CAPTURE(ParseString, signed_ints, TEST_DATA_DIRECTORY "/regression/signed_ints.json");
BENCHMARK_CAPTURE(ParseString, unsigned_ints, TEST_DATA_DIRECTORY "/regression/unsigned_ints.json");
BENCHMARK_CAPTURE(ParseString, small_signed_ints, TEST_DATA_DIRECTORY "/regression/small_signed_ints.json");
//////////////////////////////////////////////////////////////////////////////
// serialize JSON
//////////////////////////////////////////////////////////////////////////////
static void Dump(benchmark::State& state, const char* filename, int indent)
{
std::ifstream f(filename);
std::string str((std::istreambuf_iterator<char>(f)), std::istreambuf_iterator<char>());
json j = json::parse(str);
while (state.KeepRunning())
{
j.dump(indent);
}
state.SetBytesProcessed(state.iterations() * j.dump(indent).size());
}
BENCHMARK_CAPTURE(Dump, jeopardy / -, TEST_DATA_DIRECTORY "/jeopardy/jeopardy.json", -1);
BENCHMARK_CAPTURE(Dump, jeopardy / 4, TEST_DATA_DIRECTORY "/jeopardy/jeopardy.json", 4);
BENCHMARK_CAPTURE(Dump, canada / -, TEST_DATA_DIRECTORY "/nativejson-benchmark/canada.json", -1);
BENCHMARK_CAPTURE(Dump, canada / 4, TEST_DATA_DIRECTORY "/nativejson-benchmark/canada.json", 4);
BENCHMARK_CAPTURE(Dump, citm_catalog / -, TEST_DATA_DIRECTORY "/nativejson-benchmark/citm_catalog.json", -1);
BENCHMARK_CAPTURE(Dump, citm_catalog / 4, TEST_DATA_DIRECTORY "/nativejson-benchmark/citm_catalog.json", 4);
BENCHMARK_CAPTURE(Dump, twitter / -, TEST_DATA_DIRECTORY "/nativejson-benchmark/twitter.json", -1);
BENCHMARK_CAPTURE(Dump, twitter / 4, TEST_DATA_DIRECTORY "/nativejson-benchmark/twitter.json", 4);
BENCHMARK_CAPTURE(Dump, floats / -, TEST_DATA_DIRECTORY "/regression/floats.json", -1);
BENCHMARK_CAPTURE(Dump, floats / 4, TEST_DATA_DIRECTORY "/regression/floats.json", 4);
BENCHMARK_CAPTURE(Dump, signed_ints / -, TEST_DATA_DIRECTORY "/regression/signed_ints.json", -1);
BENCHMARK_CAPTURE(Dump, signed_ints / 4, TEST_DATA_DIRECTORY "/regression/signed_ints.json", 4);
BENCHMARK_CAPTURE(Dump, unsigned_ints / -, TEST_DATA_DIRECTORY "/regression/unsigned_ints.json", -1);
BENCHMARK_CAPTURE(Dump, unsigned_ints / 4, TEST_DATA_DIRECTORY "/regression/unsigned_ints.json", 4);
BENCHMARK_CAPTURE(Dump, small_signed_ints / -, TEST_DATA_DIRECTORY "/regression/small_signed_ints.json", -1);
BENCHMARK_CAPTURE(Dump, small_signed_ints / 4, TEST_DATA_DIRECTORY "/regression/small_signed_ints.json", 4);
BENCHMARK_MAIN();
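// (Each BENCHMARK_CAPTURE above registers one benchmark instance per data set;
// BENCHMARK_MAIN() expands to the main() function that runs all registered
// benchmarks and, thanks to SetBytesProcessed, reports a bytes-per-second rate.)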
# This is the official list of benchmark authors for copyright purposes.
# This file is distinct from the CONTRIBUTORS files.
# See the latter for an explanation.
#
# Names should be added to this file as:
# Name or Organization <email address>
# The email address is not required for organizations.
#
# Please keep the list sorted.
Albert Pretorius <pretoalb@gmail.com>
Arne Beer <arne@twobeer.de>
Carto
Christopher Seymour <chris.j.seymour@hotmail.com>
David Coeurjolly <david.coeurjolly@liris.cnrs.fr>
Deniz Evrenci <denizevrenci@gmail.com>
Dirac Research
Dominik Czarnota <dominik.b.czarnota@gmail.com>
Eric Fiselier <eric@efcs.ca>
Eugene Zhuk <eugene.zhuk@gmail.com>
Evgeny Safronov <division494@gmail.com>
Felix Homann <linuxaudio@showlabor.de>
Google Inc.
International Business Machines Corporation
Ismael Jimenez Martinez <ismael.jimenez.martinez@gmail.com>
Jern-Kuan Leong <jernkuan@gmail.com>
JianXiong Zhou <zhoujianxiong2@gmail.com>
Joao Paulo Magalhaes <joaoppmagalhaes@gmail.com>
Jussi Knuuttila <jussi.knuuttila@gmail.com>
Kaito Udagawa <umireon@gmail.com>
Kishan Kumar <kumar.kishan@outlook.com>
Lei Xu <eddyxu@gmail.com>
Matt Clarkson <mattyclarkson@gmail.com>
Maxim Vafin <maxvafin@gmail.com>
MongoDB Inc.
Nick Hutchinson <nshutchinson@gmail.com>
Oleksandr Sochka <sasha.sochka@gmail.com>
Paul Redmond <paul.redmond@gmail.com>
Radoslav Yovchev <radoslav.tm@gmail.com>
Roman Lebedev <lebedev.ri@gmail.com>
Shuo Chen <chenshuo@chenshuo.com>
Steinar H. Gunderson <sgunderson@bigfoot.com>
Stripe, Inc.
Yixuan Qiu <yixuanq@gmail.com>
Yusuke Suzuki <utatane.tea@gmail.com>
Zbigniew Skowron <zbychs@gmail.com>
licenses(["notice"])
config_setting(
name = "windows",
values = {
"cpu": "x64_windows",
},
visibility = [":__subpackages__"],
)
cc_library(
name = "benchmark",
srcs = glob(
[
"src/*.cc",
"src/*.h",
],
exclude = ["src/benchmark_main.cc"],
),
hdrs = ["include/benchmark/benchmark.h"],
linkopts = select({
":windows": ["-DEFAULTLIB:shlwapi.lib"],
"//conditions:default": ["-pthread"],
}),
strip_include_prefix = "include",
visibility = ["//visibility:public"],
)
cc_library(
name = "benchmark_main",
srcs = ["src/benchmark_main.cc"],
hdrs = ["include/benchmark/benchmark.h"],
strip_include_prefix = "include",
visibility = ["//visibility:public"],
deps = [":benchmark"],
)
cc_library(
name = "benchmark_internal_headers",
hdrs = glob(["src/*.h"]),
visibility = ["//test:__pkg__"],
)
# How to contribute #
We'd love to accept your patches and contributions to this project. There are
just a few small guidelines you need to follow.
## Contributor License Agreement ##
Contributions to any Google project must be accompanied by a Contributor
License Agreement. This is not a copyright **assignment**; it simply gives
Google permission to use and redistribute your contributions as part of the
project.
* If you are an individual writing original source code and you're sure you
own the intellectual property, then you'll need to sign an [individual
CLA][].
* If you work for a company that wants to allow you to contribute your work,
then you'll need to sign a [corporate CLA][].
You generally only need to submit a CLA once, so if you've already submitted
one (even if it was for a different project), you probably don't need to do it
again.
[individual CLA]: https://developers.google.com/open-source/cla/individual
[corporate CLA]: https://developers.google.com/open-source/cla/corporate
Once your CLA is submitted (or if you already submitted one for
another Google project), make a commit adding yourself to the
[AUTHORS][] and [CONTRIBUTORS][] files. This commit can be part
of your first [pull request][].
[AUTHORS]: AUTHORS
[CONTRIBUTORS]: CONTRIBUTORS
## Submitting a patch ##
1. It's generally best to start by opening a new issue describing the bug or
feature you're intending to fix. Even if you think it's relatively minor,
it's helpful to know what people are working on. Mention in the initial
issue that you are planning to work on that bug or feature so that it can
be assigned to you.
1. Follow the normal process of [forking][] the project, and set up a new
branch to work in. It's important that each group of changes be done in
separate branches in order to ensure that a pull request only includes the
commits related to that bug or feature.
1. Do your best to have [well-formed commit messages][] for each change.
This provides consistency throughout the project, and ensures that commit
messages can be formatted properly by various git tools.
1. Finally, push the commits to your fork and submit a [pull request][].
[forking]: https://help.github.com/articles/fork-a-repo
[well-formed commit messages]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html
[pull request]: https://help.github.com/articles/creating-a-pull-request
# People who have agreed to one of the CLAs and can contribute patches.
# The AUTHORS file lists the copyright holders; this file
# lists people. For example, Google employees are listed here
# but not in AUTHORS, because Google holds the copyright.
#
# Names should be added to this file only after verifying that
# the individual or the individual's organization has agreed to
# the appropriate Contributor License Agreement, found here:
#
# https://developers.google.com/open-source/cla/individual
# https://developers.google.com/open-source/cla/corporate
#
# The agreement for individuals can be filled out on the web.
#
# When adding J Random Contributor's name to this file,
# either J's name or J's organization's name should be
# added to the AUTHORS file, depending on whether the
# individual or corporate CLA was used.
#
# Names should be added to this file as:
# Name <email address>
#
# Please keep the list sorted.
Albert Pretorius <pretoalb@gmail.com>
Arne Beer <arne@twobeer.de>
Billy Robert O'Neal III <billy.oneal@gmail.com> <bion@microsoft.com>
Chris Kennelly <ckennelly@google.com> <ckennelly@ckennelly.com>
Christopher Seymour <chris.j.seymour@hotmail.com>
David Coeurjolly <david.coeurjolly@liris.cnrs.fr>
Deniz Evrenci <denizevrenci@gmail.com>
Dominic Hamon <dma@stripysock.com> <dominic@google.com>
Dominik Czarnota <dominik.b.czarnota@gmail.com>
Eric Fiselier <eric@efcs.ca>
Eugene Zhuk <eugene.zhuk@gmail.com>
Evgeny Safronov <division494@gmail.com>
Felix Homann <linuxaudio@showlabor.de>
Ismael Jimenez Martinez <ismael.jimenez.martinez@gmail.com>
Jern-Kuan Leong <jernkuan@gmail.com>
JianXiong Zhou <zhoujianxiong2@gmail.com>
Joao Paulo Magalhaes <joaoppmagalhaes@gmail.com>
John Millikin <jmillikin@stripe.com>
Jussi Knuuttila <jussi.knuuttila@gmail.com>
Kai Wolf <kai.wolf@gmail.com>
Kishan Kumar <kumar.kishan@outlook.com>
Kaito Udagawa <umireon@gmail.com>
Lei Xu <eddyxu@gmail.com>
Matt Clarkson <mattyclarkson@gmail.com>
Maxim Vafin <maxvafin@gmail.com>
Nick Hutchinson <nshutchinson@gmail.com>
Oleksandr Sochka <sasha.sochka@gmail.com>
Pascal Leroy <phl@google.com>
Paul Redmond <paul.redmond@gmail.com>
Pierre Phaneuf <pphaneuf@google.com>
Radoslav Yovchev <radoslav.tm@gmail.com>
Raul Marin <rmrodriguez@cartodb.com>
Ray Glover <ray.glover@uk.ibm.com>
Robert Guo <robert.guo@mongodb.com>
Roman Lebedev <lebedev.ri@gmail.com>
Shuo Chen <chenshuo@chenshuo.com>
Tobias Ulvgård <tobias.ulvgard@dirac.se>
Tom Madams <tom.ej.madams@gmail.com> <tmadams@google.com>
Yixuan Qiu <yixuanq@gmail.com>
Yusuke Suzuki <utatane.tea@gmail.com>
Zbigniew Skowron <zbychs@gmail.com>
workspace(name = "com_github_google_benchmark")
http_archive(
name = "com_google_googletest",
urls = ["https://github.com/google/googletest/archive/3f0cf6b62ad1eb50d8736538363d3580dd640c3e.zip"],
strip_prefix = "googletest-3f0cf6b62ad1eb50d8736538363d3580dd640c3e",
)
version: '{build}'
image: Visual Studio 2017
configuration:
- Debug
- Release
environment:
matrix:
- compiler: msvc-15-seh
generator: "Visual Studio 15 2017"
- compiler: msvc-15-seh
generator: "Visual Studio 15 2017 Win64"
- compiler: msvc-14-seh
generator: "Visual Studio 14 2015"
- compiler: msvc-14-seh
generator: "Visual Studio 14 2015 Win64"
- compiler: msvc-12-seh
generator: "Visual Studio 12 2013"
- compiler: msvc-12-seh
generator: "Visual Studio 12 2013 Win64"
- compiler: gcc-5.3.0-posix
generator: "MinGW Makefiles"
cxx_path: 'C:\mingw-w64\i686-5.3.0-posix-dwarf-rt_v4-rev0\mingw32\bin'
APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
matrix:
fast_finish: true
install:
# git bash conflicts with MinGW makefiles
- if "%generator%"=="MinGW Makefiles" (set "PATH=%PATH:C:\Program Files\Git\usr\bin;=%")
- if not "%cxx_path%"=="" (set "PATH=%PATH%;%cxx_path%")
build_script:
- md _build -Force
- cd _build
- echo %configuration%
- cmake -G "%generator%" "-DCMAKE_BUILD_TYPE=%configuration%" -DBENCHMARK_DOWNLOAD_DEPENDENCIES=ON ..
- cmake --build . --config %configuration%
test_script:
- ctest -c %configuration% --timeout 300 --output-on-failure
artifacts:
- path: '_build/CMakeFiles/*.log'
name: logs
- path: '_build/Testing/**/*.xml'
name: test_results
include("${CMAKE_CURRENT_LIST_DIR}/@targets_export_name@.cmake")
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=${prefix}
libdir=${prefix}/lib
includedir=${prefix}/include
Name: @PROJECT_NAME@
Description: Google microbenchmark framework
Version: @VERSION@
Libs: -L${libdir} -lbenchmark
Cflags: -I${includedir}
macro(split_list listname)
string(REPLACE ";" " " ${listname} "${${listname}}")
endmacro()
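# Example (a minimal sketch with a hypothetical variable): after
#   set(BENCHMARK_CXX_FLAGS "-Wall;-Wextra;-O3")
#   split_list(BENCHMARK_CXX_FLAGS)
# BENCHMARK_CXX_FLAGS holds the plain string "-Wall -Wextra -O3".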