diff --git a/.travis.yml b/.travis.yml index dbf2b13996..e085b7f64e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,20 +2,25 @@ os: - linux - osx -language: c++ +language: python + - "2.7" before_install: - - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get update -qq; sudo apt-get install -y gfortran; fi - - if [ "$TRAVIS_OS_NAME" = "linux" ]; then pushd $HOME && mkdir cmake3.1 && cd cmake3.1 && (curl -L "http://cmake.org/files/v3.1/cmake-3.1.0-Linux-x86_64.tar.gz" | gunzip -c | tar x) && cd cmake-*/bin && export PATH="${PWD}:${PATH}"; popd; fi - - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update ; brew outdated cmake || brew upgrade cmake ; fi - - cmake --version + - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get update -qq; sudo apt-get install -y gfortran xvfb; fi + - if [ "$TRAVIS_OS_NAME" = "linux" ]; then wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; fi + - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update ; fi + - if [ "$TRAVIS_OS_NAME" = "osx" ]; then wget https://repo.continuum.io/miniconda/Miniconda-latest-MacOSX-x86_64.sh -O miniconda.sh; fi + - if [ "$TRAVIS_OS_NAME" = "osx" ]; then wget https://github.com/UV-CDAT/uvcdat/releases/download/v2.4.1/gfortran-4.9.2-Mac.tar.gz -O ~/gfortran-4.9.2-Mac.tar.gz ; pushd / ; sudo tar xzvf ~/gfortran-4.9.2-Mac.tar.gz ; pushd ; fi + - export PATH="$HOME/miniconda/bin:$PATH" + - bash miniconda.sh -b -p $HOME/miniconda + - conda config --set always_yes yes --set changeps1 no + - conda update -y -q conda + - conda install openssl=1.0.2d script: - - git submodule init - - git submodule update - cd .. 
- mkdir _build - cd _build - - cmake -DGIT_PROTOCOL=git:// -DCDAT_BUILD_MODE=LEAN -DCDAT_BUILD_GRAPHICS=ON -DCDAT_BUILD_SCIPY=OFF ../uvcdat - - ctest -VV -S ../uvcdat/CMake/travis_build.cmake - - ctest -VV -S ../uvcdat/CMake/travis_submit.cmake + - cmake -DGIT_PROTOCOL=git:// ../uvcdat + - make + - ctest -j8 -D Experimental diff --git a/CMake/ESMP.patch b/CMake/ESMP.patch deleted file mode 100644 index 9d1eb2c07f..0000000000 --- a/CMake/ESMP.patch +++ /dev/null @@ -1,33 +0,0 @@ ---- a/ESMP_LoadESMF.py 2014-01-14 10:00:22.000000000 -0500 -+++ b/ESMP_LoadESMF.py 2014-01-14 10:40:57.000000000 -0500 -@@ -64,6 +64,14 @@ - # esmfmk = c[2] - - try: -+ -+ # If we are not dealing with an absolute path treat it a relative to the -+ # current Python module. -+ if not os.path.isabs(esmfmk): -+ # Get the directory for this module -+ rel_dir = os.path.dirname(os.path.realpath(__file__)) -+ esmfmk = os.path.abspath(os.path.join(rel_dir, esmfmk)) -+ - MKFILE = open(esmfmk, 'r') - except: - raise IOError("File not found\n %s") % esmfmk -@@ -72,11 +80,12 @@ - libsdir = 0 - esmfos = 0 - esmfabi = 0 -+ -+ libsdir = os.path.dirname(esmfmk) -+ - # MKFILE = open(esmfmk,'r') - for line in MKFILE: -- if 'ESMF_LIBSDIR' in line: -- libsdir = line.split("=")[1] -- elif 'ESMF_OS:' in line: -+ if 'ESMF_OS:' in line: - esmfos = line.split(":")[1] - elif 'ESMF_ABI:' in line: - esmfabi = line.split(":")[1] diff --git a/CMake/cdat_modules/basemap_deps.cmake b/CMake/cdat_modules/basemap_deps.cmake deleted file mode 100644 index 98520d1d08..0000000000 --- a/CMake/cdat_modules/basemap_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(basemap_deps ${matplotlib_pkg} ${geos_pkg}) diff --git a/CMake/cdat_modules/basemap_external.cmake b/CMake/cdat_modules/basemap_external.cmake deleted file mode 100644 index 53b3a59a1b..0000000000 --- a/CMake/cdat_modules/basemap_external.cmake +++ /dev/null @@ -1,39 +0,0 @@ -# The basemap external project - -set(basemap_binary "${CMAKE_CURRENT_BINARY_DIR}/build/basemap") - 
-#configure_file( -# ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_configure_step.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/basemap_configure_step.cmake @ONLY) -# to build we also run a cmake -P script. -# the script will set LD_LIBRARY_PATH so that -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/basemap_make_step.cmake @ONLY) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/basemap_install_step.cmake @ONLY) - -#set(basemap_CONFIGURE_COMMAND ${CMAKE_COMMAND} -# -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -P ${cdat_CMAKE_BINARY_DIR}/basemap_configure_step.cmake) -set(basemap_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/basemap_make_step.cmake) -set(basemap_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/basemap_install_step.cmake) - -# create an external project to download numpy, -# and configure and build it -ExternalProject_Add(basemap - URL ${basemap_URL}/${basemap_GZ} - URL_MD5 ${basemap_MD5} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${basemap_binary} - BINARY_DIR ${basemap_binary} - CONFIGURE_COMMAND "" - BUILD_COMMAND ${basemap_BUILD_COMMAND} - UPDATE_COMMAND "" - INSTALL_COMMAND ${basemap_INSTALL_COMMAND} - DEPENDS - ${basemap_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/basemap_pkg.cmake b/CMake/cdat_modules/basemap_pkg.cmake deleted file mode 100644 index bfcaa6c07b..0000000000 --- a/CMake/cdat_modules/basemap_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set( basemap_MAJOR 1 ) -set( basemap_MINOR 0 ) -set( basemap_PATCH 5 ) -set(basemap_URL ${LLNL_URL}) -set(basemap_GZ basemap-${basemap_MAJOR}.${basemap_MINOR}.${basemap_PATCH}.tar.gz) -set(basemap_MD5 089260ea2b3eebb9d63e1783d0b15298 ) -set(BASEMAP_VERSION ${basemap_MAJOR}.${basemap_MINOR}.${basemap_PATCH}) -set(BASEMAP_SOURCE ${basemap_URL}/${basemap_GZ}) - 
-add_cdat_package_dependent(basemap "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/cairo_deps.cmake b/CMake/cdat_modules/cairo_deps.cmake deleted file mode 100644 index 78b7fe0b3d..0000000000 --- a/CMake/cdat_modules/cairo_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Cairo_deps ${pkgconfig_pkg} ${png_pkg} ${fontconfig_pkg} ${freetype_pkg} ${pixman_pkg} ${libxml2_pkg}) diff --git a/CMake/cdat_modules/cairo_external.cmake b/CMake/cdat_modules/cairo_external.cmake deleted file mode 100644 index 1826425c1e..0000000000 --- a/CMake/cdat_modules/cairo_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ - -set(Cairo_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cairo") -set(Cairo_install "${cdat_EXTERNALS}") -set(Cairo_conf_args --disable-static^^--enable-quartz=no^^--enable-win32=no^^--enable-skia=no^^--enable-os2=no^^--enable-beos=no^^--enable-drm=no^^--enable-gallium=no^^--enable-cogl=no) - -ExternalProject_Add(Cairo - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Cairo_source} - INSTALL_DIR ${Cairo_install} - URL ${CAIRO_URL}/${CAIRO_GZ} - URL_MD5 ${CAIRO_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DSKIP_LDFLAGS=YES -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${Cairo_conf_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${Cairo_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/cairo_pkg.cmake b/CMake/cdat_modules/cairo_pkg.cmake deleted file mode 100644 index be1bcce3ca..0000000000 --- a/CMake/cdat_modules/cairo_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(CAIRO_MAJOR 1) -set(CAIRO_MINOR 10) -set(CAIRO_PATCH 2) -set(CAIRO_MAJOR_SRC 1) -set(CAIRO_MINOR_SRC 12) -set(CAIRO_PATCH_SRC 14) -set(CAIRO_URL ${LLNL_URL}) -set(CAIRO_GZ cairo-${CAIRO_MAJOR_SRC}.${CAIRO_MINOR_SRC}.${CAIRO_PATCH_SRC}.tar.gz) -set(CAIRO_MD5 4a55de6dbbd2d22eee9eea78e6bdbbfd ) -set(CAIRO_SOURCE ${CAIRO_URL}/${CAIRO_GZ}) -set(CAIRO_VERSION 
${CAIRO_MAJOR_SRC}.${CAIRO_MINOR_SRC}.${CAIRO_PATCH_SRC}) - -add_cdat_package_dependent(Cairo "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) - diff --git a/CMake/cdat_modules/cd77_deps.cmake b/CMake/cdat_modules/cd77_deps.cmake deleted file mode 100644 index e18cdbd1df..0000000000 --- a/CMake/cdat_modules/cd77_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(cd77_deps ${python_pkg} ${ezget_pkg} ${libcdms_pkg} ${setuptools_pkg} ) diff --git a/CMake/cdat_modules/cd77_external.cmake b/CMake/cdat_modules/cd77_external.cmake deleted file mode 100644 index 00e3b0833b..0000000000 --- a/CMake/cdat_modules/cd77_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# create an external project to install cd77 -# and configure and build it - -ExternalProject_Add(cd77 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/cd77 - ${GIT_CMD_STR_cd77} - ${GIT_TAG_cd77} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} CFLAGS=$ENV{CFLAGS} LDFLAGS=$ENV{LDFLAGS} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} PYTHONPATH=${PYTHONPATH} ${USR_ENVS} ${PYTHON_EXECUTABLE} setup.py install ${USER_INSTALL_OPTIONS} ${PRFX} - DEPENDS ${${nm}_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/cd77_pkg.cmake b/CMake/cdat_modules/cd77_pkg.cmake deleted file mode 100644 index 3dc195aec9..0000000000 --- a/CMake/cdat_modules/cd77_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(cd77_VERSION 1.0.0) -set(cd77_BRANCH master) -set(cd77_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/cd77.git ) - - -set(GIT_CMD_STR_cd77 GIT_REPOSITORY ${cd77_REPOSITORY}) -set(GIT_TAG_cd77 GIT_TAG "${cd77_BRANCH}") -set (nm cd77) -string(TOUPPER ${nm} uc_nm) - -if (CDAT_BUILD_PCMDI) - add_cdat_package(cd77 "" "" ON) -endif() diff --git a/CMake/cdat_modules/cdat_deps.cmake b/CMake/cdat_modules/cdat_deps.cmake index 70d44f426f..dcb9b307a6 100644 --- a/CMake/cdat_modules/cdat_deps.cmake +++ b/CMake/cdat_modules/cdat_deps.cmake @@ -1,16 +1 
@@ -set(CDAT_deps ${wget_pkg} ${python_pkg} ${numpy_pkg} - ${libcdms_pkg} - ${libcf_pkg} ${netcdf_pkg} ${myproxyclient_pkg} ${udunits2_pkg}) -if (CDAT_BUILD_GRAPHICS) - if (CDAT_BUILD_PARAVIEW) - list(APPEND CDAT_deps ${paraview_pkg}) - else() - list(APPEND CDAT_deps ${vtk_pkg}) - endif() - list(APPEND CDAT_deps ${ffmpeg_pkg}) -endif() - -if (CDAT_BUILD_ESMF) - list(APPEND CDAT_deps ${esmf_pkg}) -endif() - +set(CDAT_deps) diff --git a/CMake/cdat_modules/cdat_external.cmake b/CMake/cdat_modules/cdat_external.cmake index 7b1b53f9bf..2f79aa5070 100644 --- a/CMake/cdat_modules/cdat_external.cmake +++ b/CMake/cdat_modules/cdat_external.cmake @@ -1,18 +1,5 @@ set(CDAT_source "${cdat_SOURCE_DIR}") - -set(RUNTIME_FLAGS ${cdat_EXTERNALS}/lib) -set(LDFLAGS -L${cdat_EXTERNALS}/lib) - -if (CDAT_BUILD_LIBDRS) - set(cdat_xtra_flags "${cdat_xtra_flags} --enable-drs") -endif() - -set(cdat_build_dir ${CMAKE_CURRENT_BINARY_DIR}/cdat-build) - set(WORKING_DIR "${cdat_CMAKE_BINARY_DIR}") -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_python_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake - @ONLY) ExternalProject_Add(CDAT DOWNLOAD_DIR "" @@ -22,7 +9,7 @@ ExternalProject_Add(CDAT PATCH_COMMAND "" CONFIGURE_COMMAND "" BUILD_COMMAND "" - INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" ${CMAKE_COMMAND} -DPYTHON_INSTALL_ARGS=${cdat_xtra_flags} -P ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake + INSTALL_COMMAND ${cdat_CMAKE_BINARY_DIR}/install_cdat_from_conda.bash DEPENDS ${CDAT_deps} ${ep_log_options} ) diff --git a/CMake/cdat_modules/cdat_pkg.cmake b/CMake/cdat_modules/cdat_pkg.cmake index 05a66faf5a..92aa4ed994 100644 --- a/CMake/cdat_modules/cdat_pkg.cmake +++ b/CMake/cdat_modules/cdat_pkg.cmake @@ -1,5 +1,5 @@ set(cdat_VERSION_MAJOR 2) -set(cdat_VERSION_MINOR 2) +set(cdat_VERSION_MINOR 6) set(cdat_VERSION_PATCH 0) set(cdat_VERSION ${cdat_VERSION_MAJOR}.${cdat_VERSION_MINOR}.${cdat_VERSION_PATCH}) @@ -30,6 +30,5 @@ 
configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/version.in ${cdat_BINARY_DIR}/version @ONLY ) - add_cdat_package(CDAT "" "" ON) diff --git a/CMake/cdat_modules/cdatlogger_deps.cmake b/CMake/cdat_modules/cdatlogger_deps.cmake deleted file mode 100644 index c2cfeeb0b4..0000000000 --- a/CMake/cdat_modules/cdatlogger_deps.cmake +++ /dev/null @@ -1,2 +0,0 @@ -set (CDATLogger_deps ${cdat_pkg}) - diff --git a/CMake/cdat_modules/cdatlogger_external.cmake b/CMake/cdat_modules/cdatlogger_external.cmake deleted file mode 100644 index 69cb09c1ae..0000000000 --- a/CMake/cdat_modules/cdatlogger_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cmake_modules/CDATLogger.cmake.in - ${cdat_CMAKE_BINARY_DIR}/CDATLogger.cmake - @ONLY) - -ExternalProject_Add(CDATLogger - DOWNLOAD_DIR "" - SOURCE_DIR ${cdat_SOURCE_DIR} - BINARY_DIR ${cdat_build_dir} - BUILD_IN_SOURCE 0 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/CDATLogger.cmake - DEPENDS ${CDATLogger_deps} -) diff --git a/CMake/cdat_modules/cdatlogger_pkg.cmake b/CMake/cdat_modules/cdatlogger_pkg.cmake deleted file mode 100644 index f9e19bd05b..0000000000 --- a/CMake/cdat_modules/cdatlogger_pkg.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(CDATLOGGER_VERSION N/A) -set(CDATLOGGER_SOURCE N/A) - -add_cdat_package(CDATLogger "" "" OFF) diff --git a/CMake/cdat_modules/cffi_deps.cmake b/CMake/cdat_modules/cffi_deps.cmake deleted file mode 100644 index 3e06205106..0000000000 --- a/CMake/cdat_modules/cffi_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CFFI_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} ) diff --git a/CMake/cdat_modules/cffi_external.cmake b/CMake/cdat_modules/cffi_external.cmake deleted file mode 100644 index 1fdb495c4c..0000000000 --- a/CMake/cdat_modules/cffi_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm 
CFFI) - -include(pipinstaller) diff --git a/CMake/cdat_modules/cffi_pkg.cmake b/CMake/cdat_modules/cffi_pkg.cmake deleted file mode 100644 index 889da6bb71..0000000000 --- a/CMake/cdat_modules/cffi_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CFFI_MAJOR_SRC 1) -set(CFFI_MINOR_SRC 5) -set(CFFI_PATCH_SRC 2) - -set(CFFI_VERSION ${CFFI_MAJOR_SRC}.${CFFI_MINOR_SRC}.${CFFI_PATCH_SRC}) -set(CFFI_GZ cffi-${CFFI_VERSION}.tar.gz) -set(CFFI_SOURCE ${LLNL_URL}/${CFFI_GZ}) -set(CFFI_MD5 fa766133f7299464c8bf857e0c966a82) - -add_cdat_package_dependent(CFFI "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/clapack_deps.cmake b/CMake/cdat_modules/clapack_deps.cmake deleted file mode 100644 index e0f544de86..0000000000 --- a/CMake/cdat_modules/clapack_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CLAPACK_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/clapack_external.cmake b/CMake/cdat_modules/clapack_external.cmake deleted file mode 100644 index bab6e0f947..0000000000 --- a/CMake/cdat_modules/clapack_external.cmake +++ /dev/null @@ -1,39 +0,0 @@ -# The CLAPACK external project - -set(clapack_source "${CMAKE_CURRENT_BINARY_DIR}/LAPACK") -set(clapack_binary "${CMAKE_CURRENT_BINARY_DIR}/LAPACK-build") -set(clapack_install "${cdat_EXTERNALS}") -set(NUMPY_LAPACK_binary ${clapack_binary}) - -# -# To fix compilation problem: relocation R_X86_64_32 against `a local symbol' can not be -# used when making a shared object; recompile with -fPIC -# See http://www.cmake.org/pipermail/cmake/2007-May/014350.html -# -if(UNIX AND CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64") - set(cdat_tpl_c_flags_LAPACK "-fPIC ${cdat_tpl_c_flags}") -endif() - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/CLAPACK_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/CLAPACK_install_step.cmake - @ONLY) - -set(CLAPACK_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/CLAPACK_install_step.cmake) - -ExternalProject_Add(CLAPACK - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR 
${clapack_source} - BINARY_DIR ${clapack_binary} - INSTALL_DIR ${clapack_install} - URL ${CLAPACK_URL}/${CLAPACK_GZ} - URL_MD5 ${CLAPACK_MD5} - CMAKE_CACHE_ARGS - -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags} - -DBUILD_SHARED_LIBS:BOOL=ON - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} - ${CLAPACK_EXTRA_ARGS} - INSTALL_COMMAND ${CLAPACK_INSTALL_COMMAND} - DEPENDS ${CLAPACK_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/clapack_pkg.cmake b/CMake/cdat_modules/clapack_pkg.cmake deleted file mode 100644 index 38f156a44a..0000000000 --- a/CMake/cdat_modules/clapack_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(CLAPACK_MAJOR 3) -set(CLAPACK_MINOR 2) -set(CLAPACK_PATCH 1) -set(CLAPACK_VERSION ${CLAPACK_MAJOR}.${CLAPACK_MINOR}.${CLAPACK_PATCH}) -set(CLAPACK_URL http://www.netlib.org/clapack) -set(CLAPACK_GZ clapack-${CLAPACK_VERSION}-CMAKE.tgz) -set(CLAPACK_MD5 4fd18eb33f3ff8c5d65a7d43913d661b) -set(CLAPACK_SOURCE ${CLAPACK_URL}/${CLAPACK_GZ}) - -if(NOT APPLE) - if(NOT CMAKE_Fortran_COMPILER) - add_cdat_package(CLAPACK "" "" OFF) - endif() -endif() - diff --git a/CMake/cdat_modules/click_deps.cmake b/CMake/cdat_modules/click_deps.cmake deleted file mode 100644 index ee4e50d5d9..0000000000 --- a/CMake/cdat_modules/click_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CLICK_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} ) diff --git a/CMake/cdat_modules/click_external.cmake b/CMake/cdat_modules/click_external.cmake deleted file mode 100644 index 0c89a3bf12..0000000000 --- a/CMake/cdat_modules/click_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm CLICK) - -include(pipinstaller) diff --git a/CMake/cdat_modules/click_pkg.cmake b/CMake/cdat_modules/click_pkg.cmake deleted file mode 100644 index b0aef777c6..0000000000 --- a/CMake/cdat_modules/click_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(CLICK_MAJOR_SRC 4) 
-set(CLICK_MINOR_SRC 1) - -set(CLICK_VERSION ${CLICK_MAJOR_SRC}.${CLICK_MINOR_SRC}) -set(CLICK_GZ click-${CLICK_VERSION}.tar.gz) -set(CLICK_SOURCE ${LLNL_URL}/${CLICK_GZ}) -set(CLICK_MD5 6a3fa88c738f2f775ec6de126feb99a4) - -if (CDAT_BUILD_ALL) - add_cdat_package(CLICK "" "" ON) -else() - add_cdat_package(CLICK "" "" OFF) -endif() diff --git a/CMake/cdat_modules/cligj_deps.cmake b/CMake/cdat_modules/cligj_deps.cmake deleted file mode 100644 index ce62bdb2fc..0000000000 --- a/CMake/cdat_modules/cligj_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CLIGJ_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} ${click_pkg} ) diff --git a/CMake/cdat_modules/cligj_external.cmake b/CMake/cdat_modules/cligj_external.cmake deleted file mode 100644 index 8051175700..0000000000 --- a/CMake/cdat_modules/cligj_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm CLIGJ) - -include(pipinstaller) diff --git a/CMake/cdat_modules/cligj_pkg.cmake b/CMake/cdat_modules/cligj_pkg.cmake deleted file mode 100644 index 06adad5060..0000000000 --- a/CMake/cdat_modules/cligj_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(CLIGJ_MAJOR_SRC 0) -set(CLIGJ_MINOR_SRC 3) -set(CLIGJ_PATCH_SRC 0) - -set(CLIGJ_VERSION ${CLIGJ_MAJOR_SRC}.${CLIGJ_MINOR_SRC}.${CLIGJ_PATCH_SRC}) -set(CLIGJ_GZ cligj-${CLIGJ_VERSION}.tar.gz) -set(CLIGJ_SOURCE ${LLNL_URL}/${CLIGJ_GZ}) -set(CLIGJ_MD5 cd135f171b4ef2c07ebd34731ccf09a5) - -if (CDAT_BUILD_ALL) - add_cdat_package(CLIGJ "" "" ON) -else() - add_cdat_package(CLIGJ "" "" OFF) -endif() diff --git a/CMake/cdat_modules/cmcurl_external.cmake b/CMake/cdat_modules/cmcurl_external.cmake deleted file mode 100644 index 8a6033f35a..0000000000 --- a/CMake/cdat_modules/cmcurl_external.cmake +++ /dev/null @@ -1,25 +0,0 @@ -# The cmCurl external project for Titan - -set(curl_source "${CMAKE_CURRENT_SOURCE_DIR}/cmcurl") -set(curl_binary "${CMAKE_CURRENT_BINARY_DIR}/cmcurl") - -ExternalProject_Add(cmcurl 
- DOWNLOAD_COMMAND "" - SOURCE_DIR "${curl_source}" - BINARY_DIR "${curl_binary}" - CMAKE_GENERATOR ${gen} - CMAKE_ARGS - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} - -DBUILD_SHARED_LIBS:BOOL=ON - -DBUILD_TESTING:BOOL=OFF - -DBUILD_CURL_TESTS:BOOL=OFF - -DBUILD_CURL_EXE:BOOL=OFF - -DCURL_DISABLE_LDAP:BOOL=ON - -DCURL_DISABLE_LDAPS:BOOL=ON - ${titan_compiler_args} - ${titan_binary_args} - ${cmcurl_EXTRA_ARGS} - -DTRIGGER_REBUILD:STRING=0 - INSTALL_COMMAND "" - DEPENDS ${cmcurl_deps} -) diff --git a/CMake/cdat_modules/cmor_deps.cmake b/CMake/cdat_modules/cmor_deps.cmake deleted file mode 100644 index 719a3c0015..0000000000 --- a/CMake/cdat_modules/cmor_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CMOR_deps ${pkgconfig_pkg} ${python_pkg} ${hdf5_pkg} ${netcdf_pkg} ${zlib_pkg} ${uuid_pkg} ${udunits2_pkg} ${cdat_pkg}) diff --git a/CMake/cdat_modules/cmor_external.cmake b/CMake/cdat_modules/cmor_external.cmake deleted file mode 100644 index 5cf4053228..0000000000 --- a/CMake/cdat_modules/cmor_external.cmake +++ /dev/null @@ -1,22 +0,0 @@ -set(cmor_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/cmor") -set(cmor_binary_dir "${CMAKE_CURRENT_BINARY_DIR}/build/cmor") -set(cmor_install_dir "${cdat_EXTERNALS}") - -set(cmor_configure_args --with-netcdf=${netcdf_install} --with-udunits2=${udunits_install} --with-uuid=${uuid_install} --enable-fortran=yes --with-python=${CMAKE_INSTALL_PREFIX} --prefix=${CMAKE_INSTALL_PREFIX}) - -# it appears currently we only configure cmor but not build it. 
-ExternalProject_Add(CMOR - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${cmor_source_dir} - BUILD_IN_SOURCE 1 - ${GIT_CMD_STR_CMOR} - ${GIT_TAG} - INSTALL_DIR ${cmor_install_dir} - PATCH_COMMAND "" - CONFIGURE_COMMAND sh ${cmor_source_dir}/configure ${cmor_configure_args} - DEPENDS ${CMOR_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_CMOR) - unset(GIT_CMD_STR_CMOR) -endif() diff --git a/CMake/cdat_modules/cmor_pkg.cmake b/CMake/cdat_modules/cmor_pkg.cmake deleted file mode 100644 index e3b785ac39..0000000000 --- a/CMake/cdat_modules/cmor_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(CMOR_VERSION 2.9.2) -set(CMOR_BRANCH uvcdat-2.4.0) -set(CMOR_REPOSITORY ${GIT_PROTOCOL}github.com/PCMDI/cmor.git ) - -set(GIT_CMD_STR_CMOR GIT_REPOSITORY ${CMOR_REPOSITORY}) -set(GIT_TAG GIT_TAG "${CMOR_BRANCH}") -set (nm CMOR) -string(TOUPPER ${nm} uc_nm) - -if (CDAT_BUILD_ALL) - add_cdat_package(CMOR "" "" ON) -else() - add_cdat_package_dependent(CMOR "" "" ON "CDAT_BUILD_CMOR" OFF) -endif() diff --git a/CMake/cdat_modules/configobj_deps.cmake b/CMake/cdat_modules/configobj_deps.cmake deleted file mode 100644 index 1835fd3b45..0000000000 --- a/CMake/cdat_modules/configobj_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(configobj_deps ${python_pkg} ${six_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/configobj_external.cmake b/CMake/cdat_modules/configobj_external.cmake deleted file mode 100644 index ce77c8f039..0000000000 --- a/CMake/cdat_modules/configobj_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# configobj -# -set(configobj_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/configobj") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/configobj_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/configobj_build_step.cmake" - @ONLY -) - -set(configobj_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/configobj_build_step.cmake) - -ExternalProject_Add(configobj - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR 
${configobj_source_dir} - URL ${CONFIGOBJ_URL}/${CONFIGOBJ_GZ} - URL_MD5 ${CONFIGOBJ_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${configobj_build_command} - INSTALL_COMMAND "" - DEPENDS ${configobj_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/configobj_pkg.cmake b/CMake/cdat_modules/configobj_pkg.cmake deleted file mode 100644 index c236b86948..0000000000 --- a/CMake/cdat_modules/configobj_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set( CONFIGOBJ_MAJOR 5 ) -set( CONFIGOBJ_MINOR 0 ) -set( CONFIGOBJ_PATCH 6 ) -set( CONFIGOBJ_VERSION ${CONFIGOBJ_MAJOR}.${CONFIGOBJ_MINOR}.${CONFIGOBJ_PATCH} ) -set( CONFIGOBJ_URL ${LLNL_URL} ) -set( CONFIGOBJ_GZ configobj-${CONFIGOBJ_VERSION}.tar.gz ) -set( CONFIGOBJ_MD5 e472a3a1c2a67bb0ec9b5d54c13a47d6 ) - -set (nm CONFIGOBJ) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(CONFIGOBJ_SOURCE ${CONFIGOBJ_URL}/${CONFIGOBJ_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(configobj "" "" ON) -else() - add_cdat_package(configobj "" "" OFF) -endif() diff --git a/CMake/cdat_modules/coverage_deps.cmake b/CMake/cdat_modules/coverage_deps.cmake deleted file mode 100644 index d2744141de..0000000000 --- a/CMake/cdat_modules/coverage_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(coverage_deps ${python_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/coverage_external.cmake b/CMake/cdat_modules/coverage_external.cmake deleted file mode 100644 index e09cfb7fb5..0000000000 --- a/CMake/cdat_modules/coverage_external.cmake +++ /dev/null @@ -1,4 +0,0 @@ -# External coverage.py package -set(nm COVERAGE) - -include(pipinstaller) diff --git a/CMake/cdat_modules/coverage_pkg.cmake b/CMake/cdat_modules/coverage_pkg.cmake deleted file mode 100644 index 7e32eaa610..0000000000 --- a/CMake/cdat_modules/coverage_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(COVERAGE_MAJOR_SRC 4) -set(COVERAGE_MINOR_SRC 0) -set(COVERAGE_PATCH_SRC 3) - -set(COVERAGE_VERSION 
${COVERAGE_MAJOR_SRC}.${COVERAGE_MINOR_SRC}.${COVERAGE_PATCH_SRC}) -set(COVERAGE_GZ coverage-${COVERAGE_VERSION}.tar.gz) -set(COVERAGE_SOURCE ${LLNL_URL}/${COVERAGE_GZ}) -set(COVERAGE_MD5 c7d3db1882484022c81bf619be7b6365) - -add_cdat_package_dependent(COVERAGE "" "" ON "CDAT_MEASURE_COVERAGE" OFF) diff --git a/CMake/cdat_modules/cryptography_deps.cmake b/CMake/cdat_modules/cryptography_deps.cmake deleted file mode 100644 index cad6e0ddd4..0000000000 --- a/CMake/cdat_modules/cryptography_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CRYPTOGRAPHY_deps ${cffi_pkg} ${enum34_pkg} ${idna_pkg} ${ipaddress_pkg} ${openssl_pkg} ${pip_pkg} ${pyasn1_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/cryptography_external.cmake b/CMake/cdat_modules/cryptography_external.cmake deleted file mode 100644 index 9e10cb4bb7..0000000000 --- a/CMake/cdat_modules/cryptography_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm CRYPTOGRAPHY) - -# Set LDFlags and CFlags to make it easier to find OpenSSL -list(APPEND USR_ENVS - "LDFLAGS=-L${OPENSSL_LIBRARY_DIR} $ENV{LDFLAGS}" - "CFLAGS=-I${OPENSSL_INCLUDE_DIR} $ENV{CFLAGS}" - ) - -include(pipinstaller) diff --git a/CMake/cdat_modules/cryptography_pkg.cmake b/CMake/cdat_modules/cryptography_pkg.cmake deleted file mode 100644 index 0b5671da14..0000000000 --- a/CMake/cdat_modules/cryptography_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CRYPTOGRAPHY_MAJOR_SRC 1) -set(CRYPTOGRAPHY_MINOR_SRC 3) -set(CRYPTOGRAPHY_PATCH_SRC 2) - -set(CRYPTOGRAPHY_VERSION ${CRYPTOGRAPHY_MAJOR_SRC}.${CRYPTOGRAPHY_MINOR_SRC}.${CRYPTOGRAPHY_PATCH_SRC}) -set(CRYPTOGRAPHY_GZ cryptography-${CRYPTOGRAPHY_VERSION}.tar.gz) -set(CRYPTOGRAPHY_SOURCE ${LLNL_URL}/${CRYPTOGRAPHY_GZ}) -set(CRYPTOGRAPHY_MD5 0359190f291824dc8ad9e6d477a607b2) - -add_cdat_package_dependent(CRYPTOGRAPHY "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/curl_deps.cmake 
b/CMake/cdat_modules/curl_deps.cmake deleted file mode 100644 index 432b4d319c..0000000000 --- a/CMake/cdat_modules/curl_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CURL_deps ${pkgconfig_pkg} ${libXML2_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/curl_external.cmake b/CMake/cdat_modules/curl_external.cmake deleted file mode 100644 index 6dd77c3738..0000000000 --- a/CMake/cdat_modules/curl_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(curl_source "${CMAKE_CURRENT_BINARY_DIR}/build/CURL") -set(curl_install "${cdat_EXTERNALS}") - -ExternalProject_Add(CURL - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${curl_source} - INSTALL_DIR ${curl_install} - URL ${CURL_URL}/${CURL_GZ} - URL_MD5 ${CURL_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${CURL_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/curl_pkg.cmake b/CMake/cdat_modules/curl_pkg.cmake deleted file mode 100644 index 6946ad58d3..0000000000 --- a/CMake/cdat_modules/curl_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(CURL_MAJOR 7) -set(CURL_MINOR 22) -set(CURL_PATCH 0) -set(CURL_MAJOR_SRC 7) -set(CURL_MINOR_SRC 33) -set(CURL_PATCH_SRC 0) -set(CURL_URL ${LLNL_URL}) -set(CURL_GZ curl-${CURL_MAJOR_SRC}.${CURL_MINOR_SRC}.${CURL_PATCH_SRC}.tar.gz) -set(CURL_MD5 c8a4eaac7ce7b0d1bf458d62ccd4ef93 ) -set(CURL_VERSION ${CURL_MAJOR_SRC}.${CURL_MINOR_SRC}.${CURL_PATCH_SRC}) -set(CURL_SOURCE ${CURL_URL}/${CURL_GZ}) - -add_cdat_package(CURL "" "" OFF) diff --git a/CMake/cdat_modules/curses_deps.cmake b/CMake/cdat_modules/curses_deps.cmake deleted file mode 100644 index 1926beb7c7..0000000000 --- a/CMake/cdat_modules/curses_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(curses_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/curses_external.cmake b/CMake/cdat_modules/curses_external.cmake deleted file mode 
100644 index 2fe0ea547b..0000000000 --- a/CMake/cdat_modules/curses_external.cmake +++ /dev/null @@ -1,22 +0,0 @@ -set(curses_source "${CMAKE_CURRENT_BINARY_DIR}/build/curses") -set(curses_install "${cdat_EXTERNALS}") -set(curses_conf_args) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/curses_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/curses_patch_step.cmake - @ONLY) - -set(curses_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/curses_patch_step.cmake) - -ExternalProject_Add(Curses - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${curses_source} - INSTALL_DIR ${curses_install} - URL ${CURSES_URL}/${CURSES_GZ} - URL_MD5 ${CURSES_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${curses_PATCH_COMMAND} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${curses_conf_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${curses_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/curses_pkg.cmake b/CMake/cdat_modules/curses_pkg.cmake deleted file mode 100644 index 29b6d52406..0000000000 --- a/CMake/cdat_modules/curses_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set (package Curses) -string(TOUPPER ${package} package_uc) - -set(${package_uc}_MAJOR_SRC 6) -set(${package_uc}_MINOR_SRC 0) -set(${package_uc}_PATCH_SRC 0) -set(${package_uc}_URL ${LLNL_URL}) -#set(${package_uc}_GZ ncurses-${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.${${package_uc}_PATCH_SRC}.tar.gz) -set(${package_uc}_GZ ncurses-${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.tar.gz) -set(${pacakge_uc}_MD5 931959c0e1a9949999407b025cf44d3d) -#set(${package_uc}_MD5 8cb9c412e5f2d96bc6f459aa8c6282a1) -set(${package_uc}_SOURCE ${${package_uc}_URL}/${${package_uc}_GZ}) -set(${package_uc}_MD5 ${${package_uc}_MD5}) - -set(${package_uc}_VERSION ${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}) -add_cdat_package(${package} "" "" OFF) diff --git a/CMake/cdat_modules/cycler_deps.cmake 
b/CMake/cdat_modules/cycler_deps.cmake deleted file mode 100644 index 6c4db45355..0000000000 --- a/CMake/cdat_modules/cycler_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(cycler_deps ${python_pkg} ${setuptools_pkg} ${six_pkg}) diff --git a/CMake/cdat_modules/cycler_external.cmake b/CMake/cdat_modules/cycler_external.cmake deleted file mode 100644 index 5cd06b6e89..0000000000 --- a/CMake/cdat_modules/cycler_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# Cycler - -set(Cycler_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cycler") - -ExternalProject_Add(Cycler - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Cycler_source} - URL ${CYCLER_URL}/${CYCLER_GZ} - URL_MD5 ${CYCLER_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install --old-and-unmanageable ${PYTHON_EXTRA_PREFIX} - DEPENDS ${cycler_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/cycler_pkg.cmake b/CMake/cdat_modules/cycler_pkg.cmake deleted file mode 100644 index b2310801a3..0000000000 --- a/CMake/cdat_modules/cycler_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CYCLER_MAJOR 0) -set(CYCLER_MINOR 9) -set(CYCLER_PATCH 0) -set(CYCLER_VERSION ${CYCLER_MAJOR}.${CYCLER_MINOR}.${CYCLER_PATCH}) -set(CYCLER_URL ${LLNL_URL}) -set(CYCLER_GZ cycler-${CYCLER_VERSION}.tar.gz) -set(CYCLER_MD5 c10ade5ca3f0aadf575eb25203b225a5) -set(CYCLER_SOURCE ${CYCLER_URL}/${CYCLER_GZ}) - -add_cdat_package_dependent(Cycler "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/cython_deps.cmake b/CMake/cdat_modules/cython_deps.cmake deleted file mode 100644 index eab0a78bef..0000000000 --- a/CMake/cdat_modules/cython_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Cython_deps ${pkgconfig_pkg} ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/cython_external.cmake b/CMake/cdat_modules/cython_external.cmake deleted file mode 100644 index a059bdd30e..0000000000 --- 
a/CMake/cdat_modules/cython_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ -# Cython -# -# --old-and-unmangeable solution avoids the use of eggs -# and forces to create a directory. -# this seems to fix issues of the type encountered in -# bug #1192 and #1486 - -set(Cython_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cython") - -ExternalProject_Add(Cython - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Cython_source} - URL ${CYTHON_URL}/${CYTHON_GZ} - URL_MD5 ${CYTHON_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install --old-and-unmanageable ${PYTHON_EXTRA_PREFIX} - DEPENDS ${Cython_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/cython_pkg.cmake b/CMake/cdat_modules/cython_pkg.cmake deleted file mode 100644 index 3d1fe53d5a..0000000000 --- a/CMake/cdat_modules/cython_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CYTHON_MAJOR 0) -set(CYTHON_MINOR 23) -set(CYTHON_PATCH 4) -set(CYTHON_VERSION ${CYTHON_MAJOR}.${CYTHON_MINOR}.${CYTHON_PATCH}) -set(CYTHON_URL ${LLNL_URL} ) -set(CYTHON_GZ Cython-${CYTHON_VERSION}.tar.gz) -set(CYTHON_MD5 157df1f69bcec6b56fd97e0f2e057f6e) -set(CYTHON_SOURCE ${CYTHON_URL}/${CYTHON_GZ}) - -add_cdat_package_dependent(Cython "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/data_deps.cmake b/CMake/cdat_modules/data_deps.cmake deleted file mode 100644 index 8b13789179..0000000000 --- a/CMake/cdat_modules/data_deps.cmake +++ /dev/null @@ -1 +0,0 @@ - diff --git a/CMake/cdat_modules/data_pkg.cmake b/CMake/cdat_modules/data_pkg.cmake deleted file mode 100644 index d4be977a88..0000000000 --- a/CMake/cdat_modules/data_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -# Do we download the data ? 
-option(CDAT_DOWNLOAD_SAMPLE_DATA "Download sample data" ON) -if (CDAT_BUILD_LEAN) - message("[INFO] Disabling download data for ESGF") - set(CDAT_DOWNLOAD_SAMPLE_DATA OFF) -endif() - -if (OFFLINE_BUILD) - message("[INFO] Disabling download data for offline build") - set(CDAT_DOWNLOAD_SAMPLE_DATA OFF) -endif() - -if (CDAT_DOWNLOAD_SAMPLE_DATA) - set(SAMPLE_DATA "") -else() - set(SAMPLE_DATA --disable-sampledata) -endif() diff --git a/CMake/cdat_modules/dateutils_deps.cmake b/CMake/cdat_modules/dateutils_deps.cmake deleted file mode 100644 index 08ee1bda77..0000000000 --- a/CMake/cdat_modules/dateutils_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(DATEUTILS_deps ${python_pkg} ${pip_pkg} ${six_pkg}) diff --git a/CMake/cdat_modules/dateutils_external.cmake b/CMake/cdat_modules/dateutils_external.cmake deleted file mode 100644 index e157432d13..0000000000 --- a/CMake/cdat_modules/dateutils_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -# --old-and-unmangeable solution avoids the use of eggs -# and forces to create a directory. 
-# this seems to fix issues of the type encountered in -# bug #1192 and #1486 - -set(nm DATEUTILS) -set(USER_INSTALL_OPTIONS --old-and-unmanageable) -include(pipinstaller) -unset(USER_INSTALL_OPTIONS) diff --git a/CMake/cdat_modules/dateutils_pkg.cmake b/CMake/cdat_modules/dateutils_pkg.cmake deleted file mode 100644 index 9b1fe3fd57..0000000000 --- a/CMake/cdat_modules/dateutils_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(DATEUTILS_MAJOR_SRC 2) -set(DATEUTILS_MINOR_SRC 2) -set(DATEUTILS_PATCH_SRC -) - -set(DATEUTILS_VERSION ${DATEUTILS_MAJOR_SRC}.${DATEUTILS_MINOR_SRC}.${DATEUTILS_PATCH_SRC}) -set(DATEUTILS_VERSION ${DATEUTILS_MAJOR_SRC}.${DATEUTILS_MINOR_SRC}) -set(DATEUTILS_GZ python-dateutil-${DATEUTILS_VERSION}.tar.gz) -set(DATEUTILS_SOURCE ${LLNL_URL}/${DATEUTILS_GZ}) -set(DATEUTILS_MD5 c1f654d0ff7e33999380a8ba9783fd5c) - -add_cdat_package_dependent(DATEUTILS "" "" OFF "NOT CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/distribute_deps.cmake b/CMake/cdat_modules/distribute_deps.cmake deleted file mode 100644 index d6313c1c93..0000000000 --- a/CMake/cdat_modules/distribute_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(distribute_deps ${setuptools_pkg}) diff --git a/CMake/cdat_modules/distribute_external.cmake b/CMake/cdat_modules/distribute_external.cmake deleted file mode 100644 index c8f536fa01..0000000000 --- a/CMake/cdat_modules/distribute_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -if (NOT OFFLINE_BUILD) - set(EGG_GZ "distribute==${DISTRIBUTE_VERSION}") -else () - set(EGG_GZ ${CDAT_PACKAGE_CACHE_DIR}/${DISTRIBUTE_GZ}) -endif() - -ExternalProject_Add(distribute - DOWNLOAD_COMMAND "" - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND ${EASY_INSTALL_BINARY} ${EGG_GZ} - DEPENDS ${distribute_deps} - ${ep_log_options} - ) diff 
--git a/CMake/cdat_modules/distribute_pkg.cmake b/CMake/cdat_modules/distribute_pkg.cmake deleted file mode 100644 index 2a0415f6b1..0000000000 --- a/CMake/cdat_modules/distribute_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(DISTRIBUTE_MAJOR_SRC 0) -set(DISTRIBUTE_MINOR_SRC 6) -set(DISTRIBUTE_PATCH_SRC 45) - -set (nm DISTRIBUTE) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(DISTRIBUTE_GZ distribute-${DISTRIBUTE_VERSION}.tar.gz) -set(DISTRIBUTE_SOURCE ${LLNL_URL}/${DISTRIBUTE_GZ}) -set(DISTRIBUTE_MD5 8953f2c07e6700dabf2ec150129b8c31 ) - -add_cdat_package(distribute "" "" OFF) diff --git a/CMake/cdat_modules/docutils_deps.cmake b/CMake/cdat_modules/docutils_deps.cmake deleted file mode 100644 index ef9fc3c52c..0000000000 --- a/CMake/cdat_modules/docutils_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(docutils_deps ${pip_pkg} ${jinja2_pkg}) diff --git a/CMake/cdat_modules/docutils_external.cmake b/CMake/cdat_modules/docutils_external.cmake deleted file mode 100644 index 36bdaedb11..0000000000 --- a/CMake/cdat_modules/docutils_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ -# create an external project to install docutils, -# and configure and build it - -set(nm docutils) -set(OLD "OFF") -include(pipinstaller) diff --git a/CMake/cdat_modules/docutils_pkg.cmake b/CMake/cdat_modules/docutils_pkg.cmake deleted file mode 100644 index 1aaa2505d9..0000000000 --- a/CMake/cdat_modules/docutils_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(docutils_MAJOR_SRC 0) -set(docutils_MINOR_SRC 10) -set(docutils_PATCH_SRC ) - -set (nm docutils) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}) -set(DOCUTILS_GZ docutils-${DOCUTILS_VERSION}.tar.gz) -set(DOCUTILS_SOURCE ${LLNL_URL}/${DOCUTILS_GZ}) -set(DOCUTILS_MD5 d8d4660c08302c791b2d71a155a2f4bc ) - -add_cdat_package_dependent(docutils "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/enum34_deps.cmake 
b/CMake/cdat_modules/enum34_deps.cmake deleted file mode 100644 index ed6c021a4a..0000000000 --- a/CMake/cdat_modules/enum34_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ENUM34_deps ${python_pkg}) diff --git a/CMake/cdat_modules/enum34_external.cmake b/CMake/cdat_modules/enum34_external.cmake deleted file mode 100644 index 2edf14978e..0000000000 --- a/CMake/cdat_modules/enum34_external.cmake +++ /dev/null @@ -1,8 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm ENUM34) - -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/enum34_pkg.cmake b/CMake/cdat_modules/enum34_pkg.cmake deleted file mode 100644 index b4a57ec13d..0000000000 --- a/CMake/cdat_modules/enum34_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(ENUM34_MAJOR_SRC 1) -set(ENUM34_MINOR_SRC 1) -set(ENUM34_PATCH_SRC 2) - -set(ENUM34_VERSION ${ENUM34_MAJOR_SRC}.${ENUM34_MINOR_SRC}.${ENUM34_PATCH_SRC}) -set(ENUM34_GZ enum34-${ENUM34_VERSION}.tar.gz) -set(ENUM34_SOURCE ${LLNL_URL}/${ENUM34_GZ}) -set(ENUM34_MD5 025bb71b3f9d2fad15d0ee53e48dc873) - -add_cdat_package_dependent(ENUM34 "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/eof2_deps.cmake b/CMake/cdat_modules/eof2_deps.cmake deleted file mode 100644 index fc79a9356a..0000000000 --- a/CMake/cdat_modules/eof2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(eof2_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/eof2_external.cmake b/CMake/cdat_modules/eof2_external.cmake deleted file mode 100644 index d1d98ee83e..0000000000 --- a/CMake/cdat_modules/eof2_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# EOF2 -# -set(eof2_source "${CMAKE_CURRENT_BINARY_DIR}/build/eof2") - -ExternalProject_Add(eof2 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${eof2_source} - URL ${eof2_URL}/${eof2_GZ} - URL_MD5 ${eof2_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install 
${PYTHON_EXTRA_PREFIX} - DEPENDS ${eof2_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/eof2_pkg.cmake b/CMake/cdat_modules/eof2_pkg.cmake deleted file mode 100644 index 89c3740679..0000000000 --- a/CMake/cdat_modules/eof2_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(eof2_MAJOR ) -set(eof2_MINOR ) -set(eof2_VERSION 620a921b46b) -set(eof2_URL ${LLNL_URL} ) -set(eof2_GZ eof2-${eof2_VERSION}.zip) -set(eof2_MD5 39e21a8633f272dc8dc748adb4c7f0e8) -set(eof2_SOURCE ${eof2_URL}/${eof2_GZ}) - -add_cdat_package_dependent(eof2 "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/eofs_deps.cmake b/CMake/cdat_modules/eofs_deps.cmake deleted file mode 100644 index 2746e30df6..0000000000 --- a/CMake/cdat_modules/eofs_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(eofs_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/eofs_external.cmake b/CMake/cdat_modules/eofs_external.cmake deleted file mode 100644 index 23c9f26fdc..0000000000 --- a/CMake/cdat_modules/eofs_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# Windfield` -# -set(eofs_source "${CMAKE_CURRENT_BINARY_DIR}/build/eofs") - -ExternalProject_Add(eofs - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${eofs_source} - URL ${eofs_URL}/${eofs_GZ} - URL_MD5 ${eofs_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${eofs_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/eofs_pkg.cmake b/CMake/cdat_modules/eofs_pkg.cmake deleted file mode 100644 index 7fbe79aeb4..0000000000 --- a/CMake/cdat_modules/eofs_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(eofs_MAJOR_SRC 1) -set(eofs_MINOR_SRC 1) -set(eofs_PATCH_SRC 0) -set(eofs_VERSION ${eofs_MAJOR_SRC}.${eofs_MINOR_SRC}.${eofs_PATCH_SRC}) -set(eofs_URL ${LLNL_URL}) -set(eofs_GZ eofs-${eofs_VERSION}.tar.gz) -set(eofs_MD5 52fce9f666d540069c90a6c109fcb3b4) -set(eofs_SOURCE ${eofs_URL}/${eofs_GZ}) - 
-add_cdat_package_dependent(eofs "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/esmf_deps.cmake b/CMake/cdat_modules/esmf_deps.cmake deleted file mode 100644 index 631f2a9e45..0000000000 --- a/CMake/cdat_modules/esmf_deps.cmake +++ /dev/null @@ -1,5 +0,0 @@ -set(ESMF_deps ${pkgconfig_pkg} ${python_pkg}) - -if(CDAT_BUILD_ESMF_PARALLEL) - set(ESMF_deps ${mpi_pkg} ${ESMF_deps}) -endif() diff --git a/CMake/cdat_modules/esmf_external.cmake b/CMake/cdat_modules/esmf_external.cmake deleted file mode 100644 index eaf9518ade..0000000000 --- a/CMake/cdat_modules/esmf_external.cmake +++ /dev/null @@ -1,78 +0,0 @@ -set(ESMF_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/ESMF" CACHE INTERNAL "") -set(ESMF_source "${CMAKE_CURRENT_BINARY_DIR}/build/ESMF" CACHE INTERNAL "") -set(ESMF_install "${cdat_EXTERNALS}" CACHE INTERNAL "") -set(ESMF_pthreads "OFF") -set(ESMF_os "${CDAT_BUILD_ESMF_OS}") -set(ESMF_compiler "${CDAT_BUILD_ESMF_COMPILER}") -set(ESMF_abi "${CDAT_BUILD_ESMF_ABI}") -set(ESMF_openmp "ON") - -if(APPLE) - if("${CMAKE_C_COMPILER_ID}" STREQUAL "Clang" AND ${CMAKE_C_COMPILER_VERSION} VERSION_GREATER 4.2) - # xcode 5 clang does not support openmp - set(ESMF_openmp "OFF") - endif() -endif() - -# Check if ESMF should be built in parallel -set(emsf_enable_mpi FALSE) -if(CDAT_BUILD_ESMF_PARALLEL) - set(emsf_enable_mpi TRUE) -endif() - -if("${emsf_enable_mpi}") - set(ESMF_comm "${CDAT_BUILD_ESMF_COMM}") -else() - message("[INFO] CDAT will build ESMF serial") - set(ESMF_comm "mpiuni") -endif() - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMF_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/ESMF_make_step.cmake - @ONLY -) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMF_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/ESMF_install_step.cmake - @ONLY -) - -set(ESMF_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMF_make_step.cmake) -set(ESMF_install_command ${CMAKE_COMMAND} -P 
${cdat_CMAKE_BINARY_DIR}/ESMF_install_step.cmake) - -# ESMF Python interface. Install after ESMF is done. -set(ESMP_source "${ESMF_source_dir}/ESMP") - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMP_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/ESMP_install_step.cmake - @ONLY -) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMP_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/ESMP_patch_step.cmake - @ONLY -) - -set(ESMP_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMP_install_step.cmake) -set(ESMP_patch_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMP_patch_step.cmake) - -ExternalProject_Add(ESMF - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ESMF_source_dir} - INSTALL_DIR ${ESMF_install} - URL ${ESMF_URL}/${ESMF_GZ} - URL_MD5 ${ESMF_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND ${ESMF_build_command} - INSTALL_COMMAND ${ESMF_install_command} - INSTALL_COMMAND ${ESMP_install_command} - PATCH_COMMAND ${ESMP_patch_command} - DEPENDS ${ESMF_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/esmf_pkg.cmake b/CMake/cdat_modules/esmf_pkg.cmake deleted file mode 100644 index 0fde092697..0000000000 --- a/CMake/cdat_modules/esmf_pkg.cmake +++ /dev/null @@ -1,57 +0,0 @@ -include(CMakeDependentOption) - -set(ESMF_MAJOR 6) -set(ESMF_MINOR 3) -set(ESMF_PATCH 0rp1) -set(ESMP_MAJOR 01) -set(ESMF_VERSION ${ESMF_MAJOR}_${ESMF_MINOR}_${ESMF_PATCH}) -set(ESMF_URL ${LLNL_URL}) -set(ESMF_GZ esmp.ESMF_${ESMF_VERSION}_ESMP_${ESMP_MAJOR}.tar.bz2) -set(ESMF_MD5 a9be4fb51da1bc1fab027137297c5030 ) -set(ESMF_SOURCE ${ESMF_URL}/${ESMF_GZ}) - -if (CDAT_BUILD_LEAN) - option(CDAT_BUILD_ESMF_ESMP "Build python version Earth System Modeling Framework" OFF) -else () - option(CDAT_BUILD_ESMF_ESMP "Build python version Earth System Modeling Framework" ON) -endif() - -cmake_dependent_option(CDAT_BUILD_ESMF_PARALLEL - "Build parallel version of Earth System Modeling Framework 
library" ON - "CDAT_BUILD_PARALLEL" OFF -) - -set(TXCMAKE_DIR ${cdat_SOURCE_DIR}/contrib/sciMake) -include(${TXCMAKE_DIR}/sciFuncsMacros.cmake) -include(${TXCMAKE_DIR}/sciFortranChecks.cmake) - -if("${CMAKE_Fortran_COMPILER_ID}" STREQUAL GNU) - string(REGEX MATCHALL "[0-9]+\\." test_version_list ${Fortran_VERSION}) - string(SUBSTRING ${Fortran_VERSION} 0 3 Fortran_MAJOR_VERSION) - LIST(GET test_version_list 0 Fortran_MAJOR_VERSION) - LIST(GET test_version_list 1 Fortran_MINOR_VERSION) -else() - set(Fortran_MINOR_VERSION "") -endif() - -if(CDAT_BUILD_ESMF_ESMP) - if("${CMAKE_Fortran_COMPILER_ID}" STREQUAL GNU) - # GNU gfortran must be >= 4.3 last 4.2 gfortran release was 4.2.4 - if(${Fortran_VERSION} VERSION_GREATER "4.2.9" AND ${Fortran_VERSION} VERSION_LESS "5.2.2") - ## On APPLE need to test for -arch as well! - add_cdat_package(ESMF "" "Build ESMF" ON) - else() - message(FATAL_ERROR "[ERROR] gfortran must be 4.3 <= version < 5.2.2; you have ${Fortran_VERSION}") - endif() - else() - add_cdat_package(ESMF "" "Build ESMF" ON) - message("[INFO] Fortran Compiler is: ${CMAKE_Fortran_COMPILER}") - endif() - - # the following may need to be adjusted on Crays, otherwise the defaults will likely apply - set(CDAT_BUILD_ESMF_OS "${CMAKE_SYSTEM_NAME}" CACHE STRING "ESMF_OS env variable, may need to change to Unicos on Crays") - set(CDAT_BUILD_ESMF_COMPILER "gfortran" CACHE STRING "ESMF_COMPILER env variable, choices are gfortran, intel, pgi, g95, or nag") - set(CDAT_BUILD_ESMF_COMM "openmpi" CACHE STRING "ESMF_COMM env variable, choices are openmpi, mpiuni, mpi, mpich2, or mvapich2") - set(CDAT_BUILD_ESMF_ABI "64" CACHE STRING "ESMF_ABI env variable, choices are 32 or 64") -endif() - diff --git a/CMake/cdat_modules/ezget_deps.cmake b/CMake/cdat_modules/ezget_deps.cmake deleted file mode 100644 index e859d355d6..0000000000 --- a/CMake/cdat_modules/ezget_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ezget_deps ${netcdf_pkg} ${libdrs_pkg} ${libcdms_pkg}) diff --git 
a/CMake/cdat_modules/ezget_external.cmake b/CMake/cdat_modules/ezget_external.cmake deleted file mode 100644 index 078bebda05..0000000000 --- a/CMake/cdat_modules/ezget_external.cmake +++ /dev/null @@ -1,43 +0,0 @@ -set(ezget_source "${CMAKE_CURRENT_BINARY_DIR}/build/ezget") -set(ezget_install "${cdat_EXTERNALS}") - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ezget_Makefile.gfortran.in - ${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile - ) - -if(DEFINED GIT_CMD_STR_EZGET ) - message("[INFO] [ezget] Installing ${nm} from ${GIT_CMD_STR_EZGET}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [ezget] Installed ${nm} from tarball ${EZGET_GZ}") - set(URL_STR URL ${EZGET_URL}/${EZGET_GZ}) - set(URL_MD5_STR URL_MD5 ${EZGET_MD5}) - set(GIT_CMD_STR_EZGET ) - set(GIT_TAG ) -endif() -set(EZGET_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile) -set(EZGET_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile^^install) -set(EZGET_BUILD_ARGS -fPIC) - -ExternalProject_Add(ezget - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ezget_source} - INSTALL_DIR ${ezget_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_EZGET} - ${GIT_TAG} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${EZGET_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${EZGET_MAKE_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${EZGET_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${EZGET_MAKE_INSTALL_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${ezget_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_EZGET) - unset(GIT_CMD_STR_EZGET) -endif() diff --git a/CMake/cdat_modules/ezget_pkg.cmake b/CMake/cdat_modules/ezget_pkg.cmake deleted file mode 100644 index a18d67cfd1..0000000000 --- a/CMake/cdat_modules/ezget_pkg.cmake +++ /dev/null @@ 
-1,10 +0,0 @@ -set(EZGET_VERSION 1.0.0) -set(EZGET_BRANCH master) -set(EZGET_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/EzGet.git ) - -set(GIT_CMD_STR_EZGET GIT_REPOSITORY ${EZGET_REPOSITORY}) -set(GIT_TAG GIT_TAG "${EZGET_BRANCH}") - -if (CDAT_BUILD_PCMDI) - add_cdat_package(ezget "" "" ON) -endif() diff --git a/CMake/cdat_modules/ffi_deps.cmake b/CMake/cdat_modules/ffi_deps.cmake deleted file mode 100644 index 548c543fe5..0000000000 --- a/CMake/cdat_modules/ffi_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(FFI_deps ${pip_pkg}) diff --git a/CMake/cdat_modules/ffi_external.cmake b/CMake/cdat_modules/ffi_external.cmake deleted file mode 100644 index df33e73ac2..0000000000 --- a/CMake/cdat_modules/ffi_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(ffi_source "${CMAKE_CURRENT_BINARY_DIR}/build/ffi-${FFI_MAJOR}.${FFI_MINOR}.${FFI_PATCH}") -set(ffi_install "${cdat_EXTERNALS}") - -ExternalProject_Add(FFI - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ffi_source} - INSTALL_DIR ${ffi_install} - URL ${FFI_URL}/${FFI_BZ2} - URL_MD5 ${FFI_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${FFI_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/ffi_pkg.cmake b/CMake/cdat_modules/ffi_pkg.cmake deleted file mode 100644 index e7f0152d24..0000000000 --- a/CMake/cdat_modules/ffi_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set( FFI_MAJOR 3 ) -set( FFI_MINOR 1 ) -set( FFI_PATCH 5 ) -set(FFI_URL ${LLNL_URL}) -set(FFI_BZ2 libffi-${FFI_MAJOR}.${FFI_MINOR}.tar.gz) -set(FFI_MD5 f5898b29bbfd70502831a212d9249d10) - -set (nm FFI) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}) -set(FFI_SOURCE ${FFI_URL}/${FFI_BZ2}) - -add_cdat_package_dependent(FFI "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/ffmpeg_deps.cmake b/CMake/cdat_modules/ffmpeg_deps.cmake deleted file mode 100644 index 
b927816842..0000000000 --- a/CMake/cdat_modules/ffmpeg_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(FFMPEG_deps ${pkgconfig_pkg} ${x264_pkg}) diff --git a/CMake/cdat_modules/ffmpeg_external.cmake b/CMake/cdat_modules/ffmpeg_external.cmake deleted file mode 100644 index 1a2fe723ab..0000000000 --- a/CMake/cdat_modules/ffmpeg_external.cmake +++ /dev/null @@ -1,32 +0,0 @@ -# The FFMPEG external project for ParaView -set(ffmpeg_source "${CMAKE_CURRENT_BINARY_DIR}/build/FFMPEG") -set(ffmpeg_install "${cdat_EXTERNALS}") -set(ENV{PATH} $ENV{PATH}:${cdat_EXTERNALS}/bin) - -find_program(YASM_BIN "yasm") - -if (NOT YASM_BIN) - set(ffmpeg_conf_args --disable-yasm^^--enable-gpl^^--enable-libx264^^--extra-cxxflags=${ffmpeg_source}^^--enable-shared^^--enable-zlib) -else() - set(ffmpeg_conf_args --enable-gpl^^--enable-libx264^^--extra-cxxflags=${ffmpeg_source}^^--enable-shared^^--enable-zlib) -endif() - -ExternalProject_Add(FFMPEG - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ffmpeg_source} - INSTALL_DIR ${ffmpeg_install} - URL ${FFMPEG_URL}/${FFMPEG_GZ} - URL_MD5 ${FFMPEG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${ffmpeg_conf_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${FFMPEG_deps} - ${ep_log_options} - ) - -set(FFMPEG_INCLUDE_DIR ${ffmpeg_install}/include) -set(FFMPEG_avcodec_LIBRARY ${ffmpeg_install}/lib/libavcodec${_LINK_LIBRARY_SUFFIX}) -set(FFMPEG_avformat_LIBRARY ${ffmpeg_install}/lib/libavformat${_LINK_LIBRARY_SUFFIX}) -set(FFMPEG_avutil_LIBRARY ${ffmpeg_install}/lib/libavutil${_LINK_LIBRARY_SUFFIX}) -set(FFMPEG_swscale_LIBRARY ${ffmpeg_install}/lib/libswscale${_LINK_LIBRARY_SUFFIX}) diff --git a/CMake/cdat_modules/ffmpeg_pkg.cmake b/CMake/cdat_modules/ffmpeg_pkg.cmake deleted file mode 100644 index 65db298655..0000000000 --- a/CMake/cdat_modules/ffmpeg_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(FFMPEG_MAJOR_SRC 2) 
-set(FFMPEG_MINOR_SRC 7) -set(FFMPEG_PATCH_SRC 0) -set(FFMPEG_URL ${LLNL_URL}) -set(FFMPEG_GZ ffmpeg-${FFMPEG_MAJOR_SRC}.${FFMPEG_MINOR_SRC}.tar.gz) -set(FFMPEG_MD5 3ad0554981faf2c6deef23a1cd4c8c57) - -set (nm FFMPEG) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}) -set(FFMPEG_SOURCE ${FFMPEG_URL}/${FFMPEG_GZ}) -set(FFMPEG_ROOT ${cdat_EXTERNALS} CACHE PATH "Path to FFMPEG root directory") - -add_cdat_package_dependent(FFMPEG "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/fiona_deps.cmake b/CMake/cdat_modules/fiona_deps.cmake deleted file mode 100644 index 624113df20..0000000000 --- a/CMake/cdat_modules/fiona_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Fiona_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${shapely_pkg} ${gdal_pkg} ${cligj_pkg}) diff --git a/CMake/cdat_modules/fiona_external.cmake b/CMake/cdat_modules/fiona_external.cmake deleted file mode 100644 index 4d7e45c759..0000000000 --- a/CMake/cdat_modules/fiona_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ -# create an external project to install Fiona -# and configure and build it -set(nm Fiona) -set(USR_ENVS "GDAL_CONFIG=${cdat_EXTERNALS}/bin/gdal-config") -#set(USER_BUILD_EXT_OPTS "build_ext -I${cdat_EXTERNALS}/include -L${cdat_EXTERNALS}/lib -lgdal") -include(pipinstaller) diff --git a/CMake/cdat_modules/fiona_pkg.cmake b/CMake/cdat_modules/fiona_pkg.cmake deleted file mode 100644 index 1cd9024343..0000000000 --- a/CMake/cdat_modules/fiona_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set( FIONA_MAJOR_SRC 1 ) -set( FIONA_MINOR_SRC 6 ) -set( FIONA_PATCH_SRC 0 ) -set(FIONA_URL ${LLNL_URL}) -set(FIONA_GZ - Fiona-${FIONA_MAJOR_SRC}.${FIONA_MINOR_SRC}.${FIONA_PATCH_SRC}.tar.gz) -set(FIONA_MD5 40f945898c550721db715f69658cf7e9 ) -set(FIONA_SOURCE ${FIONA_URL}/${FIONA_GZ}) - -set (nm FIONA) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -if (CDAT_BUILD_ALL) - add_cdat_package(Fiona "" "" ON) 
-else() - add_cdat_package(Fiona "" "" OFF) -endif() diff --git a/CMake/cdat_modules/flake8_deps.cmake b/CMake/cdat_modules/flake8_deps.cmake deleted file mode 100644 index 490185ec02..0000000000 --- a/CMake/cdat_modules/flake8_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(flake8_deps ${python_pkg} ${setuptools_pkg} ${pyflakes_pkg} ${pep8_pkg} ${mccabe_pkg}) diff --git a/CMake/cdat_modules/flake8_external.cmake b/CMake/cdat_modules/flake8_external.cmake deleted file mode 100644 index 5f05cb3f85..0000000000 --- a/CMake/cdat_modules/flake8_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ -ExternalProject_Add(flake8 - DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}" - SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/flake8" - URL "${FLAKE8_SOURCE}" - URL_MD5 ${FLAKE8_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build - INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${flake8_deps} - ${ep_log_options} - ) - -if (APPLE) - set(FLAKE8_EXECUTABLE - "${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/flake8") -else() - set(FLAKE8_EXECUTABLE "${CMAKE_INSTALL_PREFIX}/bin/flake8") -endif() diff --git a/CMake/cdat_modules/flake8_pkg.cmake b/CMake/cdat_modules/flake8_pkg.cmake deleted file mode 100644 index f10ebf053d..0000000000 --- a/CMake/cdat_modules/flake8_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(nm flake8) -string(TOUPPER ${nm} uc_nm) - -set(${uc_nm}_MAJOR 2) -set(${uc_nm}_MINOR 4) -set(${uc_nm}_PATCH 1) -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_URL ${LLNL_URL}) -set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz) -set(${uc_nm}_MD5 ed45d3db81a3b7c88bd63c6e37ca1d65) - -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ}) - -if(BUILD_TESTING) - add_cdat_package(${nm} "" "" ON) -endif() diff --git a/CMake/cdat_modules/fontconfig_deps.cmake 
b/CMake/cdat_modules/fontconfig_deps.cmake deleted file mode 100644 index 87455d1355..0000000000 --- a/CMake/cdat_modules/fontconfig_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(fontconfig_deps ${pkgconfig_pkg} ${libxml2_pkg} ${freetype_pkg}) diff --git a/CMake/cdat_modules/fontconfig_external.cmake b/CMake/cdat_modules/fontconfig_external.cmake deleted file mode 100644 index fa57bc888e..0000000000 --- a/CMake/cdat_modules/fontconfig_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(fontconfig_source "${CMAKE_CURRENT_BINARY_DIR}/build/fontconfig") -set(fontconfig_install "${cdat_EXTERNALS}") - -ExternalProject_Add(fontconfig - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${fontconfig_source} - INSTALL_DIR ${fontconfig_install} - URL ${FTCFG_URL}/${FTCFG_GZ} - URL_MD5 ${FTCFG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=--disable-docs^^--enable-libxml2 -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${fontconfig_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/fontconfig_pkg.cmake b/CMake/cdat_modules/fontconfig_pkg.cmake deleted file mode 100644 index 9598115827..0000000000 --- a/CMake/cdat_modules/fontconfig_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(FTCFG_MAJOR 2) -set(FTCFG_MINOR 4) -set(FTCFG_PATCH 2) -set(FTCFG_MAJOR_SRC 2) -set(FTCFG_MINOR_SRC 10) -set(FTCFG_PATCH_SRC 1) -set(FTCFG_URL ${LLNL_URL}) -set(FTCFG_GZ fontconfig-${FTCFG_MAJOR_SRC}.${FTCFG_MINOR_SRC}.${FTCFG_PATCH_SRC}.tar.gz) -set(FTCFG_MD5 43808dd9153cff1c3ac302e94e024814) - -set (nm FTCFG) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(FONTCONFIG_VERSION ${FTCFG_VERSION}) -set(FONTCONFIG_SOURCE ${FTCFG_URL}/${FTCFG_GZ}) - -add_cdat_package_dependent(fontconfig "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) diff --git 
a/CMake/cdat_modules/freetype_deps.cmake b/CMake/cdat_modules/freetype_deps.cmake deleted file mode 100644 index 6d451c65ff..0000000000 --- a/CMake/cdat_modules/freetype_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(freetype_deps ${pkgconfig_pkg} ${png_pkg}) diff --git a/CMake/cdat_modules/freetype_external.cmake b/CMake/cdat_modules/freetype_external.cmake deleted file mode 100644 index 3af2943992..0000000000 --- a/CMake/cdat_modules/freetype_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ - -set(freetype_source "${CMAKE_CURRENT_BINARY_DIR}/build/freetype") -set(freetype_install "${cdat_EXTERNALS}") - -ExternalProject_Add(freetype - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${freetype_source} - INSTALL_DIR ${freetype_install} - URL ${FT_URL}/${FT_GZ} - URL_MD5 ${FT_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${freetype_deps} - ${ep_log_options} -) - -#ln -sf @EXTERNALS@/include/freetype2/freetype @EXTERNALS@/include/freetype - -ExternalProject_Add_Step(freetype symlink - COMMAND ${CMAKE_COMMAND} -E create_symlink ${cdat_EXTERNALS}/include/freetype2/freetype ${cdat_EXTERNALS}/include/freetype - COMMENT "Symlink include/freetype2/freetype include directory as include/freetype" - DEPENDEES install -) diff --git a/CMake/cdat_modules/freetype_pkg.cmake b/CMake/cdat_modules/freetype_pkg.cmake deleted file mode 100644 index 596ce205a6..0000000000 --- a/CMake/cdat_modules/freetype_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(FT_MAJOR 9) -set(FT_MINOR 7) -set(FT_PATCH 3) -set(FT_MAJOR_SRC 2) -set(FT_MINOR_SRC 4) -set(FT_PATCH_SRC 10) -set(FT_URL ${LLNL_URL}) -set(FT_GZ freetype-${FT_MAJOR_SRC}.${FT_MINOR_SRC}.${FT_PATCH_SRC}.tar.gz) -set(FT_MD5 4b1887901730ff2e12562ef30fa521d5) - -set (nm FT) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(FREETYPE_VERSION ${FT_VERSION}) 
-set(FREETYPE_SOURCE ${FT_URL}/${FT_GZ}) - - -add_cdat_package_dependent(freetype "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/g2clib_deps.cmake b/CMake/cdat_modules/g2clib_deps.cmake deleted file mode 100644 index a2994c8322..0000000000 --- a/CMake/cdat_modules/g2clib_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(g2clib_deps ${pkgconfig_pkg} ${png_pkg} ${zlib_pkg} ${jasper_pkg}) diff --git a/CMake/cdat_modules/g2clib_external.cmake b/CMake/cdat_modules/g2clib_external.cmake deleted file mode 100644 index 5a1406979b..0000000000 --- a/CMake/cdat_modules/g2clib_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ - -set(g2clib_source "${CMAKE_CURRENT_BINARY_DIR}/build/g2clib") -set(g2clib_install "${cdat_EXTERNALS}") - -ExternalProject_Add(g2clib - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${g2clib_source} - INSTALL_DIR ${g2clib_install} - URL ${G2CLIB_URL}/${G2CLIB_GZ} - URL_MD5 ${G2CLIB_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS ${g2clib_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/g2clib_pkg.cmake b/CMake/cdat_modules/g2clib_pkg.cmake deleted file mode 100644 index 41580b8eb8..0000000000 --- a/CMake/cdat_modules/g2clib_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(G2CLIB_MAJOR 1) -set(G2CLIB_MINOR 4) -set(G2CLIB_PATCH 0b) -set(G2CLIB_VERSION ${G2CLIB_MAJOR}.${G2CLIB_MINOR}.${G2CLIB_PATCH}) -set(G2CLIB_URL ${LLNL_URL}) -set(G2CLIB_GZ g2clib-${G2CLIB_VERSION}.tar.gz) -set(G2CLIB_MD5 72378d980b2f4d6b09fd86e23e884a4b) -set(G2CLIB_SOURCE ${G2CLIB_URL}/${G2CLIB_GZ}) - - -add_cdat_package(g2clib "" "" ON) diff --git a/CMake/cdat_modules/gdal_deps.cmake b/CMake/cdat_modules/gdal_deps.cmake deleted file mode 100644 index 3fbc8ce4d1..0000000000 --- a/CMake/cdat_modules/gdal_deps.cmake +++ 
/dev/null @@ -1 +0,0 @@ -set(gdal_deps ${pkgconfig_pkg} ${python_pkg} ${uuid_pkg} ${netcdf_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${jasper_pkg} ${geos_pkg} ${setuptools_pkg} ${proj4_pkg}) diff --git a/CMake/cdat_modules/gdal_external.cmake b/CMake/cdat_modules/gdal_external.cmake deleted file mode 100644 index 33e4c8e60a..0000000000 --- a/CMake/cdat_modules/gdal_external.cmake +++ /dev/null @@ -1,25 +0,0 @@ -set(gdal_source "${CMAKE_CURRENT_BINARY_DIR}/build/gdal") -set(gdal_install "${cdat_EXTERNALS}") -set(gdal_configure_args "--prefix=${cdat_EXTERNALS}^^--with-hdf5=${cdat_EXTERNALS}^^--with-netcdf=${cdat_EXTERNALS}^^--with-curl=${cdat_EXTERNALS}^^--with-geos=${cdat_EXTERNALS}/bin/geos-config^^--with-python=${PYTHON_EXECUTABLE}^^--with-jpeg=no^^--with-libtiff=internal^^--without-jpeg12^^--with-geotiff=internal^^--with-static-proj4=${cdat_EXTERNALS}/proj4") - -if (CDAT_BUILD_PARALLEL) - set(configure_file "cdatmpi_configure_step.cmake") -else() - set(configure_file "cdat_configure_step.cmake") -endif() -message("[GDAL] CONF FILE IS:"${configure_file}) -ExternalProject_Add(gdal - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${gdal_source} - INSTALL_DIR ${gdal_install} - URL ${GDAL_URL}/${GDAL_GZ} - URL_MD5 ${GDAL_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${gdal_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file} - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS "${gdal_deps}" - ${ep_log_options} -) diff --git a/CMake/cdat_modules/gdal_pkg.cmake b/CMake/cdat_modules/gdal_pkg.cmake deleted file mode 100644 index d8756b2bbf..0000000000 --- a/CMake/cdat_modules/gdal_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set( GDAL_MAJOR 1 ) -set( GDAL_MINOR 11 ) -set( GDAL_PATCH 2 ) 
-set(GDAL_URL ${LLNL_URL}) -set(GDAL_GZ gdal-${GDAL_MAJOR}.${GDAL_MINOR}.${GDAL_PATCH}.tar.gz) -set(GDAL_MD5 866a46f72b1feadd60310206439c1a76 ) - -set (nm GDAL) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(GDAL_SOURCE ${GDAL_URL}/${GDAL_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(gdal "" "Build the Geospatial Data Abstraction Library (GDAL) and python osgeo module" ON) -else() - add_cdat_package(gdal "" "Build the Geospatial Data Abstraction Library (GDAL) and python osgeo module" OFF) -endif() diff --git a/CMake/cdat_modules/geos_deps.cmake b/CMake/cdat_modules/geos_deps.cmake deleted file mode 100644 index 3b1cbf81b5..0000000000 --- a/CMake/cdat_modules/geos_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(GEOS_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/geos_external.cmake b/CMake/cdat_modules/geos_external.cmake deleted file mode 100644 index d7f8e65672..0000000000 --- a/CMake/cdat_modules/geos_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(geos_source "${CMAKE_CURRENT_BINARY_DIR}/build/geos-${GEOS_MAJOR}.${GEOS_MINOR}.${GEOS_PATCH}") -set(geos_install "${cdat_EXTERNALS}") - -ExternalProject_Add(GEOS - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${geos_source} - INSTALL_DIR ${geos_install} - URL ${GEOS_URL}/${GEOS_BZ2} - URL_MD5 ${GEOS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${GEOS_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/geos_pkg.cmake b/CMake/cdat_modules/geos_pkg.cmake deleted file mode 100644 index d2927b0200..0000000000 --- a/CMake/cdat_modules/geos_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set( GEOS_MAJOR 3 ) -set( GEOS_MINOR 3 ) -set( GEOS_PATCH 5 ) -set(GEOS_URL ${LLNL_URL}) -set(GEOS_BZ2 geos-${GEOS_MAJOR}.${GEOS_MINOR}.${GEOS_PATCH}.tar.bz2) -set(GEOS_MD5 2ba61afb7fe2c5ddf642d82d7b16e75b) - -set (nm GEOS) -string(TOUPPER 
${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(GEOS_SOURCE ${GEOS_URL}/${GEOS_BZ2}) - -add_cdat_package_dependent(GEOS "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/ghostscript_deps.cmake b/CMake/cdat_modules/ghostscript_deps.cmake deleted file mode 100644 index 0f4bedf966..0000000000 --- a/CMake/cdat_modules/ghostscript_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ghostscript_deps ${pkgconfig_pkg} ${zlib_pkg} ${jpeg_pkg} ${png_pkg} ${fontconfig_pkg} ${freetype_pkg} ${pixman_pkg} ${libXSLT_pkg} ${libXML2_pkg}) diff --git a/CMake/cdat_modules/ghostscript_external.cmake b/CMake/cdat_modules/ghostscript_external.cmake deleted file mode 100644 index fc322b66ac..0000000000 --- a/CMake/cdat_modules/ghostscript_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ - -set(ghostscript_source "${CMAKE_CURRENT_BINARY_DIR}/build/ghostscript") -set(ghostscript_install "${cdat_EXTERNALS}") - -set(ghostscripts_args "--with-drivers=PS,BMP --disable-cups") - -ExternalProject_Add(ghostscript - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ghostscript_source} - INSTALL_DIR ${ghostscript_install} - URL ${GS_URL}/${GS_GZ} - URL_MD5 ${GS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${ghostscripts_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DBUILD_ARGS=${ghostscript_source} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${ghostscript_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/ghostscript_pkg.cmake b/CMake/cdat_modules/ghostscript_pkg.cmake deleted file mode 100644 index d56e90ece1..0000000000 --- a/CMake/cdat_modules/ghostscript_pkg.cmake +++ /dev/null @@ -1,2 +0,0 @@ -add_cdat_package(ghostscript "" "" OFF) - diff --git a/CMake/cdat_modules/gifsicle_external.cmake b/CMake/cdat_modules/gifsicle_external.cmake deleted file mode 100644 
index 853f5d55a3..0000000000 --- a/CMake/cdat_modules/gifsicle_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(gifsicle_source "${CMAKE_CURRENT_BINARY_DIR}/build/gifsicle") -set(gifsicle_install "${cdat_EXTERNALS}") - -ExternalProject_Add(gifsicle - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${gifsicle_source} - INSTALL_DIR ${gifsicle_install} - URL ${GIFSICLE_URL}/${GIFSICLE_GZ} - URL_MD5 ${GIFSICLE_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${gifsicle_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/gsw_deps.cmake b/CMake/cdat_modules/gsw_deps.cmake deleted file mode 100644 index 9d0b198790..0000000000 --- a/CMake/cdat_modules/gsw_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(gsw_deps ${python_pkg} ${numpy_pkg}) diff --git a/CMake/cdat_modules/gsw_external.cmake b/CMake/cdat_modules/gsw_external.cmake deleted file mode 100644 index 24c3c0e585..0000000000 --- a/CMake/cdat_modules/gsw_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# gsw (Gibbs Seawater) -# -set(gsw_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/gsw") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/gsw_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/gsw_build_step.cmake" - @ONLY -) - -set(gsw_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/gsw_build_step.cmake) - -ExternalProject_Add(gsw - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${gsw_source_dir} - URL ${GSW_URL}/${GSW_GZ} - URL_MD5 ${GSW_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${gsw_build_command} - INSTALL_COMMAND "" - DEPENDS ${gsw_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/gsw_pkg.cmake b/CMake/cdat_modules/gsw_pkg.cmake deleted file mode 100644 index 127a403c0d..0000000000 --- a/CMake/cdat_modules/gsw_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set( GSW_MAJOR 3 ) -set( GSW_MINOR 0 ) -set( GSW_PATCH 3 ) -set( 
GSW_VERSION ${GSW_MAJOR}.${GSW_MINOR}.${GSW_PATCH} ) -set( GSW_URL ${LLNL_URL} ) -set( GSW_GZ python-gsw-${GSW_VERSION}.tar.gz ) -set( GSW_MD5 a522a9ab6ab41fb70064e0378e904ffd ) - -set (nm GSW) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(GSW_SOURCE ${GSW_URL}/${GSW_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(gsw "" "" ON) -else() - add_cdat_package(gsw "" "" OFF) -endif() diff --git a/CMake/cdat_modules/gui_support_deps.cmake b/CMake/cdat_modules/gui_support_deps.cmake deleted file mode 100644 index 3c7bc73790..0000000000 --- a/CMake/cdat_modules/gui_support_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(gui_support_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/gui_support_external.cmake b/CMake/cdat_modules/gui_support_external.cmake deleted file mode 100644 index 5d10b82e7b..0000000000 --- a/CMake/cdat_modules/gui_support_external.cmake +++ /dev/null @@ -1,20 +0,0 @@ -set(gui_support_source_dir "${cdat_SOURCE_DIR}/Packages/gui_support") -set(gui_support_binary_dir "${CMAKE_CURRENT_BINARY_DIR}/build/gui_support-build") -set(runtime_library_path ${CMAKE_INSTALL_PREFIX}/lib:${cdat_EXTERNALS}/lib) - -# BUILD_COMMAND env EXTERNALS=${cdat_EXTERNALS} ${LIBRARY_PATH}=${runtime_library_path} ${PYTHON_EXECUTABLE} setup.py build -# INSTALL_COMMAND env EXTERNALS=${cdat_EXTERNALS} ${LIBRARY_PATH}=${runtime_library_path} ${PYTHON_EXECUTABLE} setup.py install --prefix=${CMAKE_INSTALL_PREFIX} -ExternalProject_Add(gui_support - DOWNLOAD_DIR "" - SOURCE_DIR ${gui_support_source_dir} - BINARY_DIR ${gui_support_binary_dir} - BUILD_IN_SOURCE 0 - BUILD_COMMAND "" -# BUILD_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} EXTERNALS=${cdat_EXTERNALS} ${PYTHON_EXECUTABLE} ${gui_support_source_dir}/setup.py build -b ${gui_support_binary_dir} - INSTALL_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} EXTERNALS=${cdat_EXTERNALS} ${PYTHON_EXECUTABLE} 
${gui_support_source_dir}/setup.py build -b ${gui_support_binary_dir} install ${PYTHON_EXTRA_PREFIX} - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - DEPENDS ${gui_support_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/gui_support_pkg.cmake b/CMake/cdat_modules/gui_support_pkg.cmake deleted file mode 100644 index f4fcee7d67..0000000000 --- a/CMake/cdat_modules/gui_support_pkg.cmake +++ /dev/null @@ -1,5 +0,0 @@ -set(GUI_SUPPORT_SOURCE N/A) -set(GUI_SUPPORT_VERSION N/A) -set(GUI_SUPPORT_MD5 N/A) - -add_cdat_package_dependent(gui_support "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/h5py_deps.cmake b/CMake/cdat_modules/h5py_deps.cmake deleted file mode 100644 index f1ce2f917d..0000000000 --- a/CMake/cdat_modules/h5py_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(h5py_deps ${python_pkg} ${hdf5_pkg} ${numpy_pkg} ${cython_pkg}) diff --git a/CMake/cdat_modules/h5py_external.cmake b/CMake/cdat_modules/h5py_external.cmake deleted file mode 100644 index 83762f1e60..0000000000 --- a/CMake/cdat_modules/h5py_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# h5py -# -set(h5py_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/h5py") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/h5py_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/h5py_build_step.cmake" - @ONLY -) - -set(h5py_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/h5py_build_step.cmake) - -ExternalProject_Add(h5py - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${h5py_source_dir} - URL ${H5PY_URL}/${H5PY_GZ} - URL_MD5 ${H5PY_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${h5py_build_command} - INSTALL_COMMAND "" - DEPENDS ${h5py_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/h5py_pkg.cmake b/CMake/cdat_modules/h5py_pkg.cmake deleted file mode 100644 index 3a753fc2b2..0000000000 --- a/CMake/cdat_modules/h5py_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(H5PY_MAJOR 2) -set(H5PY_MINOR 5) -set(H5PY_PATCH 0) -set(H5PY_VERSION 
${H5PY_MAJOR}.${H5PY_MINOR}.${H5PY_PATCH}) -set(H5PY_URL ${LLNL_URL}) -set(H5PY_GZ h5py-${H5PY_VERSION}.tar.gz) -set(H5PY_MD5 969c78e366e8e86dcd0376d945a72dd0) - -set (nm H5PY) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(H5PY_SOURCE ${H5PY_URL}/${H5PY_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(h5py "" "" ON) -else() - add_cdat_package(h5py "" "" OFF) -endif() diff --git a/CMake/cdat_modules/hdf4_external.cmake b/CMake/cdat_modules/hdf4_external.cmake deleted file mode 100644 index 7b34bef0b7..0000000000 --- a/CMake/cdat_modules/hdf4_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ - -set(HDF4_source "${CMAKE_CURRENT_BINARY_DIR}/HDF4") -set(HDF4_install "${cdat_EXTERNALS}") - -if(NOT CMAKE_Fortran_COMPILER) - set(hdf4_configure_args --disable-fortran) -else() - set(hdf4_configure_args --enable-fortran) -endif() - -ExternalProject_Add(HDF4 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${HDF4_source} - INSTALL_DIR ${HDF4_install} - URL ${HDF4_URL}/${HDF4_GZ} - URL_MD5 ${HDF4_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${hdf4_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${HDF4_deps} -) - -set(HDF4_DIR "${HDF4_binary}" CACHE PATH "HDF4 binary directory" FORCE) -mark_as_advanced(HDF4_DIR) diff --git a/CMake/cdat_modules/hdf5_deps.cmake b/CMake/cdat_modules/hdf5_deps.cmake deleted file mode 100644 index 45a66d741a..0000000000 --- a/CMake/cdat_modules/hdf5_deps.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(HDF5_deps ${pkgconfig_pkg} ${libxml2_pkg} ${libxslt_pkg} ${zlib_pkg}) -if (CDAT_BUILD_PARALLEL) - list(APPEND HDF5_deps ${mpi_pkg}) -endif() diff --git a/CMake/cdat_modules/hdf5_external.cmake b/CMake/cdat_modules/hdf5_external.cmake deleted file mode 100644 index 7f20675f7d..0000000000 --- a/CMake/cdat_modules/hdf5_external.cmake +++ /dev/null @@ -1,40 +0,0 @@ - -set(HDF5_source 
"${CMAKE_CURRENT_BINARY_DIR}/build/HDF5") -set(HDF5_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/hdf5_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/hdf5_patch_step.cmake - @ONLY) -if (CDAT_BUILD_PARALLEL) - set(hdf5_configure_args "--enable-parallel") - set(hdf5_additional_cflags "-w -fPIC") - set(configure_file "cdatmpi_configure_step.cmake") -else() - set(hdf5_configure_args "") - set(hdf5_additional_cflags "-w") - set(configure_file "cdat_configure_step.cmake") -endif() -# we disable HDF5 warnings because it has way too many of them. -ExternalProject_Add(HDF5 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${HDF5_source} - INSTALL_DIR ${HDF5_install} - URL ${HDF5_URL}/${HDF5_GZ} - URL_MD5 ${HDF5_MD5} - BUILD_IN_SOURCE 1 - #PATCH_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/hdf5_patch_step.cmake - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${hdf5_configure_args} -DADDITIONAL_CFLAGS=${hdf5_additional_cflags} -DADDITIONAL_CPPFPAGS=-w -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/${configure_file} - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - LOG_BUILD 1 - DEPENDS ${HDF5_deps} - ${ep_log_options} -) - -if(WIN32) - set(HDF5_INCLUDE_DIR ${HDF5_install}/include) - set(HDF5_LIBRARY ${HDF5_install}/lib/hdf5dll${_LINK_LIBRARY_SUFFIX}) -else() - set(HDF5_INCLUDE_DIR ${HDF5_install}/include) - set(HDF5_LIBRARY ${HDF5_install}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}) -endif() diff --git a/CMake/cdat_modules/hdf5_pkg.cmake b/CMake/cdat_modules/hdf5_pkg.cmake deleted file mode 100644 index 4599c9c95b..0000000000 --- a/CMake/cdat_modules/hdf5_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(HDF5_MAJOR_SRC 1) -set(HDF5_MINOR_SRC 8) -set(HDF5_PATCH_SRC 15) -set(HDF5_URL ${LLNL_URL}) -set(HDF5_GZ 
hdf5-${HDF5_MAJOR_SRC}.${HDF5_MINOR_SRC}.${HDF5_PATCH_SRC}.tar.gz) -set(HDF5_MD5 03cccb5b33dbe975fdcd8ae9dc021f24 ) - -set (nm HDF5) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(HDF5_SOURCE ${HDF5_URL}/${HDF5_GZ}) - -add_cdat_package(HDF5 "" "" ON) diff --git a/CMake/cdat_modules/idna_deps.cmake b/CMake/cdat_modules/idna_deps.cmake deleted file mode 100644 index e2aa851a86..0000000000 --- a/CMake/cdat_modules/idna_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(IDNA_deps ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/idna_external.cmake b/CMake/cdat_modules/idna_external.cmake deleted file mode 100644 index a987e968f5..0000000000 --- a/CMake/cdat_modules/idna_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm IDNA) - -include(pipinstaller) diff --git a/CMake/cdat_modules/idna_pkg.cmake b/CMake/cdat_modules/idna_pkg.cmake deleted file mode 100644 index 5bf8539291..0000000000 --- a/CMake/cdat_modules/idna_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(IDNA_MAJOR_SRC 2) -set(IDNA_MINOR_SRC 0) -set(IDNA_PATCH_SRC 0) - -set(IDNA_VERSION ${IDNA_MAJOR_SRC}.${IDNA_MINOR_SRC}) -set(IDNA_GZ idna-${IDNA_VERSION}.tar.gz) -set(IDNA_SOURCE ${LLNL_URL}/${IDNA_GZ}) -set(IDNA_MD5 9ef51e6e51ea91b6c62426856c8a5b7c) - -add_cdat_package_dependent(IDNA "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/ipaddress_deps.cmake b/CMake/cdat_modules/ipaddress_deps.cmake deleted file mode 100644 index ca515655fe..0000000000 --- a/CMake/cdat_modules/ipaddress_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(IPADDRESS_deps ${pip_pkg} ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/ipaddress_external.cmake b/CMake/cdat_modules/ipaddress_external.cmake deleted file mode 100644 index 4773cea4c6..0000000000 --- a/CMake/cdat_modules/ipaddress_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an 
external project to install MyProxyClient, -# and configure and build it -set(nm IPADDRESS) - -include(pipinstaller) diff --git a/CMake/cdat_modules/ipaddress_pkg.cmake b/CMake/cdat_modules/ipaddress_pkg.cmake deleted file mode 100644 index 68ce4f6293..0000000000 --- a/CMake/cdat_modules/ipaddress_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(IPADDRESS_MAJOR_SRC 1) -set(IPADDRESS_MINOR_SRC 0) -set(IPADDRESS_PATCH_SRC 16) - -set(IPADDRESS_VERSION ${IPADDRESS_MAJOR_SRC}.${IPADDRESS_MINOR_SRC}.${IPADDRESS_PATCH_SRC}) -set(IPADDRESS_GZ ipaddress-${IPADDRESS_VERSION}.tar.gz) -set(IPADDRESS_SOURCE ${LLNL_URL}/${IPADDRESS_GZ}) -set(IPADDRESS_MD5 1e27b62aa20f5b6fc200b2bdbf0d0847) - -add_cdat_package_dependent(IPADDRESS "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/ipython_deps.cmake b/CMake/cdat_modules/ipython_deps.cmake deleted file mode 100644 index 0828bdbd98..0000000000 --- a/CMake/cdat_modules/ipython_deps.cmake +++ /dev/null @@ -1,5 +0,0 @@ -if (CDAT_BUILD_ALL) - set(IPYTHON_deps ${pip_pkg} ${tornado_pkg} ${numpy_pkg} ${numexpr_pkg}) -else () - set(IPYTHON_deps ${pip_pkg} ${numpy_pkg}) -endif() diff --git a/CMake/cdat_modules/ipython_external.cmake b/CMake/cdat_modules/ipython_external.cmake deleted file mode 100644 index eab083a8ec..0000000000 --- a/CMake/cdat_modules/ipython_external.cmake +++ /dev/null @@ -1,7 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm IPYTHON) -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/ipython_pkg.cmake b/CMake/cdat_modules/ipython_pkg.cmake deleted file mode 100644 index ce9193f5c0..0000000000 --- a/CMake/cdat_modules/ipython_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(IPYTHON_MAJOR 3) -set(IPYTHON_MINOR 0) -set(IPYTHON_PATCH 0) -set(IPYTHON_VERSION ${IPYTHON_MAJOR}.${IPYTHON_MINOR}.${IPYTHON_PATCH}) -set(IPYTHON_URL ${LLNL_URL}) -set(IPYTHON_GZ ipython-${IPYTHON_VERSION}.tar.gz) -set(IPYTHON_MD5 
b3f00f3c0be036fafef3b0b9d663f27e) -set(IPYTHON_SOURCE ${IPYTHON_URL}/${IPYTHON_GZ}) - -add_cdat_package(IPYTHON "" "" ON) diff --git a/CMake/cdat_modules/jasper_deps.cmake b/CMake/cdat_modules/jasper_deps.cmake deleted file mode 100644 index 4e51869526..0000000000 --- a/CMake/cdat_modules/jasper_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(jasper_deps ${pkgconfig_pkg} ${jpeg_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/jasper_external.cmake b/CMake/cdat_modules/jasper_external.cmake deleted file mode 100644 index 81c9f5f8d5..0000000000 --- a/CMake/cdat_modules/jasper_external.cmake +++ /dev/null @@ -1,20 +0,0 @@ - -set(jasper_source "${CMAKE_CURRENT_BINARY_DIR}/build/jasper") -set(jasper_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/jasper_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/jasper_configure_step.cmake - @ONLY) - -ExternalProject_Add(jasper - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${jasper_source} - INSTALL_DIR ${jasper_install} - URL ${JASPER_URL}/${JASPER_GZ} - URL_MD5 ${JASPER_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/jasper_configure_step.cmake - DEPENDS ${jasper_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/jasper_pkg.cmake b/CMake/cdat_modules/jasper_pkg.cmake deleted file mode 100644 index a4f8987232..0000000000 --- a/CMake/cdat_modules/jasper_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(JASPER_MAJOR 1) -set(JASPER_MINOR 900) -set(JASPER_PATCH 1) -set(JASPER_VERSION ${JASPER_MAJOR}.${JASPER_MINOR}.${JASPER_PATCH}) -set(JASPER_URL ${LLNL_URL}) -set(JASPER_GZ jasper-${JASPER_VERSION}.tgz) -set(JASPER_MD5 b5ae85050d034555790a3ccbc2522860) -set(JASPER_SOURCE ${JASPER_URL}/${JASPER_GZ}) - -add_cdat_package(jasper "" "" ON) diff --git a/CMake/cdat_modules/jinja2_deps.cmake b/CMake/cdat_modules/jinja2_deps.cmake deleted file mode 100644 index a8047b98ab..0000000000 --- 
a/CMake/cdat_modules/jinja2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(jinja2_deps ${pip_pkg} ${markupsafe_pkg}) diff --git a/CMake/cdat_modules/jinja2_external.cmake b/CMake/cdat_modules/jinja2_external.cmake deleted file mode 100644 index a50b6c79fb..0000000000 --- a/CMake/cdat_modules/jinja2_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm jinja2) - -include(pipinstaller) diff --git a/CMake/cdat_modules/jinja2_pkg.cmake b/CMake/cdat_modules/jinja2_pkg.cmake deleted file mode 100644 index ffabe31e3d..0000000000 --- a/CMake/cdat_modules/jinja2_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(jinja2_MAJOR_SRC 2) -set(jinja2_MINOR_SRC 7) -set(jinja2_PATCH_SRC ) -set(JINJA2_VERSION ${jinja2_MAJOR_SRC}.${jinja2_MINOR_SRC}) -set(JINJA2_GZ Jinja2-${JINJA2_VERSION}.tar.gz) -set(JINJA2_SOURCE ${LLNL_URL}/${JINJA2_GZ}) -set(JINJA2_MD5 c2fb12cbbb523c57d3d15bfe4dc0e8fe ) - -add_cdat_package_dependent(jinja2 "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/jpeg_deps.cmake b/CMake/cdat_modules/jpeg_deps.cmake deleted file mode 100644 index e7e6b16ba9..0000000000 --- a/CMake/cdat_modules/jpeg_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(jpeg_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/jpeg_external.cmake b/CMake/cdat_modules/jpeg_external.cmake deleted file mode 100644 index e5a6f62bff..0000000000 --- a/CMake/cdat_modules/jpeg_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ - -set(jpeg_source "${CMAKE_CURRENT_BINARY_DIR}/build/jpeg") -set(jpeg_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/jpeg_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/jpeg_install_step.cmake - @ONLY) - -set(jpeg_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/jpeg_install_step.cmake) - -ExternalProject_Add(jpeg - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${jpeg_source} - INSTALL_DIR ${jpeg_install} - URL ${JPEG_URL}/${JPEG_GZ} 
- URL_MD5 ${JPEG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - INSTALL_COMMAND ${jpeg_INSTALL_COMMAND} - DEPENDS ${jpeg_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/jpeg_pkg.cmake b/CMake/cdat_modules/jpeg_pkg.cmake deleted file mode 100644 index c30e433f7f..0000000000 --- a/CMake/cdat_modules/jpeg_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(JPEG_URL ${LLNL_URL}) -set(JPEG_GZ jpegsrc.v8c.tar.gz) -set(JPEG_MD5 a2c10c04f396a9ce72894beb18b4e1f9) - -set(JPEG_VERSION v8c) -set(JPEG_SOURCE ${JPEG_URL}/${JPEG_GZ}) - -#grib2/jasper need this therefore cdms2 can't turn off -#if (CDAT_BUILD_GRAPHICS) -add_cdat_package(jpeg "" "" OFF) -#endif() - diff --git a/CMake/cdat_modules/lapack_deps.cmake b/CMake/cdat_modules/lapack_deps.cmake deleted file mode 100644 index cc81746999..0000000000 --- a/CMake/cdat_modules/lapack_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(LAPACK_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/lapack_external.cmake b/CMake/cdat_modules/lapack_external.cmake deleted file mode 100644 index a8b3f6139e..0000000000 --- a/CMake/cdat_modules/lapack_external.cmake +++ /dev/null @@ -1,26 +0,0 @@ -# The LAPACK external project - -set(lapack_source "${CMAKE_CURRENT_BINARY_DIR}/build/LAPACK") -set(lapack_binary "${CMAKE_CURRENT_BINARY_DIR}/build/LAPACK-build") -set(lapack_install "${cdat_EXTERNALS}") -set(NUMPY_LAPACK_binary ${lapack_binary}) - -ExternalProject_Add(LAPACK - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${lapack_source} - BINARY_DIR ${lapack_binary} - INSTALL_DIR ${lapack_install} - URL ${LAPACK_URL}/${LAPACK_GZ} - URL_MD5 ${LAPACK_MD5} - CMAKE_ARGS - -DCMAKE_Fortran_COMPILER:FILEPATH=${CMAKE_Fortran_COMPILER} - -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags} - -DBUILD_SHARED_LIBS:BOOL=ON - -DENABLE_TESTING:BOOL=OFF - 
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - DEPENDS ${LAPACK_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/lapack_pkg.cmake b/CMake/cdat_modules/lapack_pkg.cmake deleted file mode 100644 index 3f5b9b81e8..0000000000 --- a/CMake/cdat_modules/lapack_pkg.cmake +++ /dev/null @@ -1,20 +0,0 @@ -set(LAPACK_MAJOR_SRC 3) -set(LAPACK_MINOR_SRC 4) -set(LAPACK_PATCH_SRC 2) - -set(LAPACK_URL ${LLNL_URL}) -set(LAPACK_GZ lapack-${LAPACK_MAJOR_SRC}.${LAPACK_MINOR_SRC}.${LAPACK_PATCH_SRC}.tgz) -set(LAPACK_MD5 61bf1a8a4469d4bdb7604f5897179478 ) - -set (nm LAPACK) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) - -#Removing apple exclusion for now -set(LAPACK_SOURCE ${LAPACK_URL}/${LAPACK_GZ}) - -if(NOT APPLE) - if(CMAKE_Fortran_COMPILER) - add_cdat_package(LAPACK "" "" OFF) - endif() -endif() diff --git a/CMake/cdat_modules/lats_deps.cmake b/CMake/cdat_modules/lats_deps.cmake deleted file mode 100644 index 4f7aee7aa5..0000000000 --- a/CMake/cdat_modules/lats_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(lats_deps ${netcdf_pkg}) diff --git a/CMake/cdat_modules/lats_external.cmake b/CMake/cdat_modules/lats_external.cmake deleted file mode 100644 index 519fc3a2e6..0000000000 --- a/CMake/cdat_modules/lats_external.cmake +++ /dev/null @@ -1,44 +0,0 @@ - -set(lats_source "${CMAKE_CURRENT_BINARY_DIR}/build/lats") -set(lats_install "${cdat_EXTERNALS}") - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lats_Makefile.gfortran.in - ${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile - ) - -if(DEFINED GIT_CMD_STR_LATS ) - message("[INFO] [lats] Installing ${nm} from ${GIT_CMD_STR_LATS}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [lats] Installed ${nm} from tarball ${LATS_GZ}") - set(URL_STR URL ${LATS_URL}/${LATS_GZ}) - set(URL_MD5_STR URL_MD5 ${LATS_MD5}) - set(GIT_CMD_STR_LATS ) - set(GIT_TAG ) -endif() 
-set(LATS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile) -set(LATS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile^^install) -set(LATS_BUILD_ARGS -fPIC) - -ExternalProject_Add(lats - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${lats_source} - INSTALL_DIR ${lats_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_LATS} - ${GIT_TAG} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LATS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LATS_MAKE_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LATS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LATS_MAKE_INSTALL_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${lats_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_LATS) - unset(GIT_CMD_STR_LATS) -endif() diff --git a/CMake/cdat_modules/lats_pkg.cmake b/CMake/cdat_modules/lats_pkg.cmake deleted file mode 100644 index 545c0fe099..0000000000 --- a/CMake/cdat_modules/lats_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(LATS_VERSION 1.0.0) -set(LATS_BRANCH master) -set(LATS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/lats.git ) - -set(GIT_CMD_STR_LATS GIT_REPOSITORY ${LATS_REPOSITORY}) -set(GIT_TAG GIT_TAG "${LATS_BRANCH}") - -if (CDAT_BUILD_PCMDI) - add_cdat_package(lats "" "" ON) -endif() diff --git a/CMake/cdat_modules/lepl_deps.cmake b/CMake/cdat_modules/lepl_deps.cmake deleted file mode 100644 index 0643a85404..0000000000 --- a/CMake/cdat_modules/lepl_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(lepl_deps ${cdat_pkg} ${numexpr_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/lepl_external.cmake b/CMake/cdat_modules/lepl_external.cmake deleted file mode 100644 index 80b680e29f..0000000000 --- a/CMake/cdat_modules/lepl_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it 
-set(nm lepl) - -include(pipinstaller) diff --git a/CMake/cdat_modules/lepl_pkg.cmake b/CMake/cdat_modules/lepl_pkg.cmake deleted file mode 100644 index 9551ef522f..0000000000 --- a/CMake/cdat_modules/lepl_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(lepl_MAJOR_SRC 5) -set(lepl_MINOR_SRC 1) -set(lepl_PATCH_SRC 3) - -set (nm lepl) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(LEPL_GZ LEPL-${LEPL_VERSION}.tar.gz) -set(LEPL_SOURCE ${LLNL_URL}/${LEPL_GZ}) -set(LEPL_MD5 5f653984c57ad8efad828c5153660743 ) - -add_cdat_package_dependent(lepl "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/libcdms_deps.cmake b/CMake/cdat_modules/libcdms_deps.cmake deleted file mode 100644 index 904227d3b5..0000000000 --- a/CMake/cdat_modules/libcdms_deps.cmake +++ /dev/null @@ -1,7 +0,0 @@ -set(libcdms_deps ${netcdf_pkg} - ${jasper_pkg} ${g2clib_pkg} ${tiff_pkg} - ${png_pkg} ${jpeg_pkg} ) -if (CDAT_BUILD_LIBDRS) - message("[INFO] ADDING LIBDRS TO LIBCDMS DEPNDENCIES") - LIST(APPEND libcdms_deps ${libdrs_pkg}) -endif() diff --git a/CMake/cdat_modules/libcdms_external.cmake b/CMake/cdat_modules/libcdms_external.cmake deleted file mode 100644 index 2ed64475c8..0000000000 --- a/CMake/cdat_modules/libcdms_external.cmake +++ /dev/null @@ -1,56 +0,0 @@ -set(libcdms_source "${CMAKE_CURRENT_BINARY_DIR}/build/libcdms") -set(libcdms_install "${cdat_EXTERNALS}") - -if(APPLE) - set(WITHPNGLIB "/usr/X11R6/lib") -else() - set(WITHPNGLIB "no") -endif() - -if (CDAT_BUILD_LIBDRS) - message("[INFO] ENBLING DRS IN CDMS") - set(drs_opt --enable-drs^^--with-drslib=${cdat_EXTERNALS}/lib^^--with-drsinc=${cdat_EXTERNALS}/include^^--with-drsincf=${cdat_EXTERNALS}/include) -else() - set(drs_opt "") -endif() - -set(CONFIGURE_ARGS 
--srcdir=${libcdms_source}^^--enable-dap^^${drs_opt}^^--enable-hdf=no^^--enable-pp=yes^^--enable-ql=no^^--cache-file=/dev/null^^--prefix=${libcdms_install}^^--with-nclib=${cdat_EXTERNALS}/lib^^--with-ncinc=${cdat_EXTERNALS}/include^^--with-daplib=/lib^^--with-dapinc=/include^^--with-hdfinc=./include^^--with-hdflib=./lib^^--with-hdf5lib=${cdat_EXTERNALS}/lib^^--with-pnglib=${WITHPNGLIB}^^--with-grib2lib=${cdat_EXTERNALS}/lib^^--with-jasperlib=${cdat_EXTERNALS}/lib^^--with-grib2inc=${cdat_EXTERNALS}/include^^--enable-grib2) -file(MAKE_DIRECTORY ${cdat_EXTERNALS}/man/man3) - - -if(DEFINED GIT_CMD_STR_LIBCDMS ) - message("[INFO] [libcdms] Installing ${nm} from ${GIT_CMD_STR_LIBCDMS}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [libcdms] Installed ${nm} from tarball ${LIBCDMS_GZ}") - set(URL_STR URL ${LIBCDMS_URL}/${LIBCDMS_GZ}) - set(URL_MD5_STR URL_MD5 ${LIBCDMS_MD5}) - set(GIT_CMD_STR_LIBCDMS ) - set(GIT_TAG_LIBCDMS ) -endif() -set(LIBCDMS_MAKE_ARGS -j1) -set(LIBCDMS_BUILD_ARGS -fPIC) -ExternalProject_Add(libcdms - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libcdms_source} - INSTALL_DIR ${libcdms_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_LIBCDMS} - ${GIT_TAG_LIBCDMS} - PATCH_COMMAND ${CMAKE_COMMAND} -E remove /zconf.h - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBCDMS_BUILD_ARGS} -DCONFIGURE_ARGS=${CONFIGURE_ARGS} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBCDMS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBCDMS_MAKE_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${libcdms_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR) - unset(GIT_CMD_STR) -endif() -if (DEFINED GIT_CMD_STR_LIBCDMS) - unset(GIT_CMD_STR_LIBCDMS) -endif() diff --git a/CMake/cdat_modules/libcdms_pkg.cmake 
b/CMake/cdat_modules/libcdms_pkg.cmake deleted file mode 100644 index eb7a722eba..0000000000 --- a/CMake/cdat_modules/libcdms_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(LIBCDMS_VERSION 1.0.0) -set(LIBCDMS_URL ${LLNL_URL}) -set(LIBCDMS_GZ libcdms-${LIBCDMS_VERSION}.tar.gz) -set(LIBCDMS_SOURCE ${LIBCDMS_URL}/${LIBCDMS_GZ}) -set(LIBCDMS_MD5 ce71f54616f755d67fbbb6c81ca4fd62) -set(LIBCDMS_BRANCH master) -set(LIBCDMS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libcdms.git ) - -set(GIT_CMD_STR_LIBCDMS GIT_REPOSITORY ${LIBCDMS_REPOSITORY}) -set(GIT_TAG_LIBCDMS GIT_TAG "${LIBCDMS_BRANCH}") - -add_cdat_package(libcdms "" "" OFF) diff --git a/CMake/cdat_modules/libcf_deps.cmake b/CMake/cdat_modules/libcf_deps.cmake deleted file mode 100644 index 5673f4b88a..0000000000 --- a/CMake/cdat_modules/libcf_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libcf_deps ${pkgconfig_pkg} ${python_pkg} ${netcdf_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${uuid_pkg} ${clapack_pkg} ${lapack_pkg} ) diff --git a/CMake/cdat_modules/libcf_external.cmake b/CMake/cdat_modules/libcf_external.cmake deleted file mode 100644 index 9eeca6839e..0000000000 --- a/CMake/cdat_modules/libcf_external.cmake +++ /dev/null @@ -1,29 +0,0 @@ - -set(libcf_source "${CMAKE_CURRENT_BINARY_DIR}/build/libcf") -set(libcf_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/libcf_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/libcf_make_step.cmake - @ONLY) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/libcf_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/libcf_install_step.cmake - @ONLY) - -set(libcf_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/libcf_make_step.cmake) -set(libcf_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/libcf_install_step.cmake) - -ExternalProject_Add(libcf - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libcf_source} - INSTALL_DIR ${libcf_install} - URL ${LIBCF_URL}/${LIBCF_GZ} - URL_MD5 ${LIBCF_MD5} - 
BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - PATCH_COMMAND "" - BUILD_COMMAND ${libcf_build_command} - INSTALL_COMMAND ${libcf_install_command} - DEPENDS ${libcf_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/libcf_pkg.cmake b/CMake/cdat_modules/libcf_pkg.cmake deleted file mode 100644 index eed8c34975..0000000000 --- a/CMake/cdat_modules/libcf_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(LIBCF_MAJOR 1) -set(LIBCF_MINOR 0) -set(LIBCF_PATCH beta11) -set(LIBCF_VERSION ${LIBCF_MAJOR}.${LIBCF_MINOR}-${LIBCF_PATCH}) -set(LIBCF_URL ${LLNL_URL}) -set(LIBCF_GZ libcf-${LIBCF_VERSION}.tar.gz) -set(LIBCF_MD5 aba4896eab79d36c7283fc7b75fb16ee) -set(LIBCF_SOURCE ${LIBCF_URL}/${LIBCF_GZ}) - -add_cdat_package_dependent(libcf "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/libdrs_deps.cmake b/CMake/cdat_modules/libdrs_deps.cmake deleted file mode 100644 index 1958c1f35f..0000000000 --- a/CMake/cdat_modules/libdrs_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libdrs_deps ${netcdf_pkg} ${g2clib_pkg}) diff --git a/CMake/cdat_modules/libdrs_external.cmake b/CMake/cdat_modules/libdrs_external.cmake deleted file mode 100644 index f1b2896789..0000000000 --- a/CMake/cdat_modules/libdrs_external.cmake +++ /dev/null @@ -1,49 +0,0 @@ -set(libdrs_source "${CMAKE_CURRENT_BINARY_DIR}/build/libdrs") -set(libdrs_install "${cdat_EXTERNALS}") - -if (APPLE) - set(libdrs_make_file libdrs_Makefile.Mac.gfortran.in) -else () - set(libdrs_make_file libdrs_Makefile.Linux.gfortran.in) -endif () - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/${libdrs_make_file} - ${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile - ) - -if(DEFINED GIT_CMD_STR_LIBDRS ) - message("[INFO] [libdrs] Installing ${nm} from ${GIT_CMD_STR_LIBDRS}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [libdrs] Installed ${nm} from tarball 
${LIBDRS_GZ}") - set(URL_STR URL ${LIBDRS_URL}/${LIBDRS_GZ}) - set(URL_MD5_STR URL_MD5 ${LIBDRS_MD5}) - set(GIT_CMD_STR_LIBDRS ) - set(GIT_TAG ) -endif() -set(LIBDRS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile) -set(LIBDRS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile^^install) -set(LIBDRS_BUILD_ARGS -fPIC) - -ExternalProject_Add(libdrs - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libdrs_source} - INSTALL_DIR ${libdrs_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_LIBDRS} - ${GIT_TAG} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_ARGS} -DWORKING_DIR=/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_INSTALL_ARGS} -DWORKING_DIR=/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${libdrs_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_LIBDRS) - unset(GIT_CMD_STR_LIBDRS) -endif() diff --git a/CMake/cdat_modules/libdrs_pkg.cmake b/CMake/cdat_modules/libdrs_pkg.cmake deleted file mode 100644 index 6258a08d7c..0000000000 --- a/CMake/cdat_modules/libdrs_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(LIBDRS_VERSION 1.0.0) -set(LIBDRS_URL ${LLNL_URL}) -set(LIBDRS_BRANCH master) -set(LIBDRS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libdrs.git ) - -set(GIT_CMD_STR_LIBDRS GIT_REPOSITORY ${LIBDRS_REPOSITORY}) -set(GIT_TAG GIT_TAG "${LIBDRS_BRANCH}") -if (CDAT_BUILD_PCMDI) - set(CDAT_BUILD_LIBDRS ON) - add_cdat_package(libdrs "" "" ON) -endif() diff --git a/CMake/cdat_modules/libdrsfortran_deps.cmake b/CMake/cdat_modules/libdrsfortran_deps.cmake deleted file mode 100644 index c5db76f4b4..0000000000 --- a/CMake/cdat_modules/libdrsfortran_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libdrsfortran_deps ${netcdf_pkg} ${g2clib_pkg} 
${libcdms_pkg} ${libdrs_pkg}) diff --git a/CMake/cdat_modules/libdrsfortran_external.cmake b/CMake/cdat_modules/libdrsfortran_external.cmake deleted file mode 100644 index ba6d738a4c..0000000000 --- a/CMake/cdat_modules/libdrsfortran_external.cmake +++ /dev/null @@ -1,46 +0,0 @@ -set(libdrs_source "${CMAKE_CURRENT_BINARY_DIR}/build/libdrs") -set(libdrs_install "${cdat_EXTERNALS}") - -set(libdrsfortran_make_file libdrs_Makefile.Mac.fwrap.gfortran.in) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/${libdrsfortran_make_file} - ${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile - ) - -if(DEFINED GIT_CMD_STR_LIBDRSFORTRAN ) - message("[INFO] [libdrs] Installing ${nm} from ${GIT_CMD_STR_LIBDRSFORTRAN}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [libdrs] Installed ${nm} from tarball ${LIBDRSFORTRAN_GZ}") - set(URL_STR URL ${LIBDRSFORTRAN_URL}/${LIBDRSFORTRAN_GZ}) - set(URL_MD5_STR URL_MD5 ${LIBDRSFORTRAN_MD5}) - set(GIT_CMD_STR_LIBDRS ) - set(GIT_TAG ) -endif() - -set(LIBDRS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile) -set(LIBDRS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile^^install) -set(LIBDRS_BUILD_ARGS -fPIC) - -ExternalProject_Add(libdrsfortran - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libdrs_source} - INSTALL_DIR ${libdrs_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_LIBDRSFORTRAN} - ${GIT_TAG} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_ARGS} -DWORKING_DIR=/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_INSTALL_ARGS} -DWORKING_DIR=/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${libdrsfortran_deps} - ${ep_log_options} -) -if 
(DEFINED GIT_CMD_STR_LIBDRS) - unset(GIT_CMD_STR_LIBDRS) -endif() diff --git a/CMake/cdat_modules/libdrsfortran_pkg.cmake b/CMake/cdat_modules/libdrsfortran_pkg.cmake deleted file mode 100644 index 23e8e34a4a..0000000000 --- a/CMake/cdat_modules/libdrsfortran_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(LIBDRSFORTRAN_VERSION 1.0.0) -set(LIBDRSFORTRAN_URL ${LLNL_URL}) -set(LIBDRSFORTRAN_BRANCH master) -set(LIBDRSFORTRAN_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libdrs.git ) - -set(GIT_CMD_STR_LIBDRSFORTRAN GIT_REPOSITORY ${LIBDRSFORTRAN_REPOSITORY}) -set(GIT_TAG GIT_TAG "${LIBDRSFORTRAN_BRANCH}") -if (CDAT_BUILD_PCMDI) - if (APPLE) - set(CDAT_BUILD_LIBDRSFORTRAN ON) - add_cdat_package(libdrsfortran "" "" ON) - endif() -endif() diff --git a/CMake/cdat_modules/libxml2_deps.cmake b/CMake/cdat_modules/libxml2_deps.cmake deleted file mode 100644 index cd79834e8f..0000000000 --- a/CMake/cdat_modules/libxml2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libXML2_deps ${pkgconfig_pkg} ${readline_pkg}) diff --git a/CMake/cdat_modules/libxml2_external.cmake b/CMake/cdat_modules/libxml2_external.cmake deleted file mode 100644 index 59216b6b5a..0000000000 --- a/CMake/cdat_modules/libxml2_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ - -set(libXML2_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXML2") -set(libXML2_install "${cdat_EXTERNALS}") - -ExternalProject_Add(libXML2 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libXML2_source} - INSTALL_DIR ${libXML2_install} - URL ${XML_URL}/${XML_GZ} - URL_MD5 ${XML_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${libXML2_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/libxml2_pkg.cmake b/CMake/cdat_modules/libxml2_pkg.cmake deleted file mode 100644 index fd2f57ad28..0000000000 --- a/CMake/cdat_modules/libxml2_pkg.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(XML_MAJOR 2) 
-set(XML_MINOR 7) -set(XML_PATCH 8) -set(XML_MAJOR_SRC 2) -set(XML_MINOR_SRC 7) -set(XML_PATCH_SRC 8) -set(XML_URL ${LLNL_URL}) -set(XML_GZ libxml2-${XML_MAJOR_SRC}.${XML_MINOR_SRC}.${XML_PATCH_SRC}.tar.gz) -set(XML_MD5 8127a65e8c3b08856093099b52599c86) - -set (nm XML) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(LIBXML2_VERSION ${XML_VERSION}) -set(LIBXML2_SOURCE ${XML_URL}/${XML_GZ} ) -set(LIBXML2_MD5 ${XML_MD5}) - -add_cdat_package(libXML2 "" "Bulid libxml2" OFF) - diff --git a/CMake/cdat_modules/libxslt_deps.cmake b/CMake/cdat_modules/libxslt_deps.cmake deleted file mode 100644 index 31ab3ff7fa..0000000000 --- a/CMake/cdat_modules/libxslt_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libXSLT_deps ${pkgconfig_pkg} ${readline_pkg} ${libxml2_pkg}) diff --git a/CMake/cdat_modules/libxslt_external.cmake b/CMake/cdat_modules/libxslt_external.cmake deleted file mode 100644 index 2064cf209d..0000000000 --- a/CMake/cdat_modules/libxslt_external.cmake +++ /dev/null @@ -1,20 +0,0 @@ - -set(libXSLT_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXSLT") -set(libXSLT_install "${cdat_EXTERNALS}") - -if(NOT LIBXML2_FOUND) - set(libXSLT_configure_args --with-libxml-prefix=${libXSLT_install}) -endif() - -ExternalProject_Add(libXSLT - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libXSLT_source} - INSTALL_DIR ${libXSLT_install} - URL ${XSLT_URL}/${XSLT_GZ} - URL_MD5 ${XSLT_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${libXSLT_configure_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${libXSLT_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/libxslt_pkg.cmake b/CMake/cdat_modules/libxslt_pkg.cmake deleted file mode 100644 index d763d76ba1..0000000000 --- a/CMake/cdat_modules/libxslt_pkg.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(XSLT_MAJOR 1) -set(XSLT_MINOR 1) -set(XSLT_PATCH 22) 
-set(XSLT_MAJOR_SRC 1) -set(XSLT_MINOR_SRC 1) -set(XSLT_PATCH_SRC 26) -set(XSLT_URL ${LLNL_URL}) -set(XSLT_GZ libxslt-${XSLT_MAJOR_SRC}.${XSLT_MINOR_SRC}.${XSLT_PATCH_SRC}.tar.gz) -set(XSLT_MD5 e61d0364a30146aaa3001296f853b2b9) - -set (nm XSLT) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(LIBXSLT_VERSION ${XSLT_VERSION}) -set(LIBXSLT_SOURCE ${XSLT_URL}/${XSLT_GZ}) -set(LIBXSLT_MD5 ${XSLT_MD5}) - -add_cdat_package(libXSLT "" "Build xslt" OFF) - diff --git a/CMake/cdat_modules/lxml_deps.cmake b/CMake/cdat_modules/lxml_deps.cmake deleted file mode 100644 index 52670d8f93..0000000000 --- a/CMake/cdat_modules/lxml_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(lxml_deps ${cython_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/lxml_external.cmake b/CMake/cdat_modules/lxml_external.cmake deleted file mode 100644 index 3b8a91e151..0000000000 --- a/CMake/cdat_modules/lxml_external.cmake +++ /dev/null @@ -1,26 +0,0 @@ -# create an external project to install lxml, -# and configure and build it -set(LXML_SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/lxml) -set(LXML_BINARY_DIR ${LXML_SOURCE_DIR}) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lxml_build_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/lxml_build_step.cmake @ONLY) -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lxml_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/lxml_install_step.cmake @ONLY) - -set(LXML_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/lxml_build_step.cmake) -set(LXML_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/lxml_install_step.cmake) - -ExternalProject_Add(lxml - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${LXML_SOURCE_DIR} - URL ${LXML_URL}/${LXML_GZ} - URL_MD5 ${LXML_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${LXML_BUILD_COMMAND} - INSTALL_COMMAND ${LXML_INSTALL_COMMAND} - # INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install 
${PYTHON_EXTRA_PREFIX} - DEPENDS ${lxml_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/lxml_pkg.cmake b/CMake/cdat_modules/lxml_pkg.cmake deleted file mode 100644 index df4fb236d7..0000000000 --- a/CMake/cdat_modules/lxml_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(LXML_MAJOR_SRC 2) -set(LXML_MINOR_SRC 3) -set(LXML_PATCH_SRC 5) -set(LXML_URL ${LLNL_URL}) -set(LXML_GZ lxml-${LXML_MAJOR_SRC}.${LXML_MINOR_SRC}.${LXML_PATCH_SRC}.tar.gz) -set(LXML_MD5 730bb63383528b65eaa099d64ce276cf) - -set (nm LXML) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(LXML_SOURCE ${LXML_URL}/${LXML_GZ}) - -add_cdat_package_dependent(lxml "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/markupsafe_deps.cmake b/CMake/cdat_modules/markupsafe_deps.cmake deleted file mode 100644 index 2b76bd653b..0000000000 --- a/CMake/cdat_modules/markupsafe_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(markupsafe_deps ${pip_pkg} ${pygments_pkg}) diff --git a/CMake/cdat_modules/markupsafe_external.cmake b/CMake/cdat_modules/markupsafe_external.cmake deleted file mode 100644 index 9ea130d01b..0000000000 --- a/CMake/cdat_modules/markupsafe_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm markupsafe) - -include(pipinstaller) diff --git a/CMake/cdat_modules/markupsafe_pkg.cmake b/CMake/cdat_modules/markupsafe_pkg.cmake deleted file mode 100644 index b4e664655b..0000000000 --- a/CMake/cdat_modules/markupsafe_pkg.cmake +++ /dev/null @@ -1,8 +0,0 @@ -set(markupsafe_MAJOR_SRC 0) -set(markupsafe_MINOR_SRC 18) -set(markupsafe_PATCH_SRC ) -set(MARKUPSAFE_VERSION ${markupsafe_MAJOR_SRC}.${markupsafe_MINOR_SRC}) -set(MARKUPSAFE_GZ MarkupSafe-${MARKUPSAFE_VERSION}.tar.gz) -set(MARKUPSAFE_SOURCE ${LLNL_URL}/${MARKUPSAFE_GZ}) -set(MARKUPSAFE_MD5 f8d252fd05371e51dec2fe9a36890687) -add_cdat_package_dependent(markupsafe "" "" OFF 
"CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/matplotlib_deps.cmake b/CMake/cdat_modules/matplotlib_deps.cmake deleted file mode 100644 index 794a6a4766..0000000000 --- a/CMake/cdat_modules/matplotlib_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Matplotlib_deps ${pyqt_pkg} ${freetype_pkg} ${cairo_pkg} ${numpy_pkg} ${png_pkg} ${six_pkg} ${dateutils_pkg} ${pyparsing_pkg} ${cycler_pkg}) diff --git a/CMake/cdat_modules/matplotlib_external.cmake b/CMake/cdat_modules/matplotlib_external.cmake deleted file mode 100644 index 8cbbd53f66..0000000000 --- a/CMake/cdat_modules/matplotlib_external.cmake +++ /dev/null @@ -1,38 +0,0 @@ -# Matplotlib -# -set(matplotlib_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/Matplotlib") - -if(CDAT_BUILD_GUI) - set(MATPLOTLIB_BACKEND "Qt4Agg") -else() - set(MATPLOTLIB_BACKEND "Agg") -endif() - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/matplotlib_patch_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/matplotlib_patch_step.cmake" - @ONLY -) - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/matplotlib_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/matplotlib_build_step.cmake" - @ONLY -) - -set(matplotlib_patch_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/matplotlib_patch_step.cmake) -set(matplotlib_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/matplotlib_build_step.cmake) - -ExternalProject_Add(Matplotlib - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${matplotlib_source_dir} - URL ${MATPLOTLIB_URL}/${MATPLOTLIB_GZ} - URL_MD5 ${MATPLOTLIB_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - PATCH_COMMAND ${matplotlib_patch_command} - BUILD_COMMAND ${matplotlib_build_command} - INSTALL_COMMAND "" - DEPENDS ${Matplotlib_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/matplotlib_pkg.cmake b/CMake/cdat_modules/matplotlib_pkg.cmake deleted file mode 100644 index 365a67c932..0000000000 --- a/CMake/cdat_modules/matplotlib_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ 
-set(MATPLOTLIB_MAJOR_MIN 1) -set(MATPLOTLIB_MINOR_MIN 1) -set(MATPLOTLIB_PATCH_MIN 0) -set(MATPLOTLIB_MAJOR 1) -set(MATPLOTLIB_MINOR 5) -set(MATPLOTLIB_PATCH 1) -set(MATPLOTLIB_VERSION ${MATPLOTLIB_MAJOR}.${MATPLOTLIB_MINOR}.${MATPLOTLIB_PATCH}) -set(MATPLOTLIB_URL ${LLNL_URL}) -set(MATPLOTLIB_GZ matplotlib-${MATPLOTLIB_VERSION}.tar.gz) -set(MATPLOTLIB_MD5 b22dc4962f36aab919a7125b3b35953b) - -set(nm MATPLOTLIB) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(MATPLOTLIB_SOURCE ${MATPLOTLIB_URL}/${MATPLOTLIB_GZ}) - -add_cdat_package_dependent(Matplotlib "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/mccabe_deps.cmake b/CMake/cdat_modules/mccabe_deps.cmake deleted file mode 100644 index 1d322a3534..0000000000 --- a/CMake/cdat_modules/mccabe_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(mccabe_deps ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/mccabe_external.cmake b/CMake/cdat_modules/mccabe_external.cmake deleted file mode 100644 index 79e6561e59..0000000000 --- a/CMake/cdat_modules/mccabe_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ -ExternalProject_Add(mccabe - DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}" - SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/mccabe" - URL "${MCCABE_SOURCE}" - URL_MD5 ${MCCABE_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build - INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${mccabe_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/mccabe_pkg.cmake b/CMake/cdat_modules/mccabe_pkg.cmake deleted file mode 100644 index e2e3795a4e..0000000000 --- a/CMake/cdat_modules/mccabe_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(nm mccabe) -string(TOUPPER ${nm} uc_nm) - -set(${uc_nm}_MAJOR 0) -set(${uc_nm}_MINOR 3) -set(${uc_nm}_PATCH 1) -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_URL ${LLNL_URL}) 
-set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz) -set(${uc_nm}_MD5 9a1570c470ff5db678cc0c03d5c0c237 ) - -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ}) - -if(BUILD_TESTING) - add_cdat_package(${nm} "" "" ON) -endif() diff --git a/CMake/cdat_modules/md5_deps.cmake b/CMake/cdat_modules/md5_deps.cmake deleted file mode 100644 index 3ba1ef5977..0000000000 --- a/CMake/cdat_modules/md5_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(MD5_deps) diff --git a/CMake/cdat_modules/md5_external.cmake b/CMake/cdat_modules/md5_external.cmake deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/CMake/cdat_modules/md5_pkg.cmake b/CMake/cdat_modules/md5_pkg.cmake deleted file mode 100644 index a4ac90144c..0000000000 --- a/CMake/cdat_modules/md5_pkg.cmake +++ /dev/null @@ -1,3 +0,0 @@ -find_package(MD5) -set(MD5PRG ${MD5_EXECUTABLE}) -set(MD5CNT 1) diff --git a/CMake/cdat_modules/mpi4py_deps.cmake b/CMake/cdat_modules/mpi4py_deps.cmake deleted file mode 100644 index cbba65f4c1..0000000000 --- a/CMake/cdat_modules/mpi4py_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Mpi4py_deps ${numpy_pkg} ${mpi_pkg}) diff --git a/CMake/cdat_modules/mpi4py_external.cmake b/CMake/cdat_modules/mpi4py_external.cmake deleted file mode 100644 index 4c1484d292..0000000000 --- a/CMake/cdat_modules/mpi4py_external.cmake +++ /dev/null @@ -1,50 +0,0 @@ -# The Mpi4py project - -set(mpi4py_binary "${CMAKE_CURRENT_BINARY_DIR}/build/Mpi4py") - -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/mpi4py_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/mpi4py_make_step.cmake @ONLY) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/mpi4py_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/mpi4py_install_step.cmake @ONLY) - -set(mpi4py_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/mpi4py_make_step.cmake) -set(mpi4py_INSTALL_COMMAND ${CMAKE_COMMAND} -P 
${cdat_CMAKE_BINARY_DIR}/mpi4py_install_step.cmake) - -set(Mpi4py_source "${CMAKE_CURRENT_BINARY_DIR}/build/Mpi4py") - -# create an external project to download numpy, -# and configure and build it -ExternalProject_Add(Mpi4py - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Mpi4py_source} - URL ${MPI4PY_URL}/${MPI4PY_GZ} - URL_MD5 ${MPI4PY_MD5} - BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/mpi4py - CONFIGURE_COMMAND "" - BUILD_COMMAND ${mpi4py_BUILD_COMMAND} - UPDATE_COMMAND "" - INSTALL_COMMAND ${mpi4py_INSTALL_COMMAND} - DEPENDS - ${Mpi4py_deps} - ${ep_log_options} - ) - -# Mpi4py -# - -#ExternalProject_Add(Mpi4py -# DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR} -# SOURCE_DIR ${Mpi4py_source} -# URL ${MPI4PY_URL}/${MPI4PY_GZ} -# URL_MD5 ${MPI4PY_MD5} -# BUILD_IN_SOURCE 1 -# CONFIGURE_COMMAND "" -# BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build -# INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} -# DEPENDS ${Mpi4py_deps} -# ${ep_log_options} -# ) diff --git a/CMake/cdat_modules/mpi4py_pkg.cmake b/CMake/cdat_modules/mpi4py_pkg.cmake deleted file mode 100644 index e87d6be269..0000000000 --- a/CMake/cdat_modules/mpi4py_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(MPI4PY_MAJOR 1) -set(MPI4PY_MINOR 3) -set(MPI4PY_VERSION ${MPI4PY_MAJOR}.${MPI4PY_MINOR}) -set(MPI4PY_URL http://uv-cdat.llnl.gov/cdat/resources) -set(MPI4PY_GZ mpi4py-${MPI4PY_VERSION}.tar.gz) -set(MPI4PY_MD5 978472a1a71f3142c866c9463dec7103) -set(MPI4PY_SOURCE ${MPI4PY_URL}/${MPI4PY_GZ}) - -add_cdat_package(Mpi4py "" "Bulid Mpi4py" OFF) -if (CDAT_BUILD_PARALLEL) - set_property(CACHE CDAT_BUILD_MPI4PY PROPERTY VALUE ON) -endif() - diff --git a/CMake/cdat_modules/mpi_deps.cmake b/CMake/cdat_modules/mpi_deps.cmake deleted file mode 100644 index e134e5d1fe..0000000000 --- a/CMake/cdat_modules/mpi_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(MPI_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/mpi_external.cmake b/CMake/cdat_modules/mpi_external.cmake deleted file mode 100644 
index 8fbe6a66fc..0000000000 --- a/CMake/cdat_modules/mpi_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(MPI_source "${CMAKE_CURRENT_BINARY_DIR}/build/MPI") -set(MPI_install "${cdat_EXTERNALS}") - -ExternalProject_Add(MPI - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${MPI_source} - INSTALL_DIR ${MPI_install} - URL ${MPI_URL}/${MPI_GZ} - URL_MD5 ${MPI_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND /configure --prefix= --disable-vt - DEPENDS ${MPI_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/mpi_pkg.cmake b/CMake/cdat_modules/mpi_pkg.cmake deleted file mode 100644 index c3397cd0c1..0000000000 --- a/CMake/cdat_modules/mpi_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(MPI_MAJOR 1) -set(MPI_MINOR 6) -set(MPI_PATCH 4) -set(MPI_URL ${LLNL_URL}) -set(MPI_GZ openmpi-${MPI_MAJOR}.${MPI_MINOR}.${MPI_PATCH}.tar.gz) -set(MPI_MD5 70aa9b6271d904c6b337ca326e6613d1) -set(MPI_SOURCE ${MPI_URL}/${MPI_GZ}) -set(MPI_VERSION ${MPI_MAJOR}.${MPI_MINOR}.${MPI_PATCH}) - -add_cdat_package(MPI "" "Bulid MPI" OFF) - -if (CDAT_BUILD_PARALLEL) - set_property(CACHE CDAT_BUILD_MPI PROPERTY VALUE ON) -endif() diff --git a/CMake/cdat_modules/myproxyclient_deps.cmake b/CMake/cdat_modules/myproxyclient_deps.cmake deleted file mode 100644 index a94e7aba74..0000000000 --- a/CMake/cdat_modules/myproxyclient_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(MyProxyClient_deps ${cryptography_pkg} ${pyopenssl_pkg} ${pip_pkg} ${pyasn1_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/myproxyclient_external.cmake b/CMake/cdat_modules/myproxyclient_external.cmake deleted file mode 100644 index eae57a9c73..0000000000 --- a/CMake/cdat_modules/myproxyclient_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm MyProxyClient) - -include(pipinstaller) diff --git a/CMake/cdat_modules/myproxyclient_pkg.cmake b/CMake/cdat_modules/myproxyclient_pkg.cmake deleted file mode 100644 index 
036b1bd0a6..0000000000 --- a/CMake/cdat_modules/myproxyclient_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(MYPROXYCLIENT_MAJOR_SRC 1) -set(MYPROXYCLIENT_MINOR_SRC 3) -set(MYPROXYCLIENT_PATCH_SRC 0) - -set (nm MYPROXYCLIENT) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(MYPROXYCLIENT_GZ MyProxyClient-${MYPROXYCLIENT_VERSION}.tar.gz) -set(MYPROXYCLIENT_SOURCE ${LLNL_URL}/${MYPROXYCLIENT_GZ}) -set(MYPROXYCLIENT_MD5 829a299157f91f8ff8a6e5bc8ec1c09c ) - -add_cdat_package_dependent(MyProxyClient "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/netcdf_deps.cmake b/CMake/cdat_modules/netcdf_deps.cmake deleted file mode 100644 index c8da9fa7bf..0000000000 --- a/CMake/cdat_modules/netcdf_deps.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(NetCDF_deps ${pkgconfig_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${jpeg_pkg} ) -if (CDAT_BUILD_PARALLEL) - list(APPEND NetCDF_deps ${mpi_pkg} ) -endif() diff --git a/CMake/cdat_modules/netcdf_external.cmake b/CMake/cdat_modules/netcdf_external.cmake deleted file mode 100644 index 3135cff493..0000000000 --- a/CMake/cdat_modules/netcdf_external.cmake +++ /dev/null @@ -1,31 +0,0 @@ -set(netcdf_source "${CMAKE_CURRENT_BINARY_DIR}/build/netcdf") -set(netcdf_install "${cdat_EXTERNALS}") -set(netcdf_configure_args "--enable-netcdf-4") -if (CDAT_BUILD_PARALLEL) - set(configure_file "cdatmpi_configure_step.cmake") -else() - set(configure_file "cdat_configure_step.cmake") -endif() - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/netcdf_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/netcdf_patch_step.cmake - @ONLY) - -set(netcdf_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/netcdf_patch_step.cmake) - -ExternalProject_Add(NetCDF - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${netcdf_source} - INSTALL_DIR ${netcdf_install} - URL ${NC4_URL}/${NC4_GZ} - URL_MD5 ${NC4_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND 
${netcdf_PATCH_COMMAND} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${netcdf_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file} - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS ${NetCDF_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/netcdf_pkg.cmake b/CMake/cdat_modules/netcdf_pkg.cmake deleted file mode 100644 index 9ea111ad8a..0000000000 --- a/CMake/cdat_modules/netcdf_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(NC4_MAJOR_SRC 4) -set(NC4_MINOR_SRC 3) -set(NC4_PATCH_SRC 3.1) -set(NC4_URL ${LLNL_URL}) -set(NC4_GZ netcdf-${NC4_MAJOR_SRC}.${NC4_MINOR_SRC}.${NC4_PATCH_SRC}.tar.gz) -set(NC4_MD5 5c9dad3705a3408d27f696e5b31fb88c ) - -set (nm NC4) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(NETCDF_VERSION ${NC4_VERSION}) -set(NETCDF_SOURCE ${NC4_URL}/${NC4_GZ}) -set(NETCDF_MD5 ${NC4_MD5}) - -add_cdat_package(NetCDF "" "" ON) diff --git a/CMake/cdat_modules/netcdfplus_deps.cmake b/CMake/cdat_modules/netcdfplus_deps.cmake deleted file mode 100644 index 7efe4f6ce7..0000000000 --- a/CMake/cdat_modules/netcdfplus_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(NetCDFPLUS_deps ${netcdf_pkg}) diff --git a/CMake/cdat_modules/netcdfplus_external.cmake b/CMake/cdat_modules/netcdfplus_external.cmake deleted file mode 100644 index 130b822981..0000000000 --- a/CMake/cdat_modules/netcdfplus_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(netcdfplus_source "${CMAKE_CURRENT_BINARY_DIR}/build/netcdf-c++") -set(netcdfplus_install "${cdat_EXTERNALS}") -set(netcdfplus_configure_args "") - -ExternalProject_Add(NetCDFPLUS - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${netcdfplus_source} - INSTALL_DIR ${netcdfplus_install} - URL ${NC4PLUS_URL}/${NC4PLUS_GZ} - 
URL_MD5 ${NC4PLUS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${NetCDFPLUS_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/netcdfplus_pkg.cmake b/CMake/cdat_modules/netcdfplus_pkg.cmake deleted file mode 100644 index cec5f82ecd..0000000000 --- a/CMake/cdat_modules/netcdfplus_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(NC4PLUS_MAJOR_SRC 4) -set(NC4PLUS_MINOR_SRC 2) -set(NC4PLUS_PATCH_SRC 1.1) -set(NC4PLUS_URL ${LLNL_URL}) -set(NC4PLUS_GZ netcdf-cxx-${NC4PLUS_MAJOR_SRC}.${NC4PLUS_MINOR_SRC}.tar.gz) -set(NC4PLUS_MD5 0b09655cf977d768ced6c0d327dde176) - -set (nm NC4PLUS) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(NETCDFPLUS_VERSION ${NC4PLUS_VERSION}) -set(NETCDFPLUS_SOURCE ${NC4PLUS_URL}/${NC4PLUS_GZ}) -set(NETCDFPLUS_MD5 ${NC4PLUS_MD5}) - -add_cdat_package_dependent(NetCDFPLUS "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/numexpr_deps.cmake b/CMake/cdat_modules/numexpr_deps.cmake deleted file mode 100644 index 5ba77a20d0..0000000000 --- a/CMake/cdat_modules/numexpr_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Numexpr_deps ${pkgconfig_pkg} ${numpy_pkg} ${myproxyclient_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/numexpr_external.cmake b/CMake/cdat_modules/numexpr_external.cmake deleted file mode 100644 index a87913eb39..0000000000 --- a/CMake/cdat_modules/numexpr_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm Numexpr) - -include(pipinstaller) diff --git a/CMake/cdat_modules/numexpr_pkg.cmake b/CMake/cdat_modules/numexpr_pkg.cmake deleted file mode 100644 index f8b18ab5cc..0000000000 --- a/CMake/cdat_modules/numexpr_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(NUMEXPR_MAJOR 2) -set(NUMEXPR_MINOR 2) -set(NUMEXPR_PATCH 2) -#set(NUMEXPR_VERSION 
${NUMEXPR_MAJOR}.${NUMEXPR_MINOR}) -set(NUMEXPR_VERSION ${NUMEXPR_MAJOR}.${NUMEXPR_MINOR}.${NUMEXPR_PATCH}) -# Following not needed any longer using easy_install -set(NUMEXPR_URL ${LLNL_URL}) -set(NUMEXPR_GZ numexpr-${NUMEXPR_VERSION}.tar.gz) -set(NUMEXPR_MD5 18103954044b3039c0a74a6006c8e0a7) -set(NUMEXPR_SOURCE ${NUMEXPR_URL}/${NUMEXPR_GZ}) - -add_cdat_package(Numexpr "" "" OFF) diff --git a/CMake/cdat_modules/numpy_deps.cmake b/CMake/cdat_modules/numpy_deps.cmake deleted file mode 100644 index 5511925968..0000000000 --- a/CMake/cdat_modules/numpy_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(NUMPY_deps ${pkgconfig_pkg} ${python_pkg} ${clapack_pkg} ${lapack_pkg}) diff --git a/CMake/cdat_modules/numpy_external.cmake b/CMake/cdat_modules/numpy_external.cmake deleted file mode 100644 index 1e4b313494..0000000000 --- a/CMake/cdat_modules/numpy_external.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# The Numpy external project - -set(NUMPY_binary "${CMAKE_CURRENT_BINARY_DIR}/build/NUMPY") - -# to configure numpy we run a cmake -P script -# the script will create a site.cfg file -# then run python setup.py config to verify setup -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/NUMPY_configure_step.cmake @ONLY -) - -# to build numpy we also run a cmake -P script. 
-# the script will set LD_LIBRARY_PATH so that -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/NUMPY_make_step.cmake @ONLY -) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/NUMPY_install_step.cmake @ONLY -) - -set(NUMPY_CONFIGURE_COMMAND ${CMAKE_COMMAND} - -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_configure_step.cmake) -set(NUMPY_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_make_step.cmake) -set(NUMPY_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_install_step.cmake) - -# create an external project to download numpy, -# and configure and build it -ExternalProject_Add(NUMPY - URL ${NUMPY_URL}/${NUMPY_GZ} - URL_MD5 ${NUMPY_MD5} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${NUMPY_binary} - BINARY_DIR ${NUMPY_binary} - CONFIGURE_COMMAND ${NUMPY_CONFIGURE_COMMAND} - BUILD_COMMAND ${NUMPY_BUILD_COMMAND} - UPDATE_COMMAND "" - INSTALL_COMMAND ${NUMPY_INSTALL_COMMAND} - DEPENDS ${NUMPY_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/numpy_pkg.cmake b/CMake/cdat_modules/numpy_pkg.cmake deleted file mode 100644 index bd67f56332..0000000000 --- a/CMake/cdat_modules/numpy_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(NUMPY_MAJOR 1) -set(NUMPY_MINOR 9) -set(NUMPY_PATCH 0) -set(NUMPY_MAJOR_SRC 1) -set(NUMPY_MINOR_SRC 9) -set(NUMPY_PATCH_SRC 0) -set(NUMPY_URL ${LLNL_URL}) -set(NUMPY_GZ numpy-${NUMPY_MAJOR_SRC}.${NUMPY_MINOR_SRC}.${NUMPY_PATCH_SRC}.tar.gz) -set(NUMPY_MD5 a93dfc447f3ef749b31447084839930b) -set(NUMPY_SOURCE ${NUMPY_URL}/${NUMPY_GZ}) - -set (nm NUMPY) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package(NUMPY "" "" ON) -set(NUMPY ${pkgconfig_pkg} ${python_pkg} ${clapack_pkg} ${lapack_pkg}) diff --git 
a/CMake/cdat_modules/ocgis_deps.cmake b/CMake/cdat_modules/ocgis_deps.cmake deleted file mode 100644 index 4968421a1a..0000000000 --- a/CMake/cdat_modules/ocgis_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ocgis_deps ${shapely_pkg} ${gdal_pkg} ${fiona_pkg} ${pynetcdf4_pkg}) diff --git a/CMake/cdat_modules/ocgis_external.cmake b/CMake/cdat_modules/ocgis_external.cmake deleted file mode 100644 index db51295ba8..0000000000 --- a/CMake/cdat_modules/ocgis_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(ocgis_source "${CMAKE_CURRENT_BINARY_DIR}/build/ocgis") -set(ocgis_install "${cdat_EXTERNALS}") - -ExternalProject_Add(ocgis - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ocgis_source} - INSTALL_DIR ${ocgis_install} - BUILD_IN_SOURCE 1 - ${GIT_CMD_STR_OCGIS} - ${GIT_TAG} - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${ocgis_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_OCGIS) - unset(GIT_CMD_STR_OCGIS) -endif() diff --git a/CMake/cdat_modules/ocgis_pkg.cmake b/CMake/cdat_modules/ocgis_pkg.cmake deleted file mode 100644 index ad6d852fff..0000000000 --- a/CMake/cdat_modules/ocgis_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(OCGIS_SOURCE ${OCGIS_URL}/${OCGIS_GZ}) -set(OCGIS_BRANCH next) -set(OCGIS_REPOSITORY ${GIT_PROTOCOL}github.com/NCPP/ocgis.git ) - -set(GIT_CMD_STR_OCGIS GIT_REPOSITORY ${OCGIS_REPOSITORY}) -set(GIT_TAG GIT_TAG "${OCGIS_BRANCH}") - -if (CDAT_BUILD_ALL) - add_cdat_package(ocgis "" "" ON) -else() - add_cdat_package(ocgis "" "" OFF) -endif() diff --git a/CMake/cdat_modules/openssl_deps.cmake b/CMake/cdat_modules/openssl_deps.cmake deleted file mode 100644 index 22b675b476..0000000000 --- a/CMake/cdat_modules/openssl_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(OPENSSL_deps ) diff --git a/CMake/cdat_modules/openssl_external.cmake b/CMake/cdat_modules/openssl_external.cmake 
deleted file mode 100644 index 752d3395c7..0000000000 --- a/CMake/cdat_modules/openssl_external.cmake +++ /dev/null @@ -1,37 +0,0 @@ -set (OPENSSL_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/openssl") -set (OPENSSL_INSTALL_DIR "${cdat_EXTERNALS}") - -execute_process (COMMAND uname -s COMMAND tr -d '\n' - OUTPUT_VARIABLE HOST) -STRING (TOLOWER ${HOST} HOST) -execute_process (COMMAND uname -m COMMAND tr -d '\n' - OUTPUT_VARIABLE ARCHITECTURE) - -get_filename_component (COMPILER "${CMAKE_C_COMPILER}" NAME_WE) - -if (APPLE) - if (ARCHITECTURE MATCHES "64$") - set (HOST "${HOST}64") - endif () - set (COMPILER "cc") -endif () - -set (OPENSSL_CONF_ARGS "${HOST}-${ARCHITECTURE}-${COMPILER}") -set (OPENSSL_CONF_ARGS - ${OPENSSL_CONF_ARGS} - "--prefix=${OPENSSL_INSTALL_DIR}") - -ExternalProject_Add (openssl - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${OPENSSL_SOURCE_DIR} - INSTALL_DIR ${OPENSSL_INSTALL_DIR} - URL ${OPENSSL_SOURCE_URL} - URL_MD5 ${OPENSSL_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${OPENSSL_SOURCE_DIR}/Configure ${OPENSSL_CONF_ARGS} - DEPENDS ${OPENSSL_DEPS} - ${ep_log_options} - ) - -set (OPENSSL_INCLUDE_DIR "${OPENSSL_INSTALL_DIR}/include") -set (OPENSSL_LIBRARY_DIR "${OPENSSL_INSTALL_DIR}/lib") diff --git a/CMake/cdat_modules/openssl_pkg.cmake b/CMake/cdat_modules/openssl_pkg.cmake deleted file mode 100644 index 440d0f532f..0000000000 --- a/CMake/cdat_modules/openssl_pkg.cmake +++ /dev/null @@ -1,37 +0,0 @@ -option(CDAT_USE_SYSTEM_OPENSSL "Use system OpenSSL, if found." 
ON) -mark_as_advanced(CDAT_USE_SYSTEM_OPENSSL) -if(CDAT_USE_SYSTEM_OPENSSL) - find_package(OpenSSL QUIET) - if(OPENSSL_FOUND) - set(FILENAME_PATH_ARG "DIRECTORY") - if(CMAKE_VERSION VERSION_LESS 2.8.12) - # Support older version of GET_FILENAME_COMPONENT macro - # with legacy PATH argument - set(FILENAME_PATH_ARG "PATH") - endif(CMAKE_VERSION VERSION_LESS 2.8.12) - get_filename_component(OPENSSL_LIBRARY_DIR - "${OPENSSL_SSL_LIBRARY}" ${FILENAME_PATH_ARG}) - message(STATUS "System OpenSSL found. " - "OpenSSL library directory: ${OPENSSL_LIBRARY_DIR}. " - "OpenSSL Version: ${OPENSSL_VERSION}") - endif(OPENSSL_FOUND) -endif(CDAT_USE_SYSTEM_OPENSSL) - -if(NOT CDAT_USE_SYSTEM_OPENSSL OR NOT OPENSSL_FOUND) - set(OPENSSL_MAJOR_SRC 1) - set(OPENSSL_MINOR_SRC 0) - set(OPENSSL_PATCH_SRC 2e) - set(OPENSSL_VERSION - ${OPENSSL_MAJOR_SRC}.${OPENSSL_MINOR_SRC}.${OPENSSL_PATCH_SRC}) - - message(STATUS "Compiling OpenSSL from source. Version: ${OPENSSL_VERSION}") - - set(OPENSSL_URL ${LLNL_URL}) - set(OPENSSL_GZ "openssl-${OPENSSL_VERSION}.tar.gz") - set(OPENSSL_MD5 5262bfa25b60ed9de9f28d5d52d77fc5) - set(OPENSSL_SOURCE_URL ${OPENSSL_URL}/${OPENSSL_GZ}) - - # We've reached here because we need OpenSSL. 
- # Hence, defaulting to ON - add_cdat_package(openssl "" "" ON) -endif(NOT CDAT_USE_SYSTEM_OPENSSL OR NOT OPENSSL_FOUND) diff --git a/CMake/cdat_modules/osmesa_deps.cmake b/CMake/cdat_modules/osmesa_deps.cmake deleted file mode 100644 index 2ee8b1857b..0000000000 --- a/CMake/cdat_modules/osmesa_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(osmesa_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/osmesa_external.cmake b/CMake/cdat_modules/osmesa_external.cmake deleted file mode 100644 index 23f4870a1e..0000000000 --- a/CMake/cdat_modules/osmesa_external.cmake +++ /dev/null @@ -1,26 +0,0 @@ -set(osmesa_source "${CMAKE_CURRENT_BINARY_DIR}/build/osmesa") -set(osmesa_install "${cdat_EXTERNALS}") - -set(osmesa_conf_args "--with-driver=osmesa") -set(osmesa_conf_args "${osmesa_conf_args}^^--disable-gallium") -set(osmesa_conf_args "${osmesa_conf_args}^^--disable-gallium-intel") -set(osmesa_conf_args "${osmesa_conf_args}^^--disable-egl") - -ExternalProject_Add(OSMesa - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${osmesa_source} - INSTALL_DIR ${osmesa_install} - URL ${OSMESA_URL}/${OSMESA_GZ} - URL_MD5 ${OSMESA_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND - "${CMAKE_COMMAND}" - "-DCONFIGURE_ARGS=${osmesa_conf_args}" - "-DINSTALL_DIR=" - "-DWORKING_DIR=" - -P "${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake" - DEPENDS ${osmesa_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/osmesa_pkg.cmake b/CMake/cdat_modules/osmesa_pkg.cmake deleted file mode 100644 index 1080dfb358..0000000000 --- a/CMake/cdat_modules/osmesa_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(package OSMesa) -string(TOUPPER ${package} package_uc) - -# We're using an older mesa (7.6.1) as it is known to work well in many -# supercomputing environments. 
-set(${package_uc}_MAJOR_SRC 7) -set(${package_uc}_MINOR_SRC 6) -set(${package_uc}_PATCH_SRC 1) -set(${package_uc}_VERSION "${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.${${package_uc}_PATCH_SRC}") -set(${package_uc}_URL ${LLNL_URL}) -set(${package_uc}_GZ "MesaLib-${${package_uc}_VERSION}.tar.gz") -set(${package_uc}_MD5 e80fabad2e3eb7990adae773d6aeacba) -set(${package_uc}_SOURCE "${${package_uc}_URL}/${${package_uc}_GZ}") - -add_cdat_package(${package} "7.6.1" "" OFF) diff --git a/CMake/cdat_modules/paraview_deps.cmake b/CMake/cdat_modules/paraview_deps.cmake deleted file mode 100644 index 6868b8da7d..0000000000 --- a/CMake/cdat_modules/paraview_deps.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(ParaView_deps ${pkgconfig_pkg} ${python_pkg} ${hdf5_pkg} ${png_pkg} ${jpeg_pkg} ${libxml2_pkg} ${freetype_pkg} ${netcdfplus_pkg} ${zlib_pkg}) - -if (NOT CDAT_BUILD_GUI) - list(APPEND ParaView_deps ${qt_pkg}) -endif() - -if(CDAT_BUILD_PARALLEL) - list(APPEND ParaView_deps "${mpi_pkg}") -endif() - -if(NOT CDAT_BUILD_LEAN) - list(APPEND ParaView_deps "${ffmpeg_pkg}") -endif() - -if(CDAT_BUILD_OFFSCREEN) - list(APPEND ParaView_deps "${osmesa_pkg}") -endif() diff --git a/CMake/cdat_modules/paraview_external.cmake b/CMake/cdat_modules/paraview_external.cmake deleted file mode 100644 index 5c20dbc2a3..0000000000 --- a/CMake/cdat_modules/paraview_external.cmake +++ /dev/null @@ -1,262 +0,0 @@ -set(ParaView_source "${CMAKE_CURRENT_BINARY_DIR}/build/ParaView") -set(ParaView_binary "${CMAKE_CURRENT_BINARY_DIR}/build/ParaView-build") -set(ParaView_install "${cdat_EXTERNALS}") - -if(QT_QMAKE_EXECUTABLE) - get_filename_component(QT_BINARY_DIR ${QT_QMAKE_EXECUTABLE} PATH) - get_filename_component(QT_ROOT ${QT_BINARY_DIR} PATH) -endif() - -if(APPLE) - set(MACOSX_APP_INSTALL_PREFIX "${SB_EXTERNALS_DIR}") -endif() - -# Initialize -set(ParaView_tpl_args) - -# VCS needs projections from GeoVis -list(APPEND ParaView_tpl_args - -DModule_vtkViewsGeovis:BOOL=ON -) -list(APPEND 
ParaView_tpl_args - -DModule_vtklibproj4:BOOL=ON -) - -# We would like to see CGM as well -list(APPEND ParaView_tpl_args - -DModule_vtkIOCGM:BOOL=ON - ) - -if(NOT CDAT_BUILD_LEAN) - list(APPEND ParaView_tpl_args -DPARAVIEW_ENABLE_FFMPEG:BOOL=ON) -endif() - -if (CDAT_BUILD_PARALLEL) - list(APPEND ParaView_tpl_args - -DPARAVIEW_USE_MPI:BOOL=ON) - # Mac has issues with MPI4PY of ParaView. Also I don't know if we really need to build it - # See this bug: paraview.org/bug/view.php?id=13587 - list(APPEND ParaView_tpl_args -DENABLE_MPI4PY:BOOL=OFF) - - if(CDAT_BUILD_MPI) - if(UNIX) - set(ENV{LD_LIBRARY_PATH} "${cdat_EXTERNALS}/lib:$ENV{LD_LIBRARY_PATH}") - elseif(APPLE) - set(ENV{DYLD_FALLBACK_LIBRARY_PATH} "${cdat_EXTERNALS}/lib:$ENV{DYLD_FALLBACK_LIBRARY_PATH}") - endif() - list(APPEND ParaView_tpl_args - -DMPIEXEC:FILEPATH=${cdat_EXTERNALS}/bin/mpiexec - -DMPI_CXX_COMPILER:FILEPATH=${cdat_EXTERNALS}/bin/mpicxx - -DMPI_C_COMPILER:FILEPATH=${cdat_EXTERNALS}/bin/mpicc - -DMPI_C_INCLUDE_PATH:PATH=${cdat_EXTERNALS}/include - -DMPI_CXX_INCLUDE_PATH:PATH=${cdat_EXTERNALS}/include - -DMACOSX_APP_INSTALL_PREFIX:PATH=${MACOSX_APP_INSTALL_PREFIX} - -DVTK_MPIRUN_EXE:FILEPATH=${cdat_EXTERNALS}/bin/mpiexec) - endif() -endif() - -set(_vtk_modules 
"vtkRenderingImage;vtkRenderingVolume;vtkRenderingLabel;vtkRenderingFreeType;vtkRenderingFreeTypeOpenGL;vtkRenderingVolumeOpenGL;vtkRenderingCore;vtkRenderingOpenGL;vtkGeovisCore;vtkViewsCore;vtkViewsGeovis;vtkInteractionImage;vtkInteractionStyle;vtkInteractionWidgets;vtkCommonTransforms;vtkCommonCore;vtkCommonComputationalGeometry;vtkCommonExecutionModel;vtkCommonSystem;vtkCommonMisc;vtkFiltersFlowPaths;vtkFiltersStatistics;vtkFiltersAMR;vtkFiltersGeneric;vtkFiltersSources;vtkFiltersModeling;vtkFiltersExtraction;vtkFiltersSelection;vtkFiltersSMP;vtkFiltersCore;vtkFiltersHybrid;vtkFiltersTexture;vtkFiltersGeneral;vtkFiltersImaging;vtkFiltersGeometry;vtkIOImage;vtkIOCore;vtkIOExport;vtkIOImport;vtkIOGeometry;vtkImagingColor;vtkImagingSources;vtkImagingCore;vtkImagingGeneral;vtkImagingMath") - -if(NOT CDAT_BUILD_LEAN) - list(APPEND _vtk_modules "vtkIOFFMPEG") -endif() -# Either we use cdat zlib and libxml or system zlib and libxml -list(APPEND ParaView_tpl_args - -DVTK_USE_SYSTEM_ZLIB:BOOL=ON - -DVTK_USE_SYSTEM_LIBXML2:BOOL=ON - -DVTK_USE_SYSTEM_HDF5:BOOL=ON - -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON - -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON -) - -# Turn off testing and other non essential featues -list(APPEND ParaView_tpl_args - -DBUILD_TESTING:BOOL=OFF - -DPARAVIEW_BUILD_PLUGIN_MobileRemoteControl:BOOL=OFF - -DPQWIDGETS_DISABLE_QTWEBKIT:BOOL=ON - -DModule_vtkIOGeoJSON:BOOL=ON - -DCMAKE_PREFIX_PATH:PATH=${cdat_EXTERNALS} -) - -# Use cdat zlib -#if(NOT CDAT_USE_SYSTEM_ZLIB) -# list(APPEND ParaView_tpl_args -# -DZLIB_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include -# -DZLIB_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# ) -#endif() - -# Use cdat libxml -#if(NOT CDAT_USE_SYSTEM_LIBXML2) -# list(APPEND ParaView_tpl_args -# -DLIBXML2_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include/libxml2 -# -DLIBXML2_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/lib/libxml2${_LINK_LIBRARY_SUFFIX} -# -DLIBXML2_XMLLINT_EXECUTABLE:FILEPATH=${cdat_EXTERNALS}/bin/xmllint -# ) -#endif() - -# Use cdat 
hdf5 -if(NOT CDAT_USE_SYSTEM_HDF5) - list(APPEND ParaView_tpl_args - -DHDF5_DIR:PATH=${cdat_EXTERNALS}/ - -DHDF5_C_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include - -DHDF5_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include - -DHDF5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - -DHDF5_hdf5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - -DHDF5_hdf5_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - ) - -# if(NOT CDAT_USE_SYSTEM_ZLIB) -# list(APPEND ParaView_tpl_args -# -DHDF5_z_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# -DHDF5_z_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# ) -# endif() -endif() - -# Check if should build GUI -if(CDAT_BUILD_GUI) - list(APPEND ParaView_tpl_args - -DPARAVIEW_BUILD_QT_GUI:BOOL=ON - -DVTK_QT_USE_WEBKIT:BOOL=OFF - -DQT_QMAKE_EXECUTABLE:FILEPATH=${QT_QMAKE_EXECUTABLE} - -DQT_QTUITOOLS_INCLUDE_DIR:PATH=${QT_ROOT}/include/QtUiTools - -DQT_BINARY_DIR:FILEPATH=${QT_BINARY_DIR}) -else() - list(APPEND ParaView_tpl_args - -DPARAVIEW_BUILD_QT_GUI:BOOL=OFF) -endif() - -# Check if using R then only enable R support -if (CDAT_BUILD_R OR CDAT_USE_SYSTEM_R) - list(APPEND ParaView_tpl_args - -DPARAVIEW_USE_GNU_R:BOOL=ON - -DR_COMMAND:PATH=${R_install}/bin/R - -DR_DIR:PATH=${R_install}/lib/R - -DR_INCLUDE_DIR:PATH=${R_install}/lib/R/include - -DR_LIBRARY_BASE:PATH=${R_install}/lib/R/lib/libR${_LINK_LIBRARY_SUFFIX} - -DR_LIBRARY_BLAS:PATH=${R_install}/lib/R/lib/libRblas${_LINK_LIBRARY_SUFFIX} - -DR_LIBRARY_LAPACK:PATH=${R_install}/lib/R/lib/libRlapack${_LINK_LIBRARY_SUFFIX} - -DR_LIBRARY_READLINE:PATH=) -endif() - -if(UVCDAT_TESTDATA_LOCATION) - list(APPEND ParaView_tpl_args - -DUVCDAT_TestData:PATH=${UVCDAT_TESTDATA_LOCATION}) -endif() - -if(CDAT_BUILD_OFFSCREEN) - list(APPEND ParaView_tpl_args - "-DVTK_USE_X:BOOL=OFF" - "-DVTK_OPENGL_HAS_OSMESA:BOOL=ON" - "-DOPENGL_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include" - 
"-DOPENGL_gl_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}" - "-DOPENGL_glu_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libGLU${_LINK_LIBRARY_SUFFIX}" - "-DOSMESA_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include" - "-DOSMESA_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}" - ) -endif() - -include(GetGitRevisionDescription) -set(paraview_branch ${PARAVIEW_BRANCH}) - -get_git_head_revision(refspec sha) -#if("${refspec}" STREQUAL "refs/heads/devel-master") -# set(paraview_branch uvcdat-next) -#endif() - -string(REPLACE "//" "" GIT_PROTOCOL_PREFIX ${GIT_PROTOCOL}) - -if (${GIT_PROTOCOL} STREQUAL "git://") - set(REPLACE_GIT_PROTOCOL_PREFIX "http:") -else() - set(REPLACE_GIT_PROTOCOL_PREFIX "git:") -endif() - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/paraview_download.sh.in - ${cdat_CMAKE_BINARY_DIR}/paraview_download.sh @ONLY - ) - -if (NOT OFFLINE_BUILD) - set(DOWNLOAD_CMD_STR DOWNLOAD_COMMAND ${cdat_CMAKE_BINARY_DIR}/paraview_download.sh) -else () - set(DOWNLOAD_CMD_STR) -endif() - -set(_vtk_module_options) -foreach(_module ${_vtk_modules}) - list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON") -endforeach() -ExternalProject_Add(ParaView - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ParaView_source} - BINARY_DIR ${ParaView_binary} - INSTALL_DIR ${ParaView_install} - ${DOWNLOAD_CMD_STR} - GIT_TAG ${paraview_branch} - UPDATE_COMMAND "" - PATCH_COMMAND "" - CMAKE_CACHE_ARGS - -DBUILD_SHARED_LIBS:BOOL=ON - -DBUILD_TESTING:BOOL=${BUILD_TESTING} - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR} - -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags} -# -DPARAVIEW_BUILD_AS_APPLICATION_BUNDLE:BOOL=OFF -# -DPARAVIEW_DISABLE_VTK_TESTING:BOOL=ON -# -DPARAVIEW_INSTALL_THIRD_PARTY_LIBRARIES:BOOL=OFF - # -DPARAVIEW_TESTING_WITH_PYTHON:BOOL=OFF - -DINCLUDE_PYTHONHOME_PATHS:BOOL=OFF - ${cdat_compiler_args} - ${ParaView_tpl_args} - # Python - 
-DPARAVIEW_ENABLE_PYTHON:BOOL=ON - -DPYTHON_EXECUTABLE:FILEPATH=${PYTHON_EXECUTABLE} - -DPYTHON_INCLUDE_DIR:PATH=${PYTHON_INCLUDE} - -DPYTHON_LIBRARY:FILEPATH=${PYTHON_LIBRARY} - -DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=ON - -DVTK_LEGACY_SILENT:BOOL=ON - ${_vtk_module_options} - -DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - DEPENDS ${ParaView_deps} - ${ep_log_options} -) - -# Install ParaView and VTK python modules via their setup.py files. - -#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/vtk_install_python_module.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/vtk_install_python_module.cmake -# @ONLY) - -#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/paraview_install_python_module.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/paraview_install_python_module.cmake -# @ONLY) - -#ExternalProject_Add_Step(ParaView InstallParaViewPythonModule -# COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/paraview_install_python_module.cmake -# DEPENDEES install -# WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR} -# ) - -#ExternalProject_Add_Step(ParaView InstallVTKPythonModule -# COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/vtk_install_python_module.cmake -# DEPENDEES install -# WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR} -# ) - -# symlinks of Externals/bin get placed in prefix/bin so we need to symlink paraview -# libs into prefix/lib as well for pvserver to work. 
-if(NOT EXISTS ${CMAKE_INSTALL_PREFIX}/lib) - message("making ${ParaView_install}/lib") - file(MAKE_DIRECTORY ${CMAKE_INSTALL_PREFIX}/lib) -endif() - -#ExternalProject_Add_Step(ParaView InstallParaViewLibSymlink -# COMMAND ${CMAKE_COMMAND} -E create_symlink ${ParaView_install}/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} ${CMAKE_INSTALL_PREFIX}/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} -# DEPENDEES install -# WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR} -#) -unset(GIT_CMD_STR) - diff --git a/CMake/cdat_modules/paraview_pkg.cmake b/CMake/cdat_modules/paraview_pkg.cmake deleted file mode 100644 index c5fe1743bf..0000000000 --- a/CMake/cdat_modules/paraview_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(PARAVIEW_MAJOR 4) -set(PARAVIEW_MINOR 1) -set(PARAVIEW_PATCH 0) -set(PARAVIEW_VERSION ${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}.${PARAVIEW_PATCH}) -set(PARAVIEW_URL ${LLNL_URL}) -set(PARAVIEW_GZ ParaView-${PARAVIEW_VERSION}c.tar.gz) -set(PARAVIEW_MD5) -set(PARAVIEW_BRANCH uvcdat-master) -set(PARAVIEW_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/ParaView.git ) - -add_cdat_package_dependent(ParaView "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/pbmplus_external.cmake b/CMake/cdat_modules/pbmplus_external.cmake deleted file mode 100644 index 03743c74ce..0000000000 --- a/CMake/cdat_modules/pbmplus_external.cmake +++ /dev/null @@ -1,32 +0,0 @@ - -set(pbmplus_source "${CMAKE_CURRENT_BINARY_DIR}/build/pbmplus") -set(pbmplus_install "${cdat_EXTERNALS}") - -#cp ../../exsrc/src/pbmplus/pbmplus.h . ; cp ../../exsrc/src/pbmplus/libpbm1.c pbm ;cp ../../exsrc/src/pbmplus/Makefile . 
- -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pbmplus_configure_step.cmake.in - ${CMAKE_CURRENT_BINARY_DIR}/pbmplus_configure_step.cmake - @ONLY) - -ExternalProject_Add(pbmplus - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pbmplus_source} - INSTALL_DIR ${pbmplus_install} - URL ${PBMPLUS_URL}/${PBMPLUS_GZ} - URL_MD5 ${PBMPLUS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/pbmplus/libpbm1.c ${pbmplus_source}/pbm/ - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/pbmplus_configure_step.cmake - DEPENDS ${pbmplus_deps} - ${ep_log_options} -) - -ExternalProject_Add_Step(pbmplus CopyPbmplusHeader - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/pbmplus/pbmplus.h ${pbmplus_source}/ - DEPENDEES patch - DEPENDERS configure - ) - -#pbmplus install fails if this directory doesnt already exist. 
-file(MAKE_DIRECTORY ${pbmplus_install}/man/mann) diff --git a/CMake/cdat_modules/pep8_deps.cmake b/CMake/cdat_modules/pep8_deps.cmake deleted file mode 100644 index e57f7cf7e3..0000000000 --- a/CMake/cdat_modules/pep8_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pep8_deps ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/pep8_external.cmake b/CMake/cdat_modules/pep8_external.cmake deleted file mode 100644 index c6dc541c76..0000000000 --- a/CMake/cdat_modules/pep8_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# The pep8 project - -set(pep8_binary "${CMAKE_CURRENT_BINARY_DIR}/build/pep8") - -ExternalProject_Add(pep8 - DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR} - SOURCE_DIR ${pep8_binary} - URL ${PEP8_SOURCE} - URL_MD5 ${PEP8_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${pep8_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pep8_pkg.cmake b/CMake/cdat_modules/pep8_pkg.cmake deleted file mode 100644 index 723e4b8d3e..0000000000 --- a/CMake/cdat_modules/pep8_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set( PEP8_MAJOR 1 ) -set( PEP8_MINOR 5 ) -set( PEP8_PATCH 7) -set( PEP8_VERSION ${PEP8_MAJOR}.${PEP8_MINOR}.${PEP8_PATCH} ) -set( PEP8_URL ${LLNL_URL} ) -set( PEP8_GZ pep8-${PEP8_VERSION}.tar.gz ) -set( PEP8_MD5 f6adbdd69365ecca20513c709f9b7c93 ) - -set (nm PEP8) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(PEP8_SOURCE ${PEP8_URL}/${PEP8_GZ}) - -if (BUILD_TESTING) - add_cdat_package(pep8 "" "" ON) -endif() diff --git a/CMake/cdat_modules/pip_deps.cmake b/CMake/cdat_modules/pip_deps.cmake deleted file mode 100644 index 35c1383e54..0000000000 --- a/CMake/cdat_modules/pip_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pip_deps ${setuptools_pkg}) diff --git a/CMake/cdat_modules/pip_external.cmake b/CMake/cdat_modules/pip_external.cmake deleted file mode 
100644 index 4c21cd6d32..0000000000 --- a/CMake/cdat_modules/pip_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm pip) - -# create an external project to install MyProxyClient, -# and configure and build it - -include(${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake) -string(TOUPPER ${nm} uc_nm) - -ExternalProject_Add(${nm} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - URL ${${uc_nm}_SOURCE} - URL_MD5 ${${uc_nm}_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND ${EASY_INSTALL_BINARY} ${CDAT_PACKAGE_CACHE_DIR}/${${uc_nm}_GZ} - DEPENDS ${${nm}_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pip_pkg.cmake b/CMake/cdat_modules/pip_pkg.cmake deleted file mode 100644 index 7e442f2f26..0000000000 --- a/CMake/cdat_modules/pip_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(PIP_MAJOR_SRC 7) -set(PIP_MINOR_SRC 1) -set(PIP_PATCH_SRC 2) - -set (nm PIP) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(PIP_URL ${LLNL_URL}) -set(PIP_GZ pip-${PIP_VERSION}.tar.gz) -set(PIP_SOURCE ${PIP_URL}/${PIP_GZ}) -set(PIP_MD5 3823d2343d9f3aaab21cf9c917710196) - -add_cdat_package(pip "" "" OFF) diff --git a/CMake/cdat_modules/pixman_deps.cmake b/CMake/cdat_modules/pixman_deps.cmake deleted file mode 100644 index 276a88585c..0000000000 --- a/CMake/cdat_modules/pixman_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pixman_deps ${pkgconfig_pkg} ${zlib_pkg} ${freetype_pkg}) diff --git a/CMake/cdat_modules/pixman_external.cmake b/CMake/cdat_modules/pixman_external.cmake deleted file mode 100644 index bd043c7a00..0000000000 --- a/CMake/cdat_modules/pixman_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ - -set(pixman_source "${CMAKE_CURRENT_BINARY_DIR}/build/pixman") -set(pixman_install "${cdat_EXTERNALS}") -set(pixman_configure_args "--disable-gtk") - -ExternalProject_Add(pixman - LIST_SEPARATOR ^^ - 
DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pixman_source} - INSTALL_DIR ${pixman_install} - URL ${PIX_URL}/${PIX_GZ} - URL_MD5 ${PIX_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${pixman_configure_args} -DINSTALL_DIR=${pixman_install} -DWORKING_DIR=${pixman_source} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${pixman_deps} - ${ep_log_options} -) - -set(pixman_DIR "${pixman_binary}" CACHE PATH "pixman binary directory" FORCE) -mark_as_advanced(pixman_DIR) diff --git a/CMake/cdat_modules/pixman_pkg.cmake b/CMake/cdat_modules/pixman_pkg.cmake deleted file mode 100644 index 10590199f8..0000000000 --- a/CMake/cdat_modules/pixman_pkg.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(PIX_MAJOR 0) -set(PIX_MINOR 22) -set(PIX_PATCH 2) -set(PIX_MAJOR_SRC 0) -set(PIX_MINOR_SRC 30) -set(PIX_PATCH_SRC 0) -set(PIX_URL ${LLNL_URL}) -set(PIX_GZ pixman-${PIX_MAJOR_SRC}.${PIX_MINOR_SRC}.${PIX_PATCH_SRC}.tar.gz) -set(PIX_MD5 ae7ac97921dfa59086ca2231621a79c7 ) - - -set (nm PIX) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(PIXMAN_VERSION ${PIX_VERSION}) -set(PIXMAN_SOURCE ${PIX_URL}/${PIX_GZ}) -set(PIXMAN_MD5 ${PIX_MD5}) - -add_cdat_package_dependent(pixman "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/pkgconfig_deps.cmake b/CMake/cdat_modules/pkgconfig_deps.cmake deleted file mode 100644 index 106cfb0743..0000000000 --- a/CMake/cdat_modules/pkgconfig_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pkgconfig_deps ${wget_pkg}) diff --git a/CMake/cdat_modules/pkgconfig_external.cmake b/CMake/cdat_modules/pkgconfig_external.cmake deleted file mode 100644 index 2b8bd158be..0000000000 --- a/CMake/cdat_modules/pkgconfig_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ - -set(pkgconfig_source "${CMAKE_CURRENT_BINARY_DIR}/build/pkgconfig") -set(pkgconfig_install "${cdat_EXTERNALS}") -set(pkgconfig_config_args "--with-internal-glib") - 
-ExternalProject_Add(pkgconfig - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - URL ${PKG_URL}/${PKG_GZ} - URL_MD5 ${PKG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - SOURCE_DIR ${pkgconfig_source} - INSTALL_DIR ${pkgconfig_install} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${pkgconfig_config_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${pkgconfig_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/pkgconfig_pkg.cmake b/CMake/cdat_modules/pkgconfig_pkg.cmake deleted file mode 100644 index ca39277318..0000000000 --- a/CMake/cdat_modules/pkgconfig_pkg.cmake +++ /dev/null @@ -1,22 +0,0 @@ -set(PKG_MAJOR 0) -set(PKG_MINOR 9) -set(PKG_PATCH 0) -set(PKG_MAJOR_SRC 0) -set(PKG_MINOR_SRC 28) -set(PKG_PATCH_SRC 0) -set(PKG_VERSION ${PKG_MAJOR_SRC}.${PKG_MINOR_SRC}.${PKG_PATCH_SRC}) -set(PKG_URL ${LLNL_URL}) -set(PKG_GZ pkg-config-${PKG_MAJOR_SRC}.${PKG_MINOR_SRC}.tar.gz) -set(PKG_MD5 aa3c86e67551adc3ac865160e34a2a0d) -set(PKGCONFIG_VERSION ${PKG_VERSION}) -set(PKGCONFIG_SOURCE ${PKG_URL}/${PKG_GZ}) - -add_cdat_package(pkgconfig "" "" OFF) - -if(NOT CDAT_USE_SYSTEM_PKGCONFIG) - set(cdat_PKG_CONFIG_EXECUTABLE ${cdat_EXTERNALS}/bin/pkg-config) - set(ENV{PKG_CONFIG} "${cdat_PKG_CONFIG_EXECUTABLE}") - set(ENV{PKG_CONFIG_PATH} "${cdat_EXTERNALS}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}") - set(ENV{PKG_CONFIG} ${cdat_PKG_CONFIG_EXECUTABLE}) -endif() - diff --git a/CMake/cdat_modules/pmw_deps.cmake b/CMake/cdat_modules/pmw_deps.cmake deleted file mode 100644 index 8e1435b250..0000000000 --- a/CMake/cdat_modules/pmw_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Pmw_deps ${pkgconfig_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/pmw_external.cmake b/CMake/cdat_modules/pmw_external.cmake deleted file mode 100644 index 202ed810e5..0000000000 --- a/CMake/cdat_modules/pmw_external.cmake +++ /dev/null @@ -1,30 +0,0 @@ - -set(Pmw_source "${CMAKE_CURRENT_BINARY_DIR}/build/Pmw") -set(Pmw_install "${cdat_EXTERNALS}") - 
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pmw_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pmw_make_step.cmake - @ONLY) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pmw_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pmw_install_step.cmake - @ONLY) - -set(Pmw_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pmw_make_step.cmake) -set(Pmw_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pmw_install_step.cmake) - -ExternalProject_Add(Pmw - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Pmw_source} - INSTALL_DIR ${Pmw_install} - URL ${PMW_URL}/${PMW_GZ} - URL_MD5 ${PMW_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND ${Pmw_build_command} - INSTALL_COMMAND ${Pmw_install_command} - DEPENDS ${Pmw_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/pmw_pkg.cmake b/CMake/cdat_modules/pmw_pkg.cmake deleted file mode 100644 index f0a0031b58..0000000000 --- a/CMake/cdat_modules/pmw_pkg.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(PMW_MAJOR 1) -set(PMW_MINOR 3) -set(PMW_MAJOR_SRC 1) -set(PMW_MINOR_SRC 3) -set(PMW_PATCH_SRC 2) -set(PMW_URL ${LLNL_URL}) -set(PMW_GZ Pmw.${PMW_MAJOR_SRC}.${PMW_MINOR_SRC}.${PMW_PATCH_SRC}.tar.gz) -set(PMW_MD5 7f30886fe9885ab3cf85dac6ce1fbda5) -set(PMW_SOURCE ${PMW_URL}/${PMW_GZ}) - - -set (nm PMW) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -if (CDAT_BUILD_LEAN) - add_cdat_package_dependent(Pmw "" "" OFF "CDAT_BUILD_GUI" OFF) -else() - add_cdat_package(Pmw "" "" OFF) -endif() diff --git a/CMake/cdat_modules/pnetcdf_deps.cmake b/CMake/cdat_modules/pnetcdf_deps.cmake deleted file mode 100644 index 9b1966cce1..0000000000 --- a/CMake/cdat_modules/pnetcdf_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PNETCDF_deps ${pkgconfig_pkg} ${mpi_pkg}) diff --git a/CMake/cdat_modules/pnetcdf_external.cmake b/CMake/cdat_modules/pnetcdf_external.cmake deleted file mode 100644 index 
431348b850..0000000000 --- a/CMake/cdat_modules/pnetcdf_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(pnetcdf_source "${CMAKE_CURRENT_BINARY_DIR}/build/pnetcdf") -set(pnetcdf_install "${cdat_EXTERNALS}") -set(pnetcdf_configure_args "--with-mpi=${cdat_EXTERNALS}") -set(pnetcdf_additional_cflags "-fPIC") - -ExternalProject_Add(PNETCDF - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pnetcdf_source} - INSTALL_DIR ${pnetcdf_install} - URL ${PNETCDF_URL}/${PNETCDF_GZ} - URL_MD5 ${PNETCDF_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${pnetcdf_additional_cflags} -DINSTALL_DIR= -DWORKING_DIR= -D CONFIGURE_ARGS=${pnetcdf_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${PNETCDF_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/pnetcdf_pkg.cmake b/CMake/cdat_modules/pnetcdf_pkg.cmake deleted file mode 100644 index 02cf48e192..0000000000 --- a/CMake/cdat_modules/pnetcdf_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(PNETCDF_MAJOR_SRC 1) -set(PNETCDF_MINOR_SRC 6) -set(PNETCDF_PATCH_SRC 0) -set(PNETCDF_URL ${LLNL_URL}) -set(PNETCDF_GZ parallel-netcdf-${PNETCDF_MAJOR_SRC}.${PNETCDF_MINOR_SRC}.${PNETCDF_PATCH_SRC}.tar.gz) -set(PNETCDF_MD5 4893a50ddcd487a312c64383bdeb2631) - -set (nm PNETCDF) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(PNETCDF_SOURCE ${PNETCDF_URL}/${PNETCDF_GZ}) - -add_cdat_package(PNETCDF "" "" OFF) diff --git a/CMake/cdat_modules/png_deps.cmake b/CMake/cdat_modules/png_deps.cmake deleted file mode 100644 index 43bad1a488..0000000000 --- a/CMake/cdat_modules/png_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(png_deps ${pkgconfig_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/png_external.cmake b/CMake/cdat_modules/png_external.cmake deleted file mode 100644 index 3ba0b81a66..0000000000 --- a/CMake/cdat_modules/png_external.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# If Windows we use 
CMake otherwise ./configure -if(WIN32) - - set(png_source "${CMAKE_CURRENT_BINARY_DIR}/png") - set(png_binary "${CMAKE_CURRENT_BINARY_DIR}/png-build") - set(png_install "${cdat_EXTERNALS}") - - ExternalProject_Add(png - URL ${PNG_URL}/${PNG_GZ} - URL_MD5 ${PNG_MD5} - UPDATE_COMMAND "" - SOURCE_DIR ${png_source} - BINARY_DIR ${png_binary} - CMAKE_CACHE_ARGS - -DCMAKE_CXX_FLAGS:STRING=${pv_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${pv_tpl_c_flags} - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR} - ${pv_tpl_compiler_args} - -DZLIB_INCLUDE_DIR:STRING=${ZLIB_INCLUDE_DIR} - -DZLIB_LIBRARY:STRING=${ZLIB_LIBRARY} - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - DEPENDS ${png_dependencies} - ${ep_log_options} - ) - -else() - - set(png_source "${CMAKE_CURRENT_BINARY_DIR}/build/png") - set(png_install "${cdat_EXTERNALS}") - - ExternalProject_Add(png - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${png_source} - INSTALL_DIR ${png_install} - URL ${PNG_URL}/${PNG_GZ} - URL_MD5 ${PNG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/png/pngconf.h ${png_source}/pngconf.h - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${png_deps} - ${ep_log_options} - ) - -endif() diff --git a/CMake/cdat_modules/png_pkg.cmake b/CMake/cdat_modules/png_pkg.cmake deleted file mode 100644 index 5a9f1e1f46..0000000000 --- a/CMake/cdat_modules/png_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(PNG_MAJOR 1) -set(PNG_MINOR 4) -set(PNG_PATCH 1) -set(PNG_MAJOR_SRC 1) -set(PNG_MINOR_SRC 5) -set(PNG_PATCH_SRC 1) -set(PNG_VERSION ${PNG_MAJOR_SRC}.${PNG_MINOR_SRC}.${PNG_PATCH_SRC}) -set(PNG_URL ${LLNL_URL}) -set(PNG_GZ libpng-${PNG_VERSION}.tar.gz) -set(PNG_MD5 220035f111ea045a51e290906025e8b5) -set(PNG_SOURCE ${PNG_URL}/${PNG_GZ}) - -# Turns out grib2 (therefore cdms2 needs it so dont turn this off -add_cdat_package(png "" "" ON) diff --git 
a/CMake/cdat_modules/proj4_deps.cmake b/CMake/cdat_modules/proj4_deps.cmake deleted file mode 100644 index ec110453c5..0000000000 --- a/CMake/cdat_modules/proj4_deps.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(proj4_deps ${pkgconfig_pkg}) -if (CDAT_BUILD_PARALLEL) - list(APPEND proj4_deps ${mpi_pkg}) -endif() diff --git a/CMake/cdat_modules/proj4_external.cmake b/CMake/cdat_modules/proj4_external.cmake deleted file mode 100644 index 9bd122f5bb..0000000000 --- a/CMake/cdat_modules/proj4_external.cmake +++ /dev/null @@ -1,20 +0,0 @@ -set(proj4_source "${CMAKE_CURRENT_BINARY_DIR}/build/proj4") -set(proj4_install "${cdat_EXTERNALS}/proj4") -set(proj4_configure_args "") - -ExternalProject_Add(proj4 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${proj4_source} - INSTALL_DIR ${proj4_install} - BUILD_IN_SOURCE 1 - URL ${PROJ4_SOURCE} - URL_MD5 ${PROJ4_MD5} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${proj4_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file} - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS ${proj4_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_PROJ4) - unset(GIT_CMD_STR_PROJ4) -endif() diff --git a/CMake/cdat_modules/proj4_pkg.cmake b/CMake/cdat_modules/proj4_pkg.cmake deleted file mode 100644 index 8bf542f527..0000000000 --- a/CMake/cdat_modules/proj4_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(PROJ4_URL ${LLNL_URL}) -set(PROJ4_MAJOR_SRC 4) -set(PROJ4_MINOR_SRC 9) -set(PROJ4_PATCH_SRC 2) -set(PROJ4_GZ proj.4-${PROJ4_MAJOR_SRC}.${PROJ4_MINOR_SRC}.${PROJ4_PATCH_SRC}.tar.gz) -set(PROJ4_SOURCE ${PROJ4_URL}/${PROJ4_GZ}) -set(PROJ4_MD5 a6059d05592948d5f205ba432e359bd7) -if (CDAT_BUILD_ALL) - add_cdat_package(proj4 "" "" ON) -else() - add_cdat_package_dependent(proj4 "" "" ON "CDAT_BUILD_PROJ4" OFF) -endif() diff --git 
a/CMake/cdat_modules/pyasn1_deps.cmake b/CMake/cdat_modules/pyasn1_deps.cmake deleted file mode 100644 index bf438928fa..0000000000 --- a/CMake/cdat_modules/pyasn1_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYASN1_deps ${pip_pkg} ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/pyasn1_external.cmake b/CMake/cdat_modules/pyasn1_external.cmake deleted file mode 100644 index dd35ee1114..0000000000 --- a/CMake/cdat_modules/pyasn1_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm PYASN1) - -# Set LDFlags and CFlags to make it easier to find OpenSSL -list(APPEND USR_ENVS - "LDFLAGS=-L${OPENSSL_LIBRARY_DIR} $ENV{LDFLAGS}" - "CFLAGS=-I${OPENSSL_INCLUDE_DIR} $ENV{CFLAGS}" - ) - -include(pipinstaller) diff --git a/CMake/cdat_modules/pyasn1_pkg.cmake b/CMake/cdat_modules/pyasn1_pkg.cmake deleted file mode 100644 index ff69f7c518..0000000000 --- a/CMake/cdat_modules/pyasn1_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYASN1_MAJOR_SRC 0) -set(PYASN1_MINOR_SRC 1) -set(PYASN1_PATCH_SRC 9) - -set(PYASN1_VERSION ${PYASN1_MAJOR_SRC}.${PYASN1_MINOR_SRC}.${PYASN1_PATCH_SRC}) -set(PYASN1_GZ pyasn1-${PYASN1_VERSION}.tar.gz) -set(PYASN1_SOURCE ${LLNL_URL}/${PYASN1_GZ}) -set(PYASN1_MD5 f00a02a631d4016818659d1cc38d229a) - -add_cdat_package_dependent(PYASN1 "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pyclimate_deps.cmake b/CMake/cdat_modules/pyclimate_deps.cmake deleted file mode 100644 index ee5768752d..0000000000 --- a/CMake/cdat_modules/pyclimate_deps.cmake +++ /dev/null @@ -1,2 +0,0 @@ -set(pyclimate_deps ${numpy_pkg} ${pip_pkg}) - diff --git a/CMake/cdat_modules/pyclimate_external.cmake b/CMake/cdat_modules/pyclimate_external.cmake deleted file mode 100644 index 4fe52288f9..0000000000 --- a/CMake/cdat_modules/pyclimate_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ -# create an external project to install PyClimate -# and configure and build it -set(nm 
pyclimate) -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/pyclimate_pkg.cmake b/CMake/cdat_modules/pyclimate_pkg.cmake deleted file mode 100644 index e151f3cff3..0000000000 --- a/CMake/cdat_modules/pyclimate_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(PYCLIMATE_VERSION 1.2.3) -set(PYCLIMATE_URL ${LLNL_URL}) -set(PYCLIMATE_GZ PyClimate-${PYCLIMATE_VERSION}.tar.gz) -set(PYCLIMATE_SOURCE ${PYCLIMATE_URL}/${PYCLIMATE_GZ}) -set(PYCLIMATE_MD5 094ffd0adedc3ede24736e0c0ff1699f) - -if (CDAT_BUILD_ALL) - add_cdat_package(pyclimate "" "" ON) -else() - add_cdat_package(pyclimate "" "" OFF) -endif() diff --git a/CMake/cdat_modules/pycparser_deps.cmake b/CMake/cdat_modules/pycparser_deps.cmake deleted file mode 100644 index 3efd2d4eef..0000000000 --- a/CMake/cdat_modules/pycparser_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYCPARSER_deps ${python_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/pycparser_external.cmake b/CMake/cdat_modules/pycparser_external.cmake deleted file mode 100644 index f9b317f4f1..0000000000 --- a/CMake/cdat_modules/pycparser_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm PYCPARSER) - -include(pipinstaller) diff --git a/CMake/cdat_modules/pycparser_pkg.cmake b/CMake/cdat_modules/pycparser_pkg.cmake deleted file mode 100644 index 873a293dde..0000000000 --- a/CMake/cdat_modules/pycparser_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYCPARSER_MAJOR_SRC 2) -set(PYCPARSER_MINOR_SRC 13) -set(PYCPARSER_PATCH_SRC ) - -set(PYCPARSER_VERSION ${PYCPARSER_MAJOR_SRC}.${PYCPARSER_MINOR_SRC}) -set(PYCPARSER_GZ pycparser-${PYCPARSER_VERSION}.tar.gz) -set(PYCPARSER_SOURCE ${LLNL_URL}/${PYCPARSER_GZ}) -set(PYCPARSER_MD5 e4fe1a2d341b22e25da0d22f034ef32f ) - -add_cdat_package_dependent(PYCPARSER "" "" ON "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pyflakes_deps.cmake b/CMake/cdat_modules/pyflakes_deps.cmake deleted file mode 100644 
index 14a40726d7..0000000000 --- a/CMake/cdat_modules/pyflakes_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pyflakes_deps ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/pyflakes_external.cmake b/CMake/cdat_modules/pyflakes_external.cmake deleted file mode 100644 index 40a4774f73..0000000000 --- a/CMake/cdat_modules/pyflakes_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ -ExternalProject_Add(pyflakes - DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}" - SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/pyflakes" - URL "${PYFLAKES_SOURCE}" - URL_MD5 ${PYFLAKES_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build - INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${pyflakes_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pyflakes_pkg.cmake b/CMake/cdat_modules/pyflakes_pkg.cmake deleted file mode 100644 index a83f881c76..0000000000 --- a/CMake/cdat_modules/pyflakes_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(nm pyflakes) -string(TOUPPER ${nm} uc_nm) - -set(${uc_nm}_MAJOR 0) -set(${uc_nm}_MINOR 8) -set(${uc_nm}_PATCH 1) -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_URL ${LLNL_URL}) -set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz) -set(${uc_nm}_MD5 905fe91ad14b912807e8fdc2ac2e2c23 ) - -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ}) - -if(BUILD_TESTING) - add_cdat_package(${nm} "" "" ON) -endif() diff --git a/CMake/cdat_modules/pygments_deps.cmake b/CMake/cdat_modules/pygments_deps.cmake deleted file mode 100644 index 8da947cd9c..0000000000 --- a/CMake/cdat_modules/pygments_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pygments_deps ${pip_pkg} ${lepl_pkg}) diff --git a/CMake/cdat_modules/pygments_external.cmake b/CMake/cdat_modules/pygments_external.cmake deleted file mode 100644 index 225a8b1600..0000000000 --- 
a/CMake/cdat_modules/pygments_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm pygments) - -include(pipinstaller) diff --git a/CMake/cdat_modules/pygments_pkg.cmake b/CMake/cdat_modules/pygments_pkg.cmake deleted file mode 100644 index 7bd13b2b5d..0000000000 --- a/CMake/cdat_modules/pygments_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(pygments_MAJOR_SRC 1) -set(pygments_MINOR_SRC 6) -set(pygments_PATCH_SRC ) -set(PYGMENTS_VERSION ${pygments_MAJOR_SRC}.${pygments_MINOR_SRC}) -set(PYGMENTS_GZ Pygments-${PYGMENTS_VERSION}.tar.gz) -set(PYGMENTS_SOURCE ${LLNL_URL}/${PYGMENTS_GZ}) -set(PYGMENTS_MD5 a18feedf6ffd0b0cc8c8b0fbdb2027b1 ) - -add_cdat_package_dependent(pygments "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/pylibxml2_deps.cmake b/CMake/cdat_modules/pylibxml2_deps.cmake deleted file mode 100644 index 8ff91e2465..0000000000 --- a/CMake/cdat_modules/pylibxml2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYLIBXML2_deps ${pkgconfig_pkg} ${python_pkg} ${libxml2_pkg} ${libxslt_pkg}) diff --git a/CMake/cdat_modules/pylibxml2_external.cmake b/CMake/cdat_modules/pylibxml2_external.cmake deleted file mode 100644 index f3d77fd17d..0000000000 --- a/CMake/cdat_modules/pylibxml2_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ - -set(libXML2_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXML2") -set(libXML2_install "${cdat_EXTERNALS}") - -ExternalProject_Add(PYLIBXML2 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libXML2_source} - INSTALL_DIR ${libXML2_install} - URL ${XML_URL}/${XML_GZ} - URL_MD5 ${XML_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${PYLIBXML2_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/pylibxml2_pkg.cmake b/CMake/cdat_modules/pylibxml2_pkg.cmake deleted file mode 100644 index e374227f78..0000000000 
--- a/CMake/cdat_modules/pylibxml2_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(PYLIBXML2_MAJOR 2) -set(PYLIBXML2_MINOR 7) -set(PYLIBXML2_PATCH 8) -set(PYLIBXML2_MAJOR_SRC 2) -set(PYLIBXML2_MINOR_SRC 7) -set(PYLIBXML2_PATCH_SRC 8) -set(PYLIBXML2_URL ${LLNL_URL}) -set(PYLIBXML2_GZ libxml2-${PYLIBXML2_MAJOR_SRC}.${PYLIBXML2_MINOR_SRC}.${PYLIBXML2_PATCH_SRC}.tar.gz) -set(PYLIBXML2_MD5 8127a65e8c3b08856093099b52599c86) -set(PYLIBXML2_SOURCE ${PYLIBXML2_URL}/${PYLIBXML2_GZ}) - -set (nm PYLIBXML2) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package(PYLIBXML2 "" "" OFF) diff --git a/CMake/cdat_modules/pynetcdf4_deps.cmake b/CMake/cdat_modules/pynetcdf4_deps.cmake deleted file mode 100644 index 176f1cd996..0000000000 --- a/CMake/cdat_modules/pynetcdf4_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pynetcdf4_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${netcdf_pkg} ${numpy_pkg}) diff --git a/CMake/cdat_modules/pynetcdf4_external.cmake b/CMake/cdat_modules/pynetcdf4_external.cmake deleted file mode 100644 index f430b9e16b..0000000000 --- a/CMake/cdat_modules/pynetcdf4_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ -# create an external project to install pynetcdf -# and configure and build it - -# pynetcdf4 -# -set(pynetcdf4_source "${CMAKE_CURRENT_BINARY_DIR}/build/pynetcdf4") - -ExternalProject_Add(pynetcdf4 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pynetcdf4_source} - URL ${PYNETCDF4_URL}/${PYNETCDF4_GZ} - URL_MD5 ${PYNETCDF4_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND env "NETCDF4_DIR=${cdat_EXTERNALS}" "PYTHONPATH=$ENV{PYTHONPATH}" ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env "NETCDF4_DIR=${cdat_EXTERNALS}" "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${pynetcdf4_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/pynetcdf4_pkg.cmake b/CMake/cdat_modules/pynetcdf4_pkg.cmake deleted 
file mode 100644 index ebefb167b2..0000000000 --- a/CMake/cdat_modules/pynetcdf4_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set( PYNETCDF4_MAJOR_SRC 1 ) -set( PYNETCDF4_MINOR_SRC 1 ) -set( PYNETCDF4_PATCH_SRC 9 ) -set(PYNETCDF4_URL ${LLNL_URL}) -set(PYNETCDF4_GZ - netCDF4-${PYNETCDF4_MAJOR_SRC}.${PYNETCDF4_MINOR_SRC}.${PYNETCDF4_PATCH_SRC}.tar.gz) -set(PYNETCDF4_MD5 4ee7399e547f8b906e89da5529fa5ef4) -set(PYNETCDF4_SOURCE ${PYNETCDF4_URL}/${PYNETCDF4_GZ}) - -set (nm pynetcdf4) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package(pynetcdf4 "" "" ON) diff --git a/CMake/cdat_modules/pyopengl_external.cmake b/CMake/cdat_modules/pyopengl_external.cmake deleted file mode 100644 index daf68bf40c..0000000000 --- a/CMake/cdat_modules/pyopengl_external.cmake +++ /dev/null @@ -1,29 +0,0 @@ - -set(PyOpenGL_source "${CMAKE_CURRENT_BINARY_DIR}/PyOpenGL") -set(PyOpenGL_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyopengl_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyopengl_make_step.cmake - @ONLY) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyopengl_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyopengl_install_step.cmake - @ONLY) - -set(PyOpenGL_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyopengl_make_step.cmake) -set(PyOpenGL_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyopengl_install_step.cmake) - -ExternalProject_Add(PyOpenGL - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${PyOpenGL_source} - URL ${PYOPENGL_URL}/${PYOPENGL_GZ} - URL_MD5 ${PYOPENGL_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PyOpenGL_build_command} - INSTALL_COMMAND ${PyOpenGL_install_command} - DEPENDS ${PyOpenGL_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/pyopenssl_deps.cmake b/CMake/cdat_modules/pyopenssl_deps.cmake deleted file mode 100644 index 6ab54642e1..0000000000 
--- a/CMake/cdat_modules/pyopenssl_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYOPENSSL_deps ${python_pkg} ${pip_pkg} ${six_pkg} ${cryptography_pkg} ${cffi_pkg} ${ffi_pkg} ${pycparser_pkg} ) diff --git a/CMake/cdat_modules/pyopenssl_external.cmake b/CMake/cdat_modules/pyopenssl_external.cmake deleted file mode 100644 index c0ed4c2386..0000000000 --- a/CMake/cdat_modules/pyopenssl_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm PYOPENSSL) - -include(pipinstaller) diff --git a/CMake/cdat_modules/pyopenssl_pkg.cmake b/CMake/cdat_modules/pyopenssl_pkg.cmake deleted file mode 100644 index d9e4d4bc4d..0000000000 --- a/CMake/cdat_modules/pyopenssl_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYOPENSSL_MAJOR_SRC 0) -set(PYOPENSSL_MINOR_SRC 14) -set(PYOPENSSL_PATCH_SRC 0) - -set(PYOPENSSL_VERSION ${PYOPENSSL_MAJOR_SRC}.${PYOPENSSL_MINOR_SRC}) -set(PYOPENSSL_GZ pyOpenSSL-${PYOPENSSL_VERSION}.tar.gz) -set(PYOPENSSL_SOURCE ${LLNL_URL}/${PYOPENSSL_GZ}) -set(PYOPENSSL_MD5 8579ff3a1d858858acfba5f046a4ddf7) - -add_cdat_package_dependent(PYOPENSSL "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pyparsing_deps.cmake b/CMake/cdat_modules/pyparsing_deps.cmake deleted file mode 100644 index 79eea79134..0000000000 --- a/CMake/cdat_modules/pyparsing_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYPARSING_deps ${python_pkg} ${pip_pkg} ${dateutils_pkg} ${six_pkg}) diff --git a/CMake/cdat_modules/pyparsing_external.cmake b/CMake/cdat_modules/pyparsing_external.cmake deleted file mode 100644 index b728628520..0000000000 --- a/CMake/cdat_modules/pyparsing_external.cmake +++ /dev/null @@ -1,7 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm PYPARSING) -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/pyparsing_pkg.cmake b/CMake/cdat_modules/pyparsing_pkg.cmake deleted file mode 100644 index 
8c6e265e02..0000000000 --- a/CMake/cdat_modules/pyparsing_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYPARSING_MAJOR_SRC 2) -set(PYPARSING_MINOR_SRC 0) -set(PYPARSING_PATCH_SRC 2) - -set(PYPARSING_VERSION ${PYPARSING_MAJOR_SRC}.${PYPARSING_MINOR_SRC}.${PYPARSING_PATCH_SRC}) -set(PYPARSING_GZ pyparsing-${PYPARSING_VERSION}.tar.gz) -set(PYPARSING_SOURCE ${LLNL_URL}/${PYPARSING_GZ}) -set(PYPARSING_MD5 b170c5d153d190df1a536988d88e95c1) - -add_cdat_package_dependent(PYPARSING "" "" OFF "NOT CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pyqt_deps.cmake b/CMake/cdat_modules/pyqt_deps.cmake deleted file mode 100644 index 023e6753a8..0000000000 --- a/CMake/cdat_modules/pyqt_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PyQt_deps ${pkgconfig_pkg} ${qt_pkg} ${sip_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/pyqt_external.cmake b/CMake/cdat_modules/pyqt_external.cmake deleted file mode 100644 index 5a00060e98..0000000000 --- a/CMake/cdat_modules/pyqt_external.cmake +++ /dev/null @@ -1,28 +0,0 @@ -set(PyQt_source "${CMAKE_CURRENT_BINARY_DIR}/build/PyQt") -set(PyQt_configure_command - env PYTHONPATH=${PYTHONPATH} - "${PYTHON_EXECUTABLE}" configure.py - -q "${QT_QMAKE_EXECUTABLE}" - --confirm-license - -b "${CMAKE_INSTALL_PREFIX}/bin" - -d "${PYTHON_SITE_PACKAGES}" - -v "${CMAKE_INSTALL_PREFIX}/include" - -v "${CMAKE_INSTALL_PREFIX}/share" - -p "${CMAKE_INSTALL_PREFIX}/share/plugins" - -n "${CMAKE_INSTALL_PREFIX}/share/qsci" - --assume-shared - -e QtGui -e QtHelp -e QtMultimedia -e QtNetwork -e QtDeclarative -e QtOpenGL - -e QtScript -e QtScriptTools -e QtSql -e QtSvg -e QtTest -e QtWebKit - -e QtXml -e QtXmlPatterns -e QtCore -) - -ExternalProject_Add(PyQt - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${PyQt_source} - URL ${PYQT_URL}/${PYQT_GZ_${CMAKE_PLATFORM}} - URL_MD5 ${PYQT_MD5_${CMAKE_PLATFORM}} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${PyQt_configure_command} - DEPENDS ${PyQt_deps} - ${ep_log_options} - ) diff --git 
a/CMake/cdat_modules/pyqt_pkg.cmake b/CMake/cdat_modules/pyqt_pkg.cmake deleted file mode 100644 index a049bc7281..0000000000 --- a/CMake/cdat_modules/pyqt_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(PYQT_MAJOR 4) -set(PYQT_MINOR 8) -set(PYQT_PATCH 3) -set(PYQT_MAJOR_SRC 4) -set(PYQT_MINOR_SRC 11) -set(PYQT_PATCH_SRC 3) -set(PYQT_VERSION ${PYQT_MAJOR_SRC}.${PYQT_MINOR_SRC}.${PYQT_PATCH_SRC}) -set(PYQT_URL ${LLNL_URL}) -set(PYQT_GZ_APPLE PyQt-mac-gpl-${PYQT_VERSION}.tar.gz) -set(PYQT_GZ_UNIX PyQt-x11-gpl-${PYQT_VERSION}.tar.gz) -set(PYQT_MD5_APPLE 9bd050f1d0c91510ea8be9f41878144c ) -set(PYQT_MD5_UNIX 997c3e443165a89a559e0d96b061bf70 ) -set(PYQT_SOURCE ${PYQT_URL}/${PYQT_GZ_${CMAKE_PLATFORM}}) -set(PYQT_MD5 ${PYQT_MD5_${CMAKE_PLATFORM}}) - -add_cdat_package_dependent(PyQt "" "" ON "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/pyspharm_deps.cmake b/CMake/cdat_modules/pyspharm_deps.cmake deleted file mode 100644 index 181e0c2716..0000000000 --- a/CMake/cdat_modules/pyspharm_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pyspharm_deps ${numpy_pkg}) diff --git a/CMake/cdat_modules/pyspharm_external.cmake b/CMake/cdat_modules/pyspharm_external.cmake deleted file mode 100644 index 2c1de4f91c..0000000000 --- a/CMake/cdat_modules/pyspharm_external.cmake +++ /dev/null @@ -1,25 +0,0 @@ - -# Pyspharm -# -set(pyspharm_source "${CMAKE_CURRENT_BINARY_DIR}/build/pyspharm") - - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyspharm_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyspharm_patch_step.cmake - @ONLY) - -set(pyspharm_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyspharm_patch_step.cmake) - -ExternalProject_Add(pyspharm - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pyspharm_source} - URL ${PYSPHARM_URL}/${PYSPHARM_GZ} - URL_MD5 ${PYSPHARM_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${pyspharm_PATCH_COMMAND} - CONFIGURE_COMMAND "" - BUILD_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} PYTHONPATH=$ENV{PYTHONPATH} 
${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py install --prefix=${PYTHON_SITE_PACKAGES_PREFIX} - DEPENDS ${pyspharm_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pyspharm_pkg.cmake b/CMake/cdat_modules/pyspharm_pkg.cmake deleted file mode 100644 index c7e8eb166d..0000000000 --- a/CMake/cdat_modules/pyspharm_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(PYSPHARM_MAJOR 1) -set(PYSPHARM_MINOR 0) -set(PYSPHARM_PATCH 8) -set(PYSPHARM_VERSION ${PYSPHARM_MAJOR}.${PYSPHARM_MINOR}.${PYSPHARM_PATCH}) -set(PYSPHARM_URL ${LLNL_URL}) -set(PYSPHARM_GZ pyspharm-${PYSPHARM_VERSION}.tar.gz) -set(PYSPHARM_MD5 7b3a33dd3cbeaa4b8bf67ed5bd210931) -set(PYSPHARM_SOURCE ${PYSPHARM_URL}/${PYSPHARM_GZ}) - -add_cdat_package_dependent(pyspharm "" "" ${CDAT_BUILD_ALL} - "NOT CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pytables_deps.cmake b/CMake/cdat_modules/pytables_deps.cmake deleted file mode 100644 index d446177733..0000000000 --- a/CMake/cdat_modules/pytables_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PyTables_deps ${pkgconfig_pkg} ${python_pkg} ${cython_pkg} ${numexpr_pkg} ${numpy_pkg} ${hdf5_pkg} ${libxml2_pkg} ${libxslt_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/pytables_external.cmake b/CMake/cdat_modules/pytables_external.cmake deleted file mode 100644 index 42e7c60745..0000000000 --- a/CMake/cdat_modules/pytables_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ -include(${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake) - -# PyTables -# -set(PyTables_source "${CMAKE_CURRENT_BINARY_DIR}/build/PyTables") -ExternalProject_Add(PyTables - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${PyTables_source} - URL ${PYTABLES_URL}/${PYTABLES_GZ} - URL_MD5 ${PYTABLES_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}" PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py build 
--hdf5=${cdat_EXTERNALS} - INSTALL_COMMAND env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}" PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py install --hdf5=${cdat_EXTERNALS} ${PYTHON_EXTRA_PREFIX} - DEPENDS ${PyTables_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pytables_pkg.cmake b/CMake/cdat_modules/pytables_pkg.cmake deleted file mode 100644 index 22faad22ad..0000000000 --- a/CMake/cdat_modules/pytables_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYTABLES_MAJOR 3) -set(PYTABLES_MINOR 1) -set(PYTABLES_PATCH 1) -set(PYTABLES_VERSION ${PYTABLES_MAJOR}.${PYTABLES_MINOR}.${PYTABLES_PATCH}) -set(PYTABLES_URL ${LLNL_URL} ) -set(PYTABLES_GZ tables-${PYTABLES_VERSION}.tar.gz) -set(PYTABLES_MD5 38d917f0c6dfb0bc28ce9ea0c3492524) -set(PYTABLES_SOURCE ${PYTABLES_URL}/${PYTABLES_GZ}) - -add_cdat_package_dependent(PyTables "" "" OFF "NOT CDAT_BUILD_LEAN" ${CDAT_BUILD_ALL}) diff --git a/CMake/cdat_modules/python_deps.cmake b/CMake/cdat_modules/python_deps.cmake deleted file mode 100644 index 04864b10c4..0000000000 --- a/CMake/cdat_modules/python_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Python_deps ${pkgconfig_pkg} ${readline_pkg} ${libxml2_pkg} ${libxslt_pkg} ${jpeg_pkg} ${png_pkg} ${tiff_pkg}) diff --git a/CMake/cdat_modules/python_external.cmake b/CMake/cdat_modules/python_external.cmake deleted file mode 100644 index 0710a06630..0000000000 --- a/CMake/cdat_modules/python_external.cmake +++ /dev/null @@ -1,66 +0,0 @@ -#----------------------------------------------------------------------------- -set(proj Python) - -set(python_SOURCE_DIR ${cdat_BINARY_DIR}/build/Python) -set(python_BUILD_IN_SOURCE 1) - -set(python_aqua_cdat no) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake - @ONLY) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake - @ONLY) - 
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake - @ONLY) - -set(python_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_patch_step.cmake) - -if(APPLE) - set(python_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake) - set(python_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake) - set(python_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake) -else() - set(python_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake) - set(python_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake) - set(python_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake) -endif() - -ExternalProject_Add(${proj} - URL ${PYTHON_URL}/${PYTHON_GZ} - URL_MD5 ${PYTHON_MD5} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${python_SOURCE_DIR} - BUILD_IN_SOURCE ${python_BUILD_IN_SOURCE} - UPDATE_COMMAND pwd - CONFIGURE_COMMAND ${python_CONFIGURE_COMMAND} - BUILD_COMMAND ${python_BUILD_COMMAND} - INSTALL_COMMAND ${python_INSTALL_COMMAND} - DEPENDS ${Python_deps} - ${ep_log_options} -) - -#----------------------------------------------------------------------------- -# Set PYTHON_INCLUDE and PYTHON_LIBRARY variables -# -set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages) - -if(APPLE) - ExternalProject_Add_Step(${proj} change_plist_name - COMMAND ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/python ${cdat_CMAKE_SOURCE_DIR}/fixName.py - DEPENDEES install - ) - set(PYTHON_INCLUDE ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/Headers) - set(PYTHON_LIBRARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/Python) - set(PYTHON_LIBRARY_DIR 
${CMAKE_INSTALL_PREFIX}/lib) - set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/bin/python) - #set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}/bin/python) - set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/lib/python${PYVER}/site-packages) -else() - set(PYTHON_INCLUDE ${CMAKE_INSTALL_PREFIX}/include/python${PYVER}) - set(PYTHON_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libpython${PYVER}.so) -endif() diff --git a/CMake/cdat_modules/python_pkg.cmake b/CMake/cdat_modules/python_pkg.cmake deleted file mode 100644 index 36c97d702d..0000000000 --- a/CMake/cdat_modules/python_pkg.cmake +++ /dev/null @@ -1,59 +0,0 @@ -set(PYTHON_MAJOR_SRC 2) -set(PYTHON_MINOR_SRC 7) -set(PYTHON_PATCH_SRC 11) -set(PYTHON_VERSION ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}.${PYTHON_PATCH_SRC}) -set(PYTHON_URL ${LLNL_URL}) -set(PYTHON_GZ Python-${PYTHON_VERSION}.tgz) -set(PYTHON_MD5 6b6076ec9e93f05dd63e47eb9c15728b ) -set(PYVER ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}) -set(PYTHON_SOURCE ${PYTHON_URL}/${PYTHON_GZ}) - -add_cdat_package(Python ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}.${PYTHON_PATCH_SRC} "" "") - -# FIXME: Name style -set(CDAT_OS_XTRA_PATH "") - -set(PYTHON_SITE_PACKAGES_PREFIX ${CMAKE_INSTALL_PREFIX}) -if (APPLE) - set(PYTHON_SITE_PACKAGES_PREFIX ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}) -endif() -set(PYTHON_SITE_PACKAGES ${PYTHON_SITE_PACKAGES_PREFIX}/lib/python${PYVER}/site-packages) -set(PYTHONPATH ${PYTHON_SITE_PACKAGES}) - -if (CDAT_USE_SYSTEM_PYTHON) - find_package(PythonInterp) - set(PYVER ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}) - # \NOTE This is required or else FindPythonLibs may find whatever version is - # listed first internally and if that version exists on the system. For example - # a system might have python version 2.6 and 2.7 both installed. 
- set(Python_ADDITIONAL_VERSIONS ${PYVER}) - find_package(PythonLibs) - set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages) - message("[INFO] Using system python ${PYTHON_EXECUTABLE}") - message("[INFO] Putting packages in directory ${PYTHON_SITE_PACKAGES}") - set(PYTHON_EXTRA_PREFIX "--prefix=${CMAKE_INSTALL_PREFIX}") - message("[INFO] Setting up prefix for installing python packages into: ${PYTHON_EXTRA_PREFIX}") - set(ENV{LD_LIBRARY_PATH} $ENV{LD_LIBRARY_PATH}) - set(PYTHONPATH "${PYTHON_SITE_PACKAGES}:$ENV{PYTHONPATH}") - set(ENV{PYTHONPATH} "${PYTHONPATH}") - message("[INFO] Set PYTHONPATH to $ENV{PYTHONPATH}") - get_filename_component(PYTHON_EXECUTABLE_PATH ${PYTHON_EXECUTABLE} PATH) - set(PYTHON_LIBRARY ${PYTHON_LIBRARIES}) - message("[INFO] set PYTHON_LIBRARY TO" ${PYTHON_LIBRARY}) - set(PYTHON_INCLUDE ${PYTHON_INCLUDE_DIRS}) - if(APPLE) - set(CDAT_OS_XTRA_PATH ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin) - endif() -else () - set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/bin/python) - message("[INFO] Building python at ${PYTHON_EXECUTABLE}") - set(PYTHON_EXTRA_PREFIX "") - set(PYVER 2.7) - if (NOT APPLE) - set(EASY_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/bin/easy_install) - set(PIP_BINARY ${CMAKE_INSTALL_PREFIX}/bin/pip) - else () - set(EASY_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/easy_install) - set(PIP_BINARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/pip) - endif() -endif() diff --git a/CMake/cdat_modules/pyzmq_deps.cmake b/CMake/cdat_modules/pyzmq_deps.cmake deleted file mode 100644 index 507fc11800..0000000000 --- a/CMake/cdat_modules/pyzmq_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pyzmq_deps ${zmq_pkg} ${cython_pkg}) diff --git a/CMake/cdat_modules/pyzmq_external.cmake b/CMake/cdat_modules/pyzmq_external.cmake deleted file mode 100644 index e931ce77e8..0000000000 --- 
a/CMake/cdat_modules/pyzmq_external.cmake +++ /dev/null @@ -1,50 +0,0 @@ -# The pyzmq project - -set(pyzmq_binary "${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq") - -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyzmq_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyzmq_configure_step.cmake @ONLY) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyzmq_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyzmq_install_step.cmake @ONLY) - -set(pyzmq_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyzmq_configure_step.cmake) -set(pyzmq_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyzmq_install_step.cmake) - -set(pyzmq_source "${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq") - -# create an external project to download numpy, -# and configure and build it -ExternalProject_Add(pyzmq - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pyzmq_source} - BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq - URL ${PYZMQ_SOURCE} - URL_MD5 ${PYZMQ_MD5} - CONFIGURE_COMMAND ${pyzmq_CONFIGURE_COMMAND} - BUILD_COMMAND "" - UPDATE_COMMAND "" - INSTALL_COMMAND ${pyzmq_INSTALL_COMMAND} - DEPENDS - ${pyzmq_deps} - ${ep_log_options} - ) - -# pyzmq -# - -#ExternalProject_Add(pyzmq -# DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR} -# SOURCE_DIR ${pyzmq_source} -# URL ${PYZMQ_URL}/${PYZMQ_GZ} -# URL_MD5 ${PYZMQ_MD5} -# BUILD_IN_SOURCE 1 -# CONFIGURE_COMMAND "" -# BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build -# INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} -# DEPENDS ${pyzmq_deps} -# ${ep_log_options} -# ) diff --git a/CMake/cdat_modules/pyzmq_pkg.cmake b/CMake/cdat_modules/pyzmq_pkg.cmake deleted file mode 100644 index dd5f0fa461..0000000000 --- a/CMake/cdat_modules/pyzmq_pkg.cmake +++ /dev/null @@ -1,7 +0,0 @@ -set(PYZMQ_VERSION 14.3.1) -set(PYZMQ_URL ${LLNL_URL}) -set(PYZMQ_GZ pyzmq-${PYZMQ_VERSION}.tar.gz) -set(PYZMQ_MD5 7196b4a6fbf98022f17ffa924be3d68d) 
-set(PYZMQ_SOURCE ${PYZMQ_URL}/${PYZMQ_GZ}) - -add_cdat_package(pyzmq "" "" OFF) diff --git a/CMake/cdat_modules/qt4_deps.cmake b/CMake/cdat_modules/qt4_deps.cmake deleted file mode 100644 index 8b13789179..0000000000 --- a/CMake/cdat_modules/qt4_deps.cmake +++ /dev/null @@ -1 +0,0 @@ - diff --git a/CMake/cdat_modules/qt4_pkg.cmake b/CMake/cdat_modules/qt4_pkg.cmake deleted file mode 100644 index ee6057e896..0000000000 --- a/CMake/cdat_modules/qt4_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -if (CDAT_BUILD_GRAPHICS) - find_package(Qt4 4.7.2 REQUIRED) - - if (CDAT_BUILD_GUI) - if (NOT DEFINED QT_QTOPENGL_INCLUDE_DIR) - message(FATAL_ERROR "QT_QTOPENGL_INCLUDE_DIR is not set but required") - endif() - endif() -endif() - diff --git a/CMake/cdat_modules/qt_external.cmake b/CMake/cdat_modules/qt_external.cmake deleted file mode 100644 index 86085efc6a..0000000000 --- a/CMake/cdat_modules/qt_external.cmake +++ /dev/null @@ -1,66 +0,0 @@ - -set(qt_source "${CMAKE_CURRENT_BINARY_DIR}/build/Qt") -set(qt_install_dir "${cdat_EXTERNALS}") - -if(WIN32) - # if jom is in the path use it as it will be faster - find_program(JOM jom) - mark_as_advanced(JOM) - if(JOM) - set(qt_build_program "${JOM}") - else() - set(qt_build_program nmake) - endif() - set(qt_install_dir ${qt_source}) - configure_file(${Titan_CMAKE_DIR}/win_config_qt.cmake.in - ${CMAKE_CURRENT_BINARY_DIR}/win_config_qt.cmake ) - set(qt_configure ${CMAKE_COMMAND} - -P ${CMAKE_CURRENT_BINARY_DIR}/win_config_qt.cmake) - set(qt_build ${qt_build_program}) - set(qt_install "") -else() - set(qt_configure echo yes | sh configure --prefix=${qt_install_dir} -release - -nomake examples -nomake demos -no-audio-backend -no-multimedia - -phonon -opensource) - if ("-m32" STREQUAL "${CMAKE_CXX_FLAGS}") - set(qt_configure echo yes | sh ./configure -release - -nomake examples -nomake demos -no-audio-backend -no-multimedia - --prefix=${qt_install_dir} -opensource - -platform linux-g++-32) - endif () - set(qt_build ${MAKE}) - set(qt_install 
make install) - if(APPLE) - exec_program(${CMAKE_C_COMPILER} ARGS --version OUTPUT_VARIABLE - _gcc_version_info) - string (REGEX MATCH "[345]\\.[0-9]\\.[0-9]" - _gcc_version "${_gcc_version_info}") - if(NOT _gcc_version) - string (REGEX REPLACE ".*\\(GCC\\).* ([34]\\.[0-9]) .*" "\\1.0" - _gcc_version "${_gcc_version_info}") - endif() - if(${_gcc_version} VERSION_GREATER 4.2.0) - # Then Qt should be built 64 bit - message(STATUS "Building 64 bit Qt using cocoa.") - set(qt_configure ${qt_configure} -arch x86_64 -cocoa) - else() - # Then Qt should be built 32 bit - message(STATUS "Building 32 bit Qt using carbon.") - set(qt_configure ${qt_configure} -arch x86 -carbon) - endif() - endif() -endif() - -ExternalProject_Add(Qt - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - URL ${QT_URL}/${QT_GZ} - URL_MD5 ${QT_MD5} - SOURCE_DIR ${qt_source} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${qt_configure} - DEPENDS ${Qt_deps} - ) - -set(QT_QMAKE_EXECUTABLE "${qt_install_dir}/bin/qmake" - CACHE FILEPATH "Path to qmake executable" FORCE) - diff --git a/CMake/cdat_modules/r_deps.cmake b/CMake/cdat_modules/r_deps.cmake deleted file mode 100644 index a7016962f4..0000000000 --- a/CMake/cdat_modules/r_deps.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(R_deps ${readline_pkg}) -if (CDAT_BUILD_PARALLEL) - list(APPEND R_deps ${mpi_pkg}) -endif() diff --git a/CMake/cdat_modules/r_external.cmake b/CMake/cdat_modules/r_external.cmake deleted file mode 100644 index af1d2d3111..0000000000 --- a/CMake/cdat_modules/r_external.cmake +++ /dev/null @@ -1,51 +0,0 @@ - -set(R_source "${CMAKE_CURRENT_BINARY_DIR}/build/R") -set(R_install "${cdat_EXTERNALS}") -if (APPLE) - message("[INFO] Building R without X support for MacOS") - set(WITHX "no") - set(WITH_AQUA "yes") -else () - set(WITHX "yes") - set(WITH_AQUA "no") -endif() - -if (CDAT_BUILD_PARALLEL) - message([INFO] Enabling openmp for R) - set(R_OPENMP "--enable-openmp") -else () - message([INFO] Disabling openmp for R) - set(R_OPENMP "--disable-openmp") -endif 
() - -list(APPEND USR_ENVS - "CPPFLAGS=-I${cdat_EXTERNALS}/include $ENV{CPPFLAGS}" - "LDFLAGS=-L${cdat_EXTERNALS}/lib" - ) -ExternalProject_Add(R - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${R_source} - INSTALL_DIR ${R_install} - URL ${R_URL}/${R_GZ} - URL_MD5 ${R_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - DEPENDS ${R_deps} - CONFIGURE_COMMAND env ${USR_ENVS} /configure --prefix= LIBnn=lib --without-jpeglib --disable-R-framework --enable-R-shlib ${R_OPENMP} --without-cairo --without-ICU --without-system-xz --with-aqua=${WITH_AQUA} --without-tcltk --with-x=${WITHX} - INSTALL_COMMAND ${CMAKE_MAKE_PROGRAM} install - ${ep_log_options} -) -if(APPLE) - #change id and then change dependencies.. - ExternalProject_Add_Step(R InstallNameToolR - COMMAND install_name_tool -id ${R_install}/lib/R/lib/libR.dylib ${R_install}/lib/R/lib/libR.dylib - COMMAND install_name_tool -id ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib - COMMAND install_name_tool -id ${R_install}/lib/R/lib/libRlapack.dylib ${R_install}/lib/R/lib/libRlapack.dylib - COMMAND install_name_tool -change libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libR.dylib - COMMAND install_name_tool -change libR.dylib ${R_install}/lib/R/lib/libR.dylib -change libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib ${R_install}//lib/R/lib/libRlapack.dylib - DEPENDEES install - WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}) -endif(APPLE) - -set(R_DIR "${R_binary}" CACHE PATH "R binary directory" FORCE) -mark_as_advanced(R_DIR) diff --git a/CMake/cdat_modules/r_pkg.cmake b/CMake/cdat_modules/r_pkg.cmake deleted file mode 100644 index 8f7e53eb48..0000000000 --- a/CMake/cdat_modules/r_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(R_MAJOR_SRC 3) -set(R_MINOR_SRC 2) -set(R_PATCH_SRC 2) -set(R_URL ${LLNL_URL}) -set(R_GZ R-${R_MAJOR_SRC}.${R_MINOR_SRC}.${R_PATCH_SRC}.tar.gz) -set(R_MD5 57cef5c2e210a5454da1979562a10e5b) -set(R_SOURCE ${R_URL}/${R_GZ}) - -set (nm R) 
-string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package_dependent(R "" "Build R" ${CDAT_BUILD_ALL} - "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/readline_deps.cmake b/CMake/cdat_modules/readline_deps.cmake deleted file mode 100644 index e347b6dfb4..0000000000 --- a/CMake/cdat_modules/readline_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(readline_deps ${pkgconfig_pkg} ${curses_pkg}) diff --git a/CMake/cdat_modules/readline_external.cmake b/CMake/cdat_modules/readline_external.cmake deleted file mode 100644 index 212f96171a..0000000000 --- a/CMake/cdat_modules/readline_external.cmake +++ /dev/null @@ -1,31 +0,0 @@ -set(readline_source "${CMAKE_CURRENT_BINARY_DIR}/build/readline") -set(readline_install "${cdat_EXTERNALS}") -set(readline_conf_args) - -set(readline_conf_args "--with-curses;--disable-static;--enable-shared") -# with -fPIC -IF(UNIX AND NOT WIN32) - FIND_PROGRAM(CMAKE_UNAME uname /bin /usr/bin /usr/local/bin ) - IF(CMAKE_UNAME) - EXEC_PROGRAM(uname ARGS -m OUTPUT_VARIABLE CMAKE_SYSTEM_PROCESSOR) - SET(CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR} CACHE INTERNAL -"processor type (i386 and x86_64)") - IF(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64") - set(readline_conf_args "CFLAGS=-fPIC" ${readline_conf_args}) - ENDIF(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64") - ENDIF(CMAKE_UNAME) -ENDIF(UNIX AND NOT WIN32) - -ExternalProject_Add(readline - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${readline_source} - INSTALL_DIR ${readline_install} - URL ${READLINE_URL}/${READLINE_GZ} - URL_MD5 ${READLINE_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/readline/shobj-conf ${readline_source}/support/shobj-conf - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${readline_conf_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${readline_deps} - ${ep_log_options} -) - diff 
--git a/CMake/cdat_modules/readline_pkg.cmake b/CMake/cdat_modules/readline_pkg.cmake deleted file mode 100644 index 86eb2679c7..0000000000 --- a/CMake/cdat_modules/readline_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(RL_MAJOR 5) -set(RL_MINOR 2) -set(RL_MAJOR_SRC 6) -set(RL_MINOR_SRC 2) -set(READLINE_URL ${LLNL_URL}) -set(READLINE_GZ readline-${RL_MAJOR_SRC}.${RL_MINOR_SRC}.tar.gz) -set(READLINE_MD5 67948acb2ca081f23359d0256e9a271c) -set(READLINE_VERSION ${RL_MAJOR_SRC}.${RL_MINOR_SRC}) -set(READLINE_SOURCE ${READLINE_URL}/${READLINE_GZ}) - -add_cdat_package(readline "" "" OFF) diff --git a/CMake/cdat_modules/rpy2_deps.cmake b/CMake/cdat_modules/rpy2_deps.cmake deleted file mode 100644 index 3c3d4d90c6..0000000000 --- a/CMake/cdat_modules/rpy2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(RPY2_deps ${python_pkg} ${pip_pkg} ${r_pkg} ${six_pkg} ${singledispatch_pkg} ${windfield_pkg}) diff --git a/CMake/cdat_modules/rpy2_external.cmake b/CMake/cdat_modules/rpy2_external.cmake deleted file mode 100644 index d408ae22cd..0000000000 --- a/CMake/cdat_modules/rpy2_external.cmake +++ /dev/null @@ -1,11 +0,0 @@ -# create an external project to install RPY2, -# and configure and build it -set(nm RPY2) - -# Set PATH and R_HOME to find R -list(APPEND USR_ENVS - "R_HOME=${cdat_EXTERNALS}/lib/R" - "PATH=${cdat_EXTERNALS}/bin:$ENV{PATH}" - ) - -include(pipinstaller) diff --git a/CMake/cdat_modules/rpy2_pkg.cmake b/CMake/cdat_modules/rpy2_pkg.cmake deleted file mode 100644 index 5447bd3fb6..0000000000 --- a/CMake/cdat_modules/rpy2_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(RPY2_MAJOR_SRC 2) -set(RPY2_MINOR_SRC 6) -set(RPY2_PATCH_SRC 0) - -set(RPY2_VERSION ${RPY2_MAJOR_SRC}.${RPY2_MINOR_SRC}.${RPY2_PATCH_SRC}) -set(RPY2_GZ rpy2-${RPY2_VERSION}.tar.gz) -set(RPY2_SOURCE ${LLNL_URL}/${RPY2_GZ}) -set(RPY2_MD5 679898fbc832d4f05a5efcf1a7eb1a68) - -add_cdat_package_dependent(RPY2 "" "" ${CDAT_BUILD_ALL} - "NOT CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/sampledata_deps.cmake 
b/CMake/cdat_modules/sampledata_deps.cmake deleted file mode 100644 index 785ca373e1..0000000000 --- a/CMake/cdat_modules/sampledata_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(sampledata_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/sampledata_external.cmake b/CMake/cdat_modules/sampledata_external.cmake deleted file mode 100644 index 7b9a7027fd..0000000000 --- a/CMake/cdat_modules/sampledata_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ -if (CDAT_DOWNLOAD_SAMPLE_DATA) - message("[INFO] ------------------------------------------------------------------------------------------------------------------------------") - configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_download_sample_data.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/cdat_download_sample_data.cmake" - @ONLY - ) - set(sampledata_cmd ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_download_sample_data.cmake) - ExternalProject_Add(sampledata - SOURCE_DIR ${cdat_SOURCE_DIR}/Packages/dat - CONFIGURE_COMMAND ${sampledata_cmd} - BUILD_COMMAND "" - INSTALL_COMMAND "" - DEPENDS ${sampledata_deps} - ${ep_log_options} - ) -endif() diff --git a/CMake/cdat_modules/sampledata_pkg.cmake b/CMake/cdat_modules/sampledata_pkg.cmake deleted file mode 100644 index 821414e964..0000000000 --- a/CMake/cdat_modules/sampledata_pkg.cmake +++ /dev/null @@ -1,2 +0,0 @@ - -add_cdat_package(sampledata "" "" ON) diff --git a/CMake/cdat_modules/scientificpython_deps.cmake b/CMake/cdat_modules/scientificpython_deps.cmake deleted file mode 100644 index 8116fccd08..0000000000 --- a/CMake/cdat_modules/scientificpython_deps.cmake +++ /dev/null @@ -1,2 +0,0 @@ -set(scientificpython_deps ${numpy_pkg} ${netcdf_pkg} ${cdat_pkg} ${pip_pkg}) - diff --git a/CMake/cdat_modules/scientificpython_external.cmake b/CMake/cdat_modules/scientificpython_external.cmake deleted file mode 100644 index ecd5c2c9a3..0000000000 --- a/CMake/cdat_modules/scientificpython_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to 
install ScientificPython -# and configure and build it -set(nm scientificpython) - -include(pipinstaller) diff --git a/CMake/cdat_modules/scientificpython_pkg.cmake b/CMake/cdat_modules/scientificpython_pkg.cmake deleted file mode 100644 index 206cdd7ca8..0000000000 --- a/CMake/cdat_modules/scientificpython_pkg.cmake +++ /dev/null @@ -1,7 +0,0 @@ -set(SCIENTIFICPYTHON_VERSION 2.8) -set(SCIENTIFICPYTHON_URL ${LLNL_URL}) -set(SCIENTIFICPYTHON_GZ ScientificPython-${SCIENTIFICPYTHON_VERSION}.tar.gz) -set(SCIENTIFICPYTHON_SOURCE ${SCIENTIFICPYTHON_URL}/${SCIENTIFICPYTHON_GZ}) -set(SCIENTIFICPYTHON_MD5 b87dd2b2c4be6b5421d906d39bcc59a7 ) - -add_cdat_package_dependent(scientificpython "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/scikits_deps.cmake b/CMake/cdat_modules/scikits_deps.cmake deleted file mode 100644 index 858e900f72..0000000000 --- a/CMake/cdat_modules/scikits_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(scikits_deps ${pip_pkg} ${scipy_pkg}) diff --git a/CMake/cdat_modules/scikits_external.cmake b/CMake/cdat_modules/scikits_external.cmake deleted file mode 100644 index eeff0fa013..0000000000 --- a/CMake/cdat_modules/scikits_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm scikits) - -include(pipinstaller) diff --git a/CMake/cdat_modules/scikits_pkg.cmake b/CMake/cdat_modules/scikits_pkg.cmake deleted file mode 100644 index 83d79a2500..0000000000 --- a/CMake/cdat_modules/scikits_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(SCIKITS_MAJOR_SRC 0) -set(SCIKITS_MINOR_SRC 12) -set(SCIKITS_URL ${LLNL_URL}) -set(SCIKITS_GZ scikit-learn-${SCIKITS_MAJOR_SRC}.${SCIKITS_MINOR_SRC}.tar.gz) -set(SCIKITS_MD5 0e1f6c60b43a4f447bf363583c1fc204 ) -set(SCIKITS_VERSION ${SCIKITS_MAJOR_SRC}.${SCIKITS_MINOR_SRC}) -set(SCIKITS_SOURCE ${SCIKITS_URL}/${SCIKITS_GZ}) - - -add_cdat_package_dependent(scikits "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git 
a/CMake/cdat_modules/scipy_deps.cmake b/CMake/cdat_modules/scipy_deps.cmake deleted file mode 100644 index f7ca69d033..0000000000 --- a/CMake/cdat_modules/scipy_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(SCIPY_deps ${numpy_pkg} ${cython_pkg}) diff --git a/CMake/cdat_modules/scipy_external.cmake b/CMake/cdat_modules/scipy_external.cmake deleted file mode 100644 index ebd0ca9e79..0000000000 --- a/CMake/cdat_modules/scipy_external.cmake +++ /dev/null @@ -1,42 +0,0 @@ -# The Scipy external project - -set(SCIPY_binary "${CMAKE_CURRENT_BINARY_DIR}/build/SCIPY") - -# to configure scipy we run a cmake -P script -# the script will create a site.cfg file -# then run python setup.py config to verify setup -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/SCIPY_configure_step.cmake @ONLY) -# to build scipy we also run a cmake -P script. -# the script will set LD_LIBRARY_PATH so that -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/SCIPY_make_step.cmake @ONLY) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/SCIPY_install_step.cmake @ONLY) - -set(SCIPY_CONFIGURE_COMMAND ${CMAKE_COMMAND} - -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -DCDAT_USE_SYSTEM_LAPACK:STRING=${CDAT_USE_SYSTEM_LAPACK} -DLAPACK_LIBRARIES:STRING=${LAPACK_LIBRARIES} -DBLAS_LIBRARIES:STRING=${BLAS_LIBRARIES} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_configure_step.cmake) -set(SCIPY_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_make_step.cmake) -set(SCIPY_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_install_step.cmake) - -# create an external project to download scipy, -# and configure and build it -ExternalProject_Add(SCIPY - URL ${SCIPY_URL}/${SCIPY_GZ} - URL_MD5 ${SCIPY_MD5} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR 
${SCIPY_binary} - BINARY_DIR ${SCIPY_binary} - CONFIGURE_COMMAND ${SCIPY_CONFIGURE_COMMAND} - BUILD_COMMAND ${SCIPY_BUILD_COMMAND} - UPDATE_COMMAND "" - INSTALL_COMMAND ${SCIPY_INSTALL_COMMAND} - DEPENDS - ${SCIPY_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/scipy_pkg.cmake b/CMake/cdat_modules/scipy_pkg.cmake deleted file mode 100644 index e582aecb6f..0000000000 --- a/CMake/cdat_modules/scipy_pkg.cmake +++ /dev/null @@ -1,21 +0,0 @@ -set(SCIPY_MAJOR 0) -set(SCIPY_MINOR 17) -set(SCIPY_PATCH 0) -set(SCIPY_MAJOR_SRC 0) -set(SCIPY_MINOR_SRC 17) -set(SCIPY_PATCH_SRC 0) -set(SCIPY_URL ${LLNL_URL}) -set(SCIPY_GZ scipy-${SCIPY_MAJOR_SRC}.${SCIPY_MINOR_SRC}.${SCIPY_PATCH_SRC}.tar.gz) -set(SCIPY_MD5 298ca04ade82814b17f5cd2d9d4c7b70) -set(SCIPY_SOURCE ${SCIPY_URL}/${SCIPY_GZ}) - -set (nm SCIPY) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) - -add_cdat_package_dependent(SCIPY "" "" OFF "CDAT_BUILD_LEAN" ON) -#if (CDAT_BUILD_ALL) -# add_cdat_package(scipy "" "" ON) -#else() -# add_cdat_package(scipy "" "" OFF) -#endif() diff --git a/CMake/cdat_modules/seawater_deps.cmake b/CMake/cdat_modules/seawater_deps.cmake deleted file mode 100644 index d8ca102702..0000000000 --- a/CMake/cdat_modules/seawater_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(seawater_deps ${python_pkg} ${numpy_pkg}) diff --git a/CMake/cdat_modules/seawater_external.cmake b/CMake/cdat_modules/seawater_external.cmake deleted file mode 100644 index a92c31447a..0000000000 --- a/CMake/cdat_modules/seawater_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# seawater -# -set(seawater_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/seawater") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/seawater_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/seawater_build_step.cmake" - @ONLY -) - -set(seawater_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/seawater_build_step.cmake) - -ExternalProject_Add(seawater - 
DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${seawater_source_dir} - URL ${SEAWATER_URL}/${SEAWATER_GZ} - URL_MD5 ${SEAWATER_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${seawater_build_command} - INSTALL_COMMAND "" - DEPENDS ${seawater_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/seawater_pkg.cmake b/CMake/cdat_modules/seawater_pkg.cmake deleted file mode 100644 index 81bde3ba70..0000000000 --- a/CMake/cdat_modules/seawater_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(SEAWATER_MAJOR 3) -set(SEAWATER_MINOR 3) -set(SEAWATER_PATCH 4) -set(SEAWATER_VERSION ${SEAWATER_MAJOR}.${SEAWATER_MINOR}.${SEAWATER_PATCH}) -set(SEAWATER_URL ${LLNL_URL}) -set(SEAWATER_GZ python-seawater-${SEAWATER_VERSION}.tar.gz) -set(SEAWATER_MD5 0932193350f42c055e7f523578ec1b7c) - -set (nm SEAWATER) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(SEAWATER_SOURCE ${SEAWATER_URL}/${SEAWATER_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(seawater "" "" ON) -else() - add_cdat_package(seawater "" "" OFF) -endif() diff --git a/CMake/cdat_modules/setuptools_deps.cmake b/CMake/cdat_modules/setuptools_deps.cmake deleted file mode 100644 index 9e3879e6b4..0000000000 --- a/CMake/cdat_modules/setuptools_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(setuptools_deps ${pkgconfig_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/setuptools_external.cmake b/CMake/cdat_modules/setuptools_external.cmake deleted file mode 100644 index cbea071a40..0000000000 --- a/CMake/cdat_modules/setuptools_external.cmake +++ /dev/null @@ -1,38 +0,0 @@ -set(setuptools_source "${CMAKE_CURRENT_BINARY_DIR}/build/setuptools") -set(setuptools_install "${cdat_EXTERNALS}") - -# 2012-03-19 C. 
Doutriaux Commented this out seems to not be able to pick pythonpath and ldlibrarypath -# Seems to be way too complicated for what's really needed -#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setuptools_make_step.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/setuptools_make_step.cmake -# @ONLY) - -#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setuptools_install_step.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/setuptools_install_step.cmake -# @ONLY) - -#set(setuptools_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/setuptools_make_step.cmake) -#set(setuptools_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/setuptools_install_step.cmake) - - -# old cmnd -# BUILD_COMMAND -# INSTALL_COMMAND ${setuptools_install_command} - -ExternalProject_Add(setuptools - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${setuptools_source} - INSTALL_DIR ${setuptools_install} - URL ${SETUPTOOLS_URL}/${SETUPTOOLS_GZ} - URL_MD5 ${SETUPTOOLS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${setuptools_deps} - ${ep_log_options} -) - -set(setuptools_DIR "${setuptools_binary}" CACHE PATH "setuptools binary directory" FORCE) -mark_as_advanced(setuptools_DIR) diff --git a/CMake/cdat_modules/setuptools_pkg.cmake b/CMake/cdat_modules/setuptools_pkg.cmake deleted file mode 100644 index 97c8e93f7b..0000000000 --- a/CMake/cdat_modules/setuptools_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(SETUPTOOLS_MAJOR_SRC 19) -set(SETUPTOOLS_MINOR_SRC 2) -set(SETUPTOOLS_PATCH_SRC '') -set(SETUPTOOLS_URL ${LLNL_URL}) -set(SETUPTOOLS_GZ setuptools-${SETUPTOOLS_MAJOR_SRC}.${SETUPTOOLS_MINOR_SRC}.tar.gz) -set(SETUPTOOLS_MD5 78353b1f80375ca5e088f4b4627ffe03) 
-set(SETUPTOOLS_VERSION ${SETUPTOOLS_MAJOR_SRC}.${SETUPTOOLS_MINOR_SRC}) -set(SETUPTOOLS_SOURCE ${SETUPTOOLS_URL}/${SETUPTOOLS_GZ}) - -add_cdat_package(setuptools "" "" OFF) diff --git a/CMake/cdat_modules/shapely_deps.cmake b/CMake/cdat_modules/shapely_deps.cmake deleted file mode 100644 index e4cf1bcff1..0000000000 --- a/CMake/cdat_modules/shapely_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Shapely_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${geos_pkg}) diff --git a/CMake/cdat_modules/shapely_external.cmake b/CMake/cdat_modules/shapely_external.cmake deleted file mode 100644 index a04192050e..0000000000 --- a/CMake/cdat_modules/shapely_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm Shapely) -set(USR_ENVS "GEOS_CONFIG=${cdat_EXTERNALS}/bin/geos-config") -include(pipinstaller) diff --git a/CMake/cdat_modules/shapely_pkg.cmake b/CMake/cdat_modules/shapely_pkg.cmake deleted file mode 100644 index 1155206523..0000000000 --- a/CMake/cdat_modules/shapely_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set( SHAPELY_MAJOR_SRC 1 ) -set( SHAPELY_MINOR_SRC 5 ) -set( SHAPELY_PATCH_SRC 13 ) -set(SHAPELY_URL ${LLNL_URL}) -set(SHAPELY_GZ - Shapely-${SHAPELY_MAJOR_SRC}.${SHAPELY_MINOR_SRC}.${SHAPELY_PATCH_SRC}.tar.gz) -set(SHAPELY_MD5 5ee549862ae84326f5f5525bbd0b8a50) -set(SHAPELY_SOURCE ${SHAPELY_URL}/${SHAPELY_GZ}) - -set (nm SHAPELY) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -if (CDAT_BUILD_ALL) - add_cdat_package(Shapely "" "" ON) -else() - add_cdat_package(Shapely "" "" OFF) -endif() diff --git a/CMake/cdat_modules/singledispatch_deps.cmake b/CMake/cdat_modules/singledispatch_deps.cmake deleted file mode 100644 index 5ad0c5ed4f..0000000000 --- a/CMake/cdat_modules/singledispatch_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(singledispatch_deps ${python_pkg} ${setuptools_pkg} ${six_pkg}) diff --git 
a/CMake/cdat_modules/singledispatch_external.cmake b/CMake/cdat_modules/singledispatch_external.cmake deleted file mode 100644 index 893edf6ae1..0000000000 --- a/CMake/cdat_modules/singledispatch_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# The singledispatch project - -set(singledispatch_binary "${CMAKE_CURRENT_BINARY_DIR}/build/singledispatch") - -ExternalProject_Add(singledispatch - DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR} - SOURCE_DIR ${singledispatch_binary} - URL ${SINGLEDISPATCH_SOURCE} - URL_MD5 ${SINGLEDISPATCH_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${singledispatch_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/singledispatch_pkg.cmake b/CMake/cdat_modules/singledispatch_pkg.cmake deleted file mode 100644 index c5eb273acb..0000000000 --- a/CMake/cdat_modules/singledispatch_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set( SINGLEDISPATCH_MAJOR 3 ) -set( SINGLEDISPATCH_MINOR 4 ) -set( SINGLEDISPATCH_PATCH 0.3) -set( SINGLEDISPATCH_VERSION ${SINGLEDISPATCH_MAJOR}.${SINGLEDISPATCH_MINOR}.${SINGLEDISPATCH_PATCH} ) -set( SINGLEDISPATCH_URL ${LLNL_URL} ) -set( SINGLEDISPATCH_GZ singledispatch-${SINGLEDISPATCH_VERSION}.tar.gz ) -set( SINGLEDISPATCH_MD5 af2fc6a3d6cc5a02d0bf54d909785fcb ) - -set (nm SINGLEDISPATCH) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(SINGLEDISPATCH_SOURCE ${SINGLEDISPATCH_URL}/${SINGLEDISPATCH_GZ}) - -if (BUILD_TESTING) - add_cdat_package(singledispatch "" "" ON) -endif() diff --git a/CMake/cdat_modules/sip_deps.cmake b/CMake/cdat_modules/sip_deps.cmake deleted file mode 100644 index ee888d4354..0000000000 --- a/CMake/cdat_modules/sip_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(SIP_deps ${pkgconfig_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/sip_external.cmake b/CMake/cdat_modules/sip_external.cmake deleted file 
mode 100644 index bbf4f461ff..0000000000 --- a/CMake/cdat_modules/sip_external.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(SIP_configure_command ${PYTHON_EXECUTABLE} ${cdat_BINARY_DIR}/build/SIP/configure.py -b ${CMAKE_INSTALL_PREFIX}/bin -d ${PYTHON_SITE_PACKAGES} -e ${CMAKE_INSTALL_PREFIX}/include -v ${CMAKE_INSTALL_PREFIX}/share CC=${CMAKE_C_COMPILER} CXX=${CMAKE_CXX_COMPILER}) - -ExternalProject_Add(SIP - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - URL ${SIP_URL}/${SIP_GZ} - URL_MD5 ${SIP_MD5} - SOURCE_DIR ${cdat_BINARY_DIR}/build/SIP - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${SIP_configure_command} - DEPENDS ${SIP_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/sip_pkg.cmake b/CMake/cdat_modules/sip_pkg.cmake deleted file mode 100644 index c2beefbd3b..0000000000 --- a/CMake/cdat_modules/sip_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(SIP_MAJOR 4) -set(SIP_MINOR 12) -set(SIP_PATCH 1) -set(SIP_MAJOR_SRC 4) -set(SIP_MINOR_SRC 16) -set(SIP_PATCH_SRC 4) -set(SIP_VERSION ${SIP_MAJOR_SRC}.${SIP_MINOR_SRC}.${SIP_PATCH_SRC}) -set(SIP_URL http://www.riverbankcomputing.com/static/Downloads/sip${SIP_MAJOR_SRC}) -set(SIP_URL ${LLNL_URL}) -set(SIP_GZ sip-${SIP_MAJOR_SRC}.${SIP_MINOR_SRC}.${SIP_PATCH_SRC}.tar.gz) -set(SIP_MD5 a9840670a064dbf8f63a8f653776fec9 ) -set(SIP_SOURCE ${SIP_URL}/${SIP_GZ}) - -add_cdat_package_dependent(SIP "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/six_deps.cmake b/CMake/cdat_modules/six_deps.cmake deleted file mode 100644 index 20fb4f54fd..0000000000 --- a/CMake/cdat_modules/six_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(SIX_deps ${python_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/six_external.cmake b/CMake/cdat_modules/six_external.cmake deleted file mode 100644 index 5a1ae27de4..0000000000 --- a/CMake/cdat_modules/six_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm SIX) - -include(pipinstaller) diff --git 
a/CMake/cdat_modules/six_pkg.cmake b/CMake/cdat_modules/six_pkg.cmake deleted file mode 100644 index e8daac58a6..0000000000 --- a/CMake/cdat_modules/six_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(SIX_MAJOR_SRC 1) -set(SIX_MINOR_SRC 9) -set(SIX_PATCH_SRC 0) - -set(SIX_VERSION ${SIX_MAJOR_SRC}.${SIX_MINOR_SRC}.${SIX_PATCH_SRC}) -set(SIX_GZ six-${SIX_VERSION}.tar.gz) -set(SIX_SOURCE ${LLNL_URL}/${SIX_GZ}) -set(SIX_MD5 476881ef4012262dfc8adc645ee786c4) - -add_cdat_package_dependent(SIX "" "" ON "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/sphinx_deps.cmake b/CMake/cdat_modules/sphinx_deps.cmake deleted file mode 100644 index 8e0e9f2a19..0000000000 --- a/CMake/cdat_modules/sphinx_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Sphinx_deps ${pip_pkg} ${docutils_pkg}) diff --git a/CMake/cdat_modules/sphinx_external.cmake b/CMake/cdat_modules/sphinx_external.cmake deleted file mode 100644 index 41cf3d2c1f..0000000000 --- a/CMake/cdat_modules/sphinx_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm Sphinx) - -include(pipinstaller) diff --git a/CMake/cdat_modules/sphinx_pkg.cmake b/CMake/cdat_modules/sphinx_pkg.cmake deleted file mode 100644 index 536d6e042e..0000000000 --- a/CMake/cdat_modules/sphinx_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(SPHINX_MAJOR_SRC 1) -set(SPHINX_MINOR_SRC 2) -set(SPHINX_PATCH_SRC 2) - -set (nm SPHINX) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(SPHINX_URL ${LLNL_URL}) -set(SPHINX_GZ Sphinx-${SPHINX_VERSION}.tar.gz) -set(SPHINX_SOURCE ${SPHINX_URL}/${SPHINX_GZ}) -set(SPHINX_MD5 3dc73ccaa8d0bfb2d62fb671b1f7e8a4) - -add_cdat_package_dependent(Sphinx "" "" OFF "CDAT_BUILD_GUI" OFF) - diff --git a/CMake/cdat_modules/spyder_deps.cmake b/CMake/cdat_modules/spyder_deps.cmake deleted file mode 100644 index b543e68ade..0000000000 --- a/CMake/cdat_modules/spyder_deps.cmake +++ /dev/null 
@@ -1 +0,0 @@ -set(spyder_deps ${pyzmq_pkg} ${pyqt_pkg} ${python_pkg} ${pip_pkg} ${numpy_pkg} ${scipy_pkg} ${sphinx_pkg} ${matplotlib_pkg}) diff --git a/CMake/cdat_modules/spyder_external.cmake b/CMake/cdat_modules/spyder_external.cmake deleted file mode 100644 index dede73c002..0000000000 --- a/CMake/cdat_modules/spyder_external.cmake +++ /dev/null @@ -1,7 +0,0 @@ -# create an external project to install spyder, -# and configure and build it - -set (nm spyder) -set(OLD "OFF") -include(pipinstaller) - diff --git a/CMake/cdat_modules/spyder_pkg.cmake b/CMake/cdat_modules/spyder_pkg.cmake deleted file mode 100644 index 664f2c3198..0000000000 --- a/CMake/cdat_modules/spyder_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(SPYDER_MAJOR_SRC 2) -set(SPYDER_MINOR_SRC 3) -set(SPYDER_PATCH_SRC 8) -set(SPYDER_URL ${LLNL_URL}) -set(SPYDER_ZIP spyder-${SPYDER_MAJOR_SRC}.${SPYDER_MINOR_SRC}.${SPYDER_PATCH_SRC}.zip) -set(SPYDER_SOURCE ${SPYDER_URL}/${SPYDER_ZIP}) -set(SPYDER_MD5 fb890dc956f606c43d560558159f3491) - -add_cdat_package_dependent(spyder "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/tcltk_deps.cmake b/CMake/cdat_modules/tcltk_deps.cmake deleted file mode 100644 index 4f4bf38e9a..0000000000 --- a/CMake/cdat_modules/tcltk_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(TclTk_deps ${pkgconfig_pkg} ${libxml2_pkg} ${libxslt_pkg} ${jpeg_pkg} ${png_pkg} ${tiff_pkg}) diff --git a/CMake/cdat_modules/tcltk_external.cmake b/CMake/cdat_modules/tcltk_external.cmake deleted file mode 100644 index 9c8baa5f6e..0000000000 --- a/CMake/cdat_modules/tcltk_external.cmake +++ /dev/null @@ -1,62 +0,0 @@ - -set(tcl_source "${CMAKE_CURRENT_BINARY_DIR}/build/tcl") -set(tk_source "${CMAKE_CURRENT_BINARY_DIR}/build/tk") -set(tcltk_install "${cdat_EXTERNALS}") - -set(tcltk_configure_args --enable-shared) - -# tcl -# -set(proj tcl-${TCLTK_MAJOR}.${TCLTK_MINOR}) - -ExternalProject_Add(${proj} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${tcl_source} - INSTALL_DIR 
${tcltk_install} - URL ${TCLTK_URL}/${TCL_GZ} - URL_MD5 ${TCL_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR=/unix -DCONFIGURE_ARGS=${tcltk_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS ${TclTk_deps} - ${ep_log_options} -) - -# tk -# -set(proj tk-${TCLTK_MAJOR}.${TCLTK_MINOR}) - -ExternalProject_Add(${proj} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${tk_source} - INSTALL_DIR ${tcltk_install} - URL ${TCLTK_URL}/${TK_GZ} - URL_MD5 ${TK_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS tcl-${TCLTK_MAJOR}.${TCLTK_MINOR} - ${ep_log_options} -) - -ExternalProject_Add_Step(${proj} symlink - COMMAND ${CMAKE_COMMAND} -E create_symlink "wish${TCLTK_MAJOR}.${TCLTK_MINOR}" wish - WORKING_DIRECTORY ${tcltk_install}/bin - COMMENT "Linking wish${TCLTK_MAJOR}.${TCLTK_MINOR} to wish" - DEPENDEES install -) - -# tcltk -# - -ExternalProject_Add(TclTk - DOWNLOAD_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND "" - DEPENDS tk-${TCLTK_MAJOR}.${TCLTK_MINOR} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/tcltk_pkg.cmake b/CMake/cdat_modules/tcltk_pkg.cmake deleted file mode 100644 index 1296043e2d..0000000000 --- a/CMake/cdat_modules/tcltk_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(TCLTK_MAJOR 8) -set(TCLTK_MINOR 5) -set(TCLTK_PATCH 9) -set(TCLTK_VERSION ${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}) -set(TCLTK_URL 
${LLNL_URL}) -set(TCL_GZ tcl${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}-src.tar.gz) -set(TK_GZ tk${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}-src.tar.gz) -set(TCL_MD5 8512d8db3233041dd68a81476906012a) -set(TK_MD5 7cdeb9feb61593f58a0ae61f2497580e) -# Two sources here, need to fake it -set(TCLTK_SOURCE "${TCLTK_URL}/${TCL_GZ} ${TCL_MD5}") -set(TCLTK_MD5 "${TCLTK_URL}/${TK_GZ} ${TK_MD5}") - -if (CDAT_BUILD_ESGF) - add_cdat_package(TclTk "" "" OFF) -else() - add_cdat_package_dependent(TclTk "" "" OFF "CDAT_BUILD_GUI" OFF) -endif() diff --git a/CMake/cdat_modules/termcap_deps.cmake b/CMake/cdat_modules/termcap_deps.cmake deleted file mode 100644 index 3c9a6f3aff..0000000000 --- a/CMake/cdat_modules/termcap_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(termcap_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/termcap_external.cmake b/CMake/cdat_modules/termcap_external.cmake deleted file mode 100644 index cf57c940a4..0000000000 --- a/CMake/cdat_modules/termcap_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(termcap_source "${CMAKE_CURRENT_BINARY_DIR}/build/termcap") -set(termcap_install "${cdat_EXTERNALS}") -set(termcap_conf_args) - -ExternalProject_Add(termcap - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${termcap_source} - INSTALL_DIR ${termcap_install} - URL ${TCAP_URL}/${TCAP_GZ} - URL_MD5 ${TCAP_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${termcap_conf_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${termcap_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/termcap_pkg.cmake b/CMake/cdat_modules/termcap_pkg.cmake deleted file mode 100644 index 11e6a0e928..0000000000 --- a/CMake/cdat_modules/termcap_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(TCAP_MAJOR_SRC 1) -set(TCAP_MINOR_SRC 3) -set(TCAP_PATCH_SRC 1) -set(TCAP_URL ${LLNL_URL}) -set(TCAP_GZ termcap-${TCAP_MAJOR_SRC}.${TCAP_MINOR_SRC}.${TCAP_PATCH_SRC}.tar.gz) -set(TCAP_MD5 
ffe6f86e63a3a29fa53ac645faaabdfa) -set(TERMCAP_SOURCE ${TCAP_URL}/${TCAP_GZ}) -set(TERMCAP_MD5 ${TCAP_MD5}) - -set (nm TCAP) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(TERMCAP_VERSION ${TCAP_VERSION}) - -add_cdat_package(termcap "" "" OFF) - diff --git a/CMake/cdat_modules/tiff_deps.cmake b/CMake/cdat_modules/tiff_deps.cmake deleted file mode 100644 index 3a05e71e96..0000000000 --- a/CMake/cdat_modules/tiff_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(tiff_deps ${pkgconfig_pkg} ${jpeg_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/tiff_external.cmake b/CMake/cdat_modules/tiff_external.cmake deleted file mode 100644 index 248a9929d3..0000000000 --- a/CMake/cdat_modules/tiff_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(tiff_source "${CMAKE_CURRENT_BINARY_DIR}/build/tiff") -set(tiff_install "${cdat_EXTERNALS}") - -ExternalProject_Add(tiff - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${tiff_source} - INSTALL_DIR ${tiff_install} - URL ${TIFF_URL}/${TIFF_GZ} - URL_MD5 ${TIFF_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${tiff_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/tiff_pkg.cmake b/CMake/cdat_modules/tiff_pkg.cmake deleted file mode 100644 index 09a6a191a7..0000000000 --- a/CMake/cdat_modules/tiff_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(TIFF_MAJOR 3) -set(TIFF_MINOR 9) -set(TIFF_PATCH 4) -set(TIFF_URL ${LLNL_URL}) -set(TIFF_GZ tiff-${TIFF_MAJOR}.${TIFF_MINOR}.${TIFF_PATCH}.tar.gz) -set(TIFF_MD5 2006c1bdd12644dbf02956955175afd6) -set(TIFF_SOURCE ${TIFF_URL}/${TIFF_GZ}) - -set (nm TIFF) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -add_cdat_package_dependent(tiff "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/tornado_deps.cmake 
b/CMake/cdat_modules/tornado_deps.cmake deleted file mode 100644 index 6c8e9f67da..0000000000 --- a/CMake/cdat_modules/tornado_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(tornado_deps ${spyder_pkg} ${pyzmq_deps} ${pip_pkg}) diff --git a/CMake/cdat_modules/tornado_external.cmake b/CMake/cdat_modules/tornado_external.cmake deleted file mode 100644 index 3531582b0c..0000000000 --- a/CMake/cdat_modules/tornado_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm tornado) - -include(pipinstaller) diff --git a/CMake/cdat_modules/tornado_pkg.cmake b/CMake/cdat_modules/tornado_pkg.cmake deleted file mode 100644 index a40c77381b..0000000000 --- a/CMake/cdat_modules/tornado_pkg.cmake +++ /dev/null @@ -1,7 +0,0 @@ -set(TORNADO_VERSION 3.1) -set(TORNADO_URL ${LLNL_URL}) -set(TORNADO_GZ tornado-${TORNADO_VERSION}.tar.gz) -set(TORNADO_SOURCE ${TORNADO_URL}/${TORNADO_GZ}) -set(TORNADO_MD5 2348d626095c5675753287e9af0c321f ) - -add_cdat_package(tornado "" "" OFF) diff --git a/CMake/cdat_modules/udunits2_deps.cmake b/CMake/cdat_modules/udunits2_deps.cmake deleted file mode 100644 index b032ce41d5..0000000000 --- a/CMake/cdat_modules/udunits2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(udunits2_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/udunits2_external.cmake b/CMake/cdat_modules/udunits2_external.cmake deleted file mode 100644 index c70b20fd93..0000000000 --- a/CMake/cdat_modules/udunits2_external.cmake +++ /dev/null @@ -1,25 +0,0 @@ -set(udunits_source "${CMAKE_CURRENT_BINARY_DIR}/build/udunits2") -set(udunits_install "${cdat_EXTERNALS}") - -set(udunits_patch_command "") -if(APPLE) - # Modified configure file to workaround random flex failures - set(udunits_patch_command - ${CMAKE_COMMAND} -E copy_if_different - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/udunits2_apple_configure.in" - "${udunits_source}/configure") -endif() - -ExternalProject_Add(udunits2 - DOWNLOAD_DIR 
${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${udunits_source} - INSTALL_DIR ${udunits_install} - URL ${UDUNITS2_URL}/${UDUNITS2_GZ} - URL_MD5 ${UDUNITS2_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${udunits_patch_command} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${udunits2_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/udunits2_pkg.cmake b/CMake/cdat_modules/udunits2_pkg.cmake deleted file mode 100644 index b114ac7707..0000000000 --- a/CMake/cdat_modules/udunits2_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(UDUNITS2_URL ${LLNL_URL}) -set(UDUNITS2_MAJOR_SRC 2) -set(UDUNITS2_MINOR_SRC 2) -set(UDUNITS2_PATCH_SRC 17) -set(UDUNITS2_URL ${LLNL_URL}) -set(UDUNITS2_GZ udunits-${UDUNITS2_MAJOR_SRC}.${UDUNITS2_MINOR_SRC}.${UDUNITS2_PATCH_SRC}.tar.gz) -set(UDUNITS2_MD5 b81ab8f24125ce18702ab7b3ca4d566f ) -set(UDUNITS2_SOURCE ${UDUNITS2_URL}/${UDUNITS2_GZ}) - -set (nm UDUNITS2) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) - -add_cdat_package_dependent(udunits2 "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/uuid_deps.cmake b/CMake/cdat_modules/uuid_deps.cmake deleted file mode 100644 index 2f2b9e4ba1..0000000000 --- a/CMake/cdat_modules/uuid_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(uuid_deps ${pkgconfig_pkg} ) diff --git a/CMake/cdat_modules/uuid_external.cmake b/CMake/cdat_modules/uuid_external.cmake deleted file mode 100644 index a53deeb799..0000000000 --- a/CMake/cdat_modules/uuid_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ - -set(uuid_source "${CMAKE_CURRENT_BINARY_DIR}/build/uuid") -set(uuid_install "${cdat_EXTERNALS}") - -ExternalProject_Add(uuid - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${uuid_source} - INSTALL_DIR ${uuid_install} - URL ${UUID_URL}/${UUID_GZ} - URL_MD5 ${UUID_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= 
-DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${uuid_deps} - ${ep_log_options} -) - -set(uuid_DIR "${uuid_binary}" CACHE PATH "uuid binary directory" FORCE) -mark_as_advanced(uuid_DIR) diff --git a/CMake/cdat_modules/uuid_pkg.cmake b/CMake/cdat_modules/uuid_pkg.cmake deleted file mode 100644 index d05bfb620a..0000000000 --- a/CMake/cdat_modules/uuid_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(UUID_MAJOR_SRC 1) -set(UUID_MINOR_SRC 6) -set(UUID_PATCH_SRC 2) -set(UUID_URL ${LLNL_URL}) -set(UUID_GZ uuid-${UUID_MAJOR_SRC}.${UUID_MINOR_SRC}.${UUID_PATCH_SRC}.tar.gz) -set(UUID_MD5 5db0d43a9022a6ebbbc25337ae28942f) -set(UUID_SOURCE ${UUID_URL}/${UUID_GZ}) - -set (nm UUID) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -#apparently libcf needs it -add_cdat_package_dependent(uuid "" "" OFF "CDAT_BUILD_LEAN" ON) -#add_cdat_package(uuid "" "" OFF) - diff --git a/CMake/cdat_modules/uvcmetrics_deps.cmake b/CMake/cdat_modules/uvcmetrics_deps.cmake deleted file mode 100644 index a01e906ae6..0000000000 --- a/CMake/cdat_modules/uvcmetrics_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(UVCMETRICS_deps ${cdat_pkg} ) diff --git a/CMake/cdat_modules/uvcmetrics_external.cmake b/CMake/cdat_modules/uvcmetrics_external.cmake deleted file mode 100644 index 4a9ad2d1e9..0000000000 --- a/CMake/cdat_modules/uvcmetrics_external.cmake +++ /dev/null @@ -1,42 +0,0 @@ - -if (CDAT_DOWNLOAD_UVCMETRICS_TESTDATA) - set(UVCMETRICS_DOWNLOAD_FILES "") - - file(READ "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcmetrics_test_data_md5s.txt" UVCMETRICS_FILES) - string(REGEX REPLACE ";" "\\\\;" UVCMETRICS_FILES "${UVCMETRICS_FILES}") - string(REGEX REPLACE "\n" ";" UVCMETRICS_FILES "${UVCMETRICS_FILES}") - - foreach(line ${UVCMETRICS_FILES}) - string(REGEX REPLACE " +" ";" line "${line}") - list(GET line 1 base_file_path) - list(GET line 0 FILE_MD5) - - string(STRIP "${base_file_path}" base_file_path) - string(STRIP 
"${FILE_MD5}" FILE_MD5) - - set(FILE_PATH "${UVCMETRICS_TEST_DATA_DIRECTORY}/${base_file_path}") - list(APPEND UVCMETRICS_DOWNLOAD_FILES "${FILE_PATH}") - - set(FILE_URL "${LLNL_URL}/../sample_data/uvcmetrics_2.4.1/${base_file_path}") - - add_custom_command( - OUTPUT "${FILE_PATH}" - COMMAND "${CMAKE_COMMAND}" - -D FILE_URL="${FILE_URL}" - -D FILE_MD5="${FILE_MD5}" - -D FILE_PATH="${FILE_PATH}" - -P "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake" - DEPENDS "${uvcmetrics_data_keyfile}" - COMMENT "Downloading ${base_file_path}" - ) - endforeach() - - add_custom_target(uvcmetrics_test_data ALL DEPENDS ${UVCMETRICS_DOWNLOAD_FILES}) -endif() - -set(GIT_CMD_STR GIT_REPOSITORY "${UVCMETRICS_SOURCE}") -set(GIT_TAG GIT_TAG "${UVCMETRICS_BRANCH}") -set(nm UVCMETRICS) -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/uvcmetrics_pkg.cmake b/CMake/cdat_modules/uvcmetrics_pkg.cmake deleted file mode 100644 index 2f82940422..0000000000 --- a/CMake/cdat_modules/uvcmetrics_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set (nm UVCMETRICS) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_TAG}) -set(UVCMETRICS_URL ${LLNL_URL}) -set(UVCMETRICS_ZIP uvcmetrics-${UVCMETRICS_VERSION}.zip) -#set(UVCMETRICS_SOURCE ${UVCMETRICS_URL}/${UVCMETRICS_ZIP}) -set(UVCMETRICS_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/uvcmetrics.git ) -set(UVCMETRICS_MD5) -set(UVCMETRICS_BRANCH master) - -if (NOT CDAT_BUILD_LEAN) - add_cdat_package(UVCMETRICS "" "" ON) -endif() - diff --git a/CMake/cdat_modules/vacumm_deps.cmake b/CMake/cdat_modules/vacumm_deps.cmake deleted file mode 100644 index 9472871dd1..0000000000 --- a/CMake/cdat_modules/vacumm_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(vacumm_deps ${python_pkg} ${numpy_pkg} ${scipy_pkg} ${matplotlib_pkg} ${basemap_pkg} ${configobj_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/vacumm_external.cmake b/CMake/cdat_modules/vacumm_external.cmake deleted file mode 100644 index 
0cf4556ff1..0000000000 --- a/CMake/cdat_modules/vacumm_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# vacumm -# -set(vacumm_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/vacumm") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/vacumm_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/vacumm_build_step.cmake" - @ONLY - ) - -set(vacumm_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/vacumm_build_step.cmake) - -ExternalProject_Add(vacumm - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${vacumm_source_dir} - URL ${VACUMM_URL}/${VACUMM_GZ} - URL_MD5 ${VACUMM_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${vacumm_build_command} - INSTALL_COMMAND "" - DEPENDS ${vacumm_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/vacumm_pkg.cmake b/CMake/cdat_modules/vacumm_pkg.cmake deleted file mode 100644 index 7dea0632e8..0000000000 --- a/CMake/cdat_modules/vacumm_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(VACUMM_MAJOR 3) -set(VACUMM_MINOR 0) -set(VACUMM_PATCH 0) -set(VACUMM_VERSION ${VACUMM_MAJOR}.${VACUMM_MINOR}.${VACUMM_PATCH}) -set(VACUMM_URL ${LLNL_URL} ) -set(VACUMM_GZ vacumm-${VACUMM_VERSION}.tar.gz) -set(VACUMM_MD5 b468fa72ddba9d0cd39d51164bef1dd4) - -set (nm VACUMM) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(VACUMM_SOURCE ${VACUMM_URL}/${VACUMM_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(vacumm "" "" ON) -else() - add_cdat_package(vacumm "" "" OFF) -endif() diff --git a/CMake/cdat_modules/visit_deps.cmake b/CMake/cdat_modules/visit_deps.cmake deleted file mode 100644 index 023429df27..0000000000 --- a/CMake/cdat_modules/visit_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(VisIt_deps ${pkgconfig_pkg} ${sip_pkg} ${pyqt_pkg} ${paraview_pkg} ${r_pkg}) diff --git a/CMake/cdat_modules/visit_external.cmake b/CMake/cdat_modules/visit_external.cmake deleted file mode 100644 index 7fbdb404cc..0000000000 --- a/CMake/cdat_modules/visit_external.cmake +++ 
/dev/null @@ -1,173 +0,0 @@ -set(VisIt_source "${CMAKE_CURRENT_BINARY_DIR}/build/VisIt") -set(VisIt_binary "${CMAKE_CURRENT_BINARY_DIR}/build/VisIt") -set(VisIt_install "${CMAKE_INSTALL_PREFIX}") - -if(QT_QMAKE_EXECUTABLE) - get_filename_component(QT_BINARY_DIR ${QT_QMAKE_EXECUTABLE} PATH) - get_filename_component(QT_ROOT ${QT_BINARY_DIR} PATH) -endif() - -GET_FILENAME_COMPONENT(CMAKE_PATH_VAR ${CMAKE_COMMAND} PATH) -SET(VISIT_C_FLAGS "${CMAKE_C_FLAGS} -I${cdat_EXTERNALS}/include") -GET_FILENAME_COMPONENT(VISIT_C_COMPILER ${CMAKE_C_COMPILER} NAME) -SET(VISIT_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I${cdat_EXTERNALS}/include") -GET_FILENAME_COMPONENT(VISIT_CXX_COMPILER ${CMAKE_CXX_COMPILER} NAME) -SET(VISIT_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${cdat_EXTERNALS}/lib") - -MACRO(DETERMINE_VISIT_ARCHITECTURE ARCH) - IF(${CMAKE_SYSTEM_NAME} STREQUAL "Linux") - IF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc") - SET(${ARCH} linux-ppc) - ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc64") - SET(${ARCH} linux-ppc64) - ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "x86_64") - SET(${ARCH} linux-x86_64) - ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ia64") - SET(${ARCH} linux-ia64) - ELSE(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc") - SET(${ARCH} linux-intel) - ENDIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "AIX") - IF($ENV{OBJECT_MODE} STREQUAL "32") - SET(${ARCH} "ibm-aix-pwr") - ELSE($ENV{OBJECT_MODE} STREQUAL "32") - SET(${ARCH} "ibm-aix-pwr64") - ENDIF($ENV{OBJECT_MODE} STREQUAL "32") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "Darwin") - IF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386") - EXECUTE_PROCESS(COMMAND uname -r - OUTPUT_STRIP_TRAILING_WHITESPACE - OUTPUT_VARIABLE _OSX_VERSION) - STRING(SUBSTRING ${_OSX_VERSION} 0 1 _OSX_MAJOR_VERSION) - IF(${_OSX_MAJOR_VERSION} STREQUAL "1") - # This will match 10, 11, 12, ... 
- SET(${ARCH} darwin-x86_64) - ELSE(${_OSX_MAJOR_VERSION} STREQUAL "1") - SET(${ARCH} darwin-i386) - ENDIF(${_OSX_MAJOR_VERSION} STREQUAL "1") - ELSE(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386") - SET(${ARCH} darwin-x86_64) - ENDIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "FreeBSD") - SET(${ARCH} "freebsd-${CMAKE_SYSTEM_VERSION}") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "IRIX") - SET(${ARCH} sgi-irix6-mips2) - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "SunOS") - SET(${ARCH} "sun4-${CMAKE_SYSTEM_VERSION}-sparc") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "Tru64") - SET(${ARCH} dec-osf1-alpha) - ELSE(${CMAKE_SYSTEM_NAME} STREQUAL "Linux") - # Unhandled case. Make up a string. - SET(VISITARCHTMP "${CMAKE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR}") - STRING(TOLOWER ${VISITARCHTMP} ${ARCH}) - ENDIF(${CMAKE_SYSTEM_NAME} STREQUAL "Linux") -ENDMACRO(DETERMINE_VISIT_ARCHITECTURE ARCH) - -# Note this is a workaround to handle build on APPLE -IF(APPLE) - SET(VISIT_INSTALL_PLATFORM "darwin-x86_64") -ELSE(APPLE) - DETERMINE_VISIT_ARCHITECTURE(VISIT_INSTALL_PLATFORM) -ENDIF(APPLE) - -SET(VISIT_HOSTNAME "visit-uvcdat-build") - - -#Add VisIt to ExternalProject -ExternalProject_Add(VisIt - #DOWNLOAD_DIR ${VisIt_source} #${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${VisIt_source} - BINARY_DIR ${VisIt_binary} - INSTALL_DIR ${VisIt_install} - #SVN_REPOSITORY ${VISIT_SVN} - URL ${VISIT_URL}/${VISIT_GZ} - #URL_MD5 ${VISIT_MD5} - PATCH_COMMAND "" - #CONFIGURE_COMMAND "" - BUILD_COMMAND "" - CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${VisIt_install} -DCMAKE_INSTALL_NAME_DIR=${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib -DVISIT_CONFIG_SITE:FILEPATH=${VisIt_source}/${VISIT_HOSTNAME}.cmake - DEPENDS ${VisIt_deps} - ${ep_log_options} -) - -if(NOT EXISTS ${CMAKE_INSTALL_PREFIX}/lib) - file(MAKE_DIRECTORY ${CMAKE_INSTALL_PREFIX}/lib) -endif() - -#add references to VisIt's cmake -SET(TMP_STR1 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake 
\"VISIT_OPTION_DEFAULT(VISIT_OSX_USE_RPATH TYPE BOOL ON)\\n\")\n") -SET(TMP_STR2 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_QT_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR3 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_PYTHON_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR4 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_HEADERS_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR5 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_VTK_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR6 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_R_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR7 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"add_definitions(-DEXTERNAL_VTK_BUILD)\\n\")\n") -SET(TMP_STR8 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(CMAKE_EXE_LINKER_FLAGS \\\"\\\${CMAKE_EXE_LINKER_FLAGS} ${VISIT_LINKER_FLAGS}\\\")\\n\")\n") -SET(TMP_STR9 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_C_FLAGS \\\"\\\${VISIT_C_FLAGS} ${VISIT_C_FLAGS}\\\")\\n\")\n") -SET(TMP_STR10 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_CXX_FLAGS \\\"\\\${VISIT_CXX_FLAGS} ${VISIT_CXX_FLAGS}\\\")\\n\")\n") - -FILE(WRITE ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR1}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR2}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR3}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR4}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR5}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR6}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR7}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR8}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR9}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR10}) - -# Before install step -#load 
VisIt installation -ExternalProject_Add_Step(VisIt BuildVisItPatch_Step1 - COMMAND sed -e s//"object.h"/g ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C > ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C_tmp - COMMAND mv ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C_tmp ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C - COMMAND echo yes | svn_bin/build_visit --gpl --console --cc ${VISIT_C_COMPILER} --cxx ${VISIT_CXX_COMPILER} --alt-vtk-dir ${ParaView_binary}/VTK --alt-pyqt-dir ${CMAKE_INSTALL_PREFIX} --alt-R-dir ${cdat_EXTERNALS} --alt-netcdf-dir ${cdat_EXTERNALS} --alt-hdf5-dir ${cdat_EXTERNALS} --thirdparty-path ${CMAKE_CURRENT_BINARY_DIR}/visit-thirdparty --cmake-bin-dir ${CMAKE_PATH_VAR} --alt-python-dir ${CMAKE_INSTALL_PREFIX} --alt-qt-dir ${QT_ROOT} --no-visit --makeflags -j${VISIT_PARALLEL_PROCESSORS} --log-file ${CMAKE_BINARY_DIR}/logs/VisIt-build-out.log --no-mesa --visit-build-hostname ${VisIt_source}/${VISIT_HOSTNAME}.cmake - COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/visit.cmake - DEPENDEES patch - DEPENDERS configure - WORKING_DIRECTORY ${VisIt_source}) - -#After installation -#Make symlinks of VisIt's lib, plugins, -#move pyqt_pyqtviewer.so and plugin into python site-packages -message("COMMAND1: ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}") - -message("COMMAND2: ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}-plugins") - -ExternalProject_Add_Step(VisIt InstallVisItLibSymLink - COMMAND ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION} - COMMAND ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}-plugins - DEPENDEES 
install - WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}) - -FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "MESSAGE(STATUS \"Executing VisIt post installation steps\")\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB hdf5_files ${HDF5_install}/lib/libhdf5*${_LINK_LIBRARY_SUFFIX}*)\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${hdf5_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n") - -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB netcdf_files ${netcdf_install}/lib/libnetcdf*${_LINK_LIBRARY_SUFFIX}*)\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${netcdf_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n") - -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB z_files ${zlib_install}/lib/libz*${_LINK_LIBRARY_SUFFIX}*)\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${z_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n") - -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB curl_files ${curl_install}/lib/libcurl*${_LINK_LIBRARY_SUFFIX}*)\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${curl_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n") - -ExternalProject_Add_Step(VisIt InstallVisItExternalLibraries - COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch - DEPENDEES InstallVisItLibSymLink - WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR} - ) - -# clean up un-necessary database readers -ExternalProject_Add_Step(VisIt RemoveUnnecessaryDatabaseReaders - COMMAND find . ! 
\( -iname "*netcdf*" -o -iname "*image*" -o -iname "*hdf5*" -o -iname "*pixie*" -o -iname "*vtk*" -o -iname "*mtk*" -o -iname "*xdmf*" \) -type f -delete - DEPENDEES install - WORKING_DIRECTORY ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins/databases) - -FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/r_ismev_package "r = getOption('repos'); r['CRAN'] = 'http://cran.us.r-project.org'; options(repos = r); rm(r); install.packages('ismev')") - -ExternalProject_Add_Step(VisIt AddRDependencies - COMMAND ${cdat_EXTERNALS}/bin/Rscript ${CMAKE_CURRENT_BINARY_DIR}/r_ismev_package - DEPENDEES install) diff --git a/CMake/cdat_modules/visit_pkg.cmake b/CMake/cdat_modules/visit_pkg.cmake deleted file mode 100644 index df8c7fab16..0000000000 --- a/CMake/cdat_modules/visit_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(VISIT_MAJOR 2) -set(VISIT_MINOR 6) -set(VISIT_PATCH 0) -set(VISIT_VERSION ${VISIT_MAJOR}.${VISIT_MINOR}.${VISIT_PATCH}) -set(VISIT_URL http://vis.lbl.gov/~visit) -set(VISIT_GZ visit${VISIT_VERSION}.tar.gz) -set(VISIT_MD5 cb7ff3e7d6e487a11786644a3b49331e ) -set(VISIT_SOURCE ${VISIT_URL}/${VISIT_GZ}) - -add_cdat_package_dependent(VisIt "" "Build VisIt" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/vistrails_deps.cmake b/CMake/cdat_modules/vistrails_deps.cmake deleted file mode 100644 index 98ae7150f0..0000000000 --- a/CMake/cdat_modules/vistrails_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(vistrails_deps ${python_pkg} ${cython_pkg} ${scipy_pkg}) diff --git a/CMake/cdat_modules/vistrails_external.cmake b/CMake/cdat_modules/vistrails_external.cmake deleted file mode 100644 index ae8027c262..0000000000 --- a/CMake/cdat_modules/vistrails_external.cmake +++ /dev/null @@ -1,92 +0,0 @@ -# Create an external project to clone vistrails, -# and configure and build it - -include(GetGitRevisionDescription) -set(vistrails_branch ${VISTRAILS_BRANCH}) - -get_git_head_revision(refspec sha) - -string(REGEX REPLACE ".+/(.+)" "\\1" _branch "${refspec}") - -# Did 
we extract out the branch? -if (NOT _branch STREQUAL "${refspec}") - # Get the remote the branh if from - get_git_remote_for_branch(${_branch} _remote) - - if (_remote) - git_remote_url(${_remote} _url) - - if (_url) - if(_url MATCHES "^.*uvcdat.git") - if(_branch STREQUAL "master") - set(vistrails_branch ${VISTRAILS_BRANCH}) - elseif(_branch STREQUAL "release") - set(vistrails_branch ${VISTRAILS_BRANCH}) - endif() - elseif(_url MATCHES "^.*uvcdat-devel.git") - set(vistrails_branch uvcdat-next) - endif() - endif() - endif() -else() - message(WARNING "Unable to branch from '${refspec}' using default VisTrails branch") -endif() - -if("${refspec}" STREQUAL "refs/heads/devel-master") - set(vistrails_branch uvcdat-next) -endif() - -message("[INFO] Using vistrails branch: ${vistrails_branch}") - -set(vistrails_tag_point_message "Specify branch of vistrails to be used for UVCDAT") -set(VISTRAILS_TAG_POINT ${vistrails_branch} CACHE STRING "${vistrails_tag_point_message}") -set(vistrails_url "${VISTRAILS_SOURCE}") - -if(CDAT_AUTO_UPDATE_VISTRAILS_TAG_POINT) - set(VISTRAILS_TAG_POINT ${vistrails_branch} CACHE STRING "${vistrails_tag_point_message}" FORCE) -endif() - -# For configure purposes -set(SOURCE_DIR "${CMAKE_INSTALL_PREFIX}/vistrails") -set(BRANCH ${VISTRAILS_TAG_POINT}) -set(GIT_URL "${vistrails_url}") -set(GIT_TARGET "vistrails") - -option(CDAT_DELETE_VISTRAILS_HISTORY "Delete GIT history of vistrails" OFF) -option(CDAT_AUTO_UPDATE_VISTRAILS_TAG_POINT "Delete GIT history of vistrails" ON) - -set(vistrails_install_command ${cdat_BINARY_DIR}/git_clone_vistrails.sh) -if(EXISTS "${SOURCE_DIR}") - configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/git_update.sh.in - ${cdat_BINARY_DIR}/git_update_vistrails.sh - @ONLY - ) - set(vistrails_install_command ${cdat_BINARY_DIR}/git_update_vistrails.sh) -else() - configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/git_clone.sh.in - ${cdat_BINARY_DIR}/git_clone_vistrails.sh - @ONLY - ) -endif() - 
-ExternalProject_Add(vistrails - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${CMAKE_INSTALL_PREFIX} - BUILD_IN_SOURCE 0 - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND ${vistrails_install_command} - DEPENDS ${vistrails_DEPENDENCIES} - ${EP_LOG_OPTIONS} -) - -if(CDAT_DELETE_VISTRAILS_HISTORY) - ExternalProject_Add_Step(vistrails after_install - COMMAND ${CMAKE_COMMAND} -E remove_directory ${CMAKE_INSTALL_PREFIX}/vistrails/.git - DEPENDEES install - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/vistrails - ) -endif() - diff --git a/CMake/cdat_modules/vistrails_pkg.cmake b/CMake/cdat_modules/vistrails_pkg.cmake deleted file mode 100644 index de4704436d..0000000000 --- a/CMake/cdat_modules/vistrails_pkg.cmake +++ /dev/null @@ -1,6 +0,0 @@ -set(VISTRAILS_VERSION ${VISTRAILS_TAG_POINT}) -set(VISTRAILS_SOURCE "${GIT_PROTOCOL}github.com/UV-CDAT/VisTrails.git") -set(VISTRAILS_VERSION uvcdat-2.4.0) -set(VISTRAILS_MD5) -set(VISTRAILS_BRANCH uvcdat-2.4.0) -add_cdat_package_dependent(vistrails "" "" ON "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/vtk_deps.cmake b/CMake/cdat_modules/vtk_deps.cmake deleted file mode 100644 index 015636f1fd..0000000000 --- a/CMake/cdat_modules/vtk_deps.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(VTK_deps ${pkgconfig_pkg} ${python_pkg} ${tiff_pkg} ${hdf5_pkg} ${freetype_pkg} ${netcdfplus_pkg} ${netcdf_pkg} ${proj4_pkg}) - -if (NOT CDAT_BUILD_GUI) - list(APPEND VTK_deps ${qt_pkg}) -endif() - -if(NOT CDAT_BUILD_LEAN) - list(APPEND VTK_deps ${ffmpeg_pkg}) -endif() - -if(CDAT_BUILD_OFFSCREEN) - list(APPEND VTK_deps ${osmesa_pkg}) -endif() diff --git a/CMake/cdat_modules/vtk_external.cmake b/CMake/cdat_modules/vtk_external.cmake deleted file mode 100644 index 8e926a66c7..0000000000 --- a/CMake/cdat_modules/vtk_external.cmake +++ /dev/null @@ -1,184 +0,0 @@ -set(vtk_source "${CMAKE_CURRENT_BINARY_DIR}/build/VTK") -set(vtk_binary "${CMAKE_CURRENT_BINARY_DIR}/build/VTK-build") -set(vtk_install "${cdat_EXTERNALS}") - 
-set(GIT_CMD_STR GIT_REPOSITORY "${VTK_SOURCE}") - -set(_vtk_modules - vtkCommonComputationalGeometry - vtkCommonCore - vtkCommonExecutionModel - vtkCommonMisc - vtkCommonSystem - vtkCommonTransforms - vtkFiltersAMR - vtkFiltersCore - vtkFiltersExtraction - vtkFiltersFlowPaths - vtkFiltersGeneral - vtkFiltersGeneric - vtkFiltersGeometry - vtkFiltersHybrid - vtkFiltersImaging - vtkFiltersModeling - vtkFiltersSelection - vtkFiltersSMP - vtkFiltersSources - vtkFiltersStatistics - vtkFiltersTexture - vtkGeovisCore - vtkImagingColor - vtkImagingCore - vtkImagingGeneral - vtkImagingMath - vtkImagingSources - vtkImagingStencil - vtkInteractionImage - vtkInteractionStyle - vtkInteractionWidgets - vtkIOCore - vtkIOExport - vtkIOExportOpenGL - vtkIOGeometry - vtkIOImage - vtkIOImport - vtkRenderingCore - vtkRenderingFreeType - vtkRenderingFreeTypeOpenGL - vtkRenderingImage - vtkRenderingLabel - vtkRenderingOpenGL - vtkRenderingVolume - vtkRenderingVolumeOpenGL - vtkViewsCore - vtkViewsGeovis -) - -if(NOT CDAT_BUILD_LEAN) - list(APPEND _vtk_modules "vtkIOFFMPEG") -endif() - -# Either we use cdat zlib and libxml or system zlib and libxml -list(APPEND vtk_build_args - -DVTK_USE_SYSTEM_ZLIB:BOOL=ON - -DVTK_USE_SYSTEM_LIBXML2:BOOL=ON - -DVTK_USE_SYSTEM_HDF5:BOOL=ON - -DVTK_USE_SYSTEM_NETCDF:BOOL=ON - -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON - -DVTK_USE_SYSTEM_LIBPROJ4:BOOL=ON - -DVTK_RENDERING_BACKEND:STRING=OpenGL - -DLIBPROJ4_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/proj4/include - -DLIBPROJ4_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/proj4/lib/libproj${_LINK_LIBRARY_SUFFIX} -# -) -if (APPLE) - list(APPEND vtk_build_args - -DVTK_USE_SYSTEM_PNG:BOOL=OFF - -DVTK_USE_SYSTEM_JPEG:BOOL=OFF - ) -else() - list(APPEND vtk_build_args - -DVTK_USE_SYSTEM_PNG:BOOL=ON - -DVTK_USE_SYSTEM_JPEG:BOOL=ON - ) -endif() - -# Turn off testing and other non essential featues -list(APPEND vtk_build_args - -DBUILD_TESTING:BOOL=OFF - -DCMAKE_PREFIX_PATH:PATH=${cdat_EXTERNALS} -) - -# Use cdat zlib -#if(NOT 
CDAT_USE_SYSTEM_ZLIB) -# list(APPEND vtk_build_args -# -DZLIB_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include -# -DZLIB_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# ) -#endif() - -# Use cdat libxml -#if(NOT CDAT_USE_SYSTEM_LIBXML2) -# list(APPEND vtk_build_args -# -DLIBXML2_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include/libxml2 -# -DLIBXML2_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/lib/libxml2${_LINK_LIBRARY_SUFFIX} -# -DLIBXML2_XMLLINT_EXECUTABLE:FILEPATH=${cdat_EXTERNALS}/bin/xmllint -# ) -#endif() - -# Use cdat hdf5 -if(NOT CDAT_USE_SYSTEM_HDF5) - list(APPEND vtk_build_args - -DHDF5_DIR:PATH=${cdat_EXTERNALS}/ - -DHDF5_C_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include - -DHDF5_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include - -DHDF5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - -DHDF5_hdf5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - -DHDF5_hdf5_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - ) - -# if(NOT CDAT_USE_SYSTEM_ZLIB) -# list(APPEND vtk_build_args -# -DHDF5_z_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# -DHDF5_z_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# ) -# endif() -endif() - -if(CDAT_BUILD_OFFSCREEN) - list(APPEND vtk_build_args - "-DVTK_USE_X:BOOL=OFF" - "-DVTK_OPENGL_HAS_OSMESA:BOOL=ON" - "-DOPENGL_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include" - "-DOPENGL_gl_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}" - "-DOPENGL_glu_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libGLU${_LINK_LIBRARY_SUFFIX}" - "-DOSMESA_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include" - "-DOSMESA_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}" - ) -endif() - -if(CDAT_BUILD_WEB) - list(APPEND vtk_build_args - "-DVTK_Group_Web:BOOL=ON" - ) -endif() - -set(_vtk_module_options) -foreach(_module ${_vtk_modules}) - list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON") -endforeach() - 
-ExternalProject_Add(VTK - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${vtk_source} - BINARY_DIR ${vtk_binary} - INSTALL_DIR ${vtk_install} - ${GIT_CMD_STR} - GIT_TAG ${VTK_BRANCH} - UPDATE_COMMAND "" - PATCH_COMMAND "" - CMAKE_CACHE_ARGS - -DBUILD_SHARED_LIBS:BOOL=ON - -DBUILD_TESTING:BOOL=OFF - -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags} - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR} - ${cdat_compiler_args} - ${vtk_build_args} - -DVTK_WRAP_PYTHON:BOOL=ON - -DPYTHON_EXECUTABLE:FILEPATH=${PYTHON_EXECUTABLE} - -DPYTHON_INCLUDE_DIR:PATH=${PYTHON_INCLUDE} - -DPYTHON_LIBRARY:FILEPATH=${PYTHON_LIBRARY} - -DPYTHON_MAJOR_VERSION:STRING=${PYTHON_MAJOR} - -DPYTHON_MINOR_VERSION:STRING=${PYTHON_MINOR} - -DVTK_Group_Rendering:BOOL=OFF - -DVTK_Group_StandAlone:BOOL=OFF - -DVTK_LEGACY_SILENT:BOOL=ON - ${_vtk_module_options} - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - DEPENDS ${VTK_deps} - ${ep_log_options} -) - -unset(GIT_CMD_STR) diff --git a/CMake/cdat_modules/vtk_pkg.cmake b/CMake/cdat_modules/vtk_pkg.cmake deleted file mode 100644 index 35504cbac6..0000000000 --- a/CMake/cdat_modules/vtk_pkg.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(VTK_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/VTK.git ) -set(VTK_MD5) -set(VTK_BRANCH uvcdat-master) -add_cdat_package_dependent(VTK "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/wget_deps.cmake b/CMake/cdat_modules/wget_deps.cmake deleted file mode 100644 index 5c04065310..0000000000 --- a/CMake/cdat_modules/wget_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(wget_deps) diff --git a/CMake/cdat_modules/wget_external.cmake b/CMake/cdat_modules/wget_external.cmake deleted file mode 100644 index 157c000386..0000000000 --- a/CMake/cdat_modules/wget_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(wget_source "${CMAKE_CURRENT_BINARY_DIR}/build/wget") -set(wget_install "${cdat_EXTERNALS}") - -ExternalProject_Add(Wget - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - 
SOURCE_DIR ${wget_source} - INSTALL_DIR ${wget_install} - URL ${WGET_URL}/${WGET_GZ} - URL_MD5 ${WGET_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${wget_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/wget_pkg.cmake b/CMake/cdat_modules/wget_pkg.cmake deleted file mode 100644 index 879dfc87d6..0000000000 --- a/CMake/cdat_modules/wget_pkg.cmake +++ /dev/null @@ -1,28 +0,0 @@ -set(LLNL_URL http://uv-cdat.llnl.gov/cdat/resources) -set(WGET_MAJOR 1) -set(WGET_MINOR 12) -set(WGET_PATCH) -set(WGET_URL ${LLNL_URL}) -set(WGET_GZ wget-${WGET_MAJOR}.${WGET_MINOR}.tar.gz) -set(WGET_MD5 141461b9c04e454dc8933c9d1f2abf83) -set(WGET_SOURCE ${WGET_URL}/${WGET_GZ}) - -add_cdat_package(Wget "" "Build Wget" SYSTEM) - -set (nm WGET) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}) -if(CDAT_BUILD_WGET) - if(WIN32) - set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget.exe) - else() - set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget) - endif() -endif() -if (${WGET_EXECUTABLE} STREQUAL "WGET_EXECUTABLE-NOTFOUND") - set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget) -endif() -message("[INFO] WGET_EXECUTABLE is set to ${WGET_EXECUTABLE}") - -set(HASWGET ${WGET_EXECUTABLE}) - diff --git a/CMake/cdat_modules/windfield_deps.cmake b/CMake/cdat_modules/windfield_deps.cmake deleted file mode 100644 index bef69919b8..0000000000 --- a/CMake/cdat_modules/windfield_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(windfield_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/windfield_external.cmake b/CMake/cdat_modules/windfield_external.cmake deleted file mode 100644 index 0be2b03c20..0000000000 --- a/CMake/cdat_modules/windfield_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# Windfield` -# -set(windfield_source "${CMAKE_CURRENT_BINARY_DIR}/build/windfield") - -ExternalProject_Add(windfield - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - 
SOURCE_DIR ${windfield_source} - URL ${windfield_URL}/${windfield_GZ} - URL_MD5 ${windfield_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${windfield_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/windfield_pkg.cmake b/CMake/cdat_modules/windfield_pkg.cmake deleted file mode 100644 index 1296543c65..0000000000 --- a/CMake/cdat_modules/windfield_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(windfield_MAJOR ) -set(windfield_MINOR ) -set(windfield_VERSION 547534c636efc) -set(windfield_URL ${LLNL_URL} ) -set(windfield_GZ windfield-${windfield_VERSION}.tar.bz2) -set(windfield_MD5 48989935760da881424b6adb2cb96f44 ) -set(windfield_SOURCE ${windfield_URL}/${windfield_GZ}) - -add_cdat_package_dependent(windfield "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/windspharm_deps.cmake b/CMake/cdat_modules/windspharm_deps.cmake deleted file mode 100644 index a6a45a3a97..0000000000 --- a/CMake/cdat_modules/windspharm_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(windspharm_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/windspharm_external.cmake b/CMake/cdat_modules/windspharm_external.cmake deleted file mode 100644 index a1c93750e5..0000000000 --- a/CMake/cdat_modules/windspharm_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# windspharm -# -set(windspharm_source "${CMAKE_CURRENT_BINARY_DIR}/build/windspharm") - -ExternalProject_Add(windspharm - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${windspharm_source} - URL ${windspharm_URL}/${windspharm_GZ} - URL_MD5 ${windspharm_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${windspharm_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/windspharm_pkg.cmake 
b/CMake/cdat_modules/windspharm_pkg.cmake deleted file mode 100644 index 4293b1a1c6..0000000000 --- a/CMake/cdat_modules/windspharm_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(windspharm_MAJOR ) -set(windspharm_MINOR ) -set(windspharm_VERSION 76a47fca1a) -set(windspharm_URL ${LLNL_URL} ) -set(windspharm_GZ windspharm-${windspharm_VERSION}.zip) -set(windspharm_MD5 8456da340724d332955f2ec946204cad) -set(windspharm_SOURCE ${windspharm_URL}/${windspharm_GZ}) - -add_cdat_package_dependent(windspharm "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/x264_deps.cmake b/CMake/cdat_modules/x264_deps.cmake deleted file mode 100644 index c4169909e2..0000000000 --- a/CMake/cdat_modules/x264_deps.cmake +++ /dev/null @@ -1,2 +0,0 @@ -# Not necessary in theory, but fixes race condition that was being experienced on Ubuntu -set(X264_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/x264_external.cmake b/CMake/cdat_modules/x264_external.cmake deleted file mode 100644 index ad75bd4b93..0000000000 --- a/CMake/cdat_modules/x264_external.cmake +++ /dev/null @@ -1,28 +0,0 @@ -# The X264 external project for ParaView -set(x264_source "${CMAKE_CURRENT_BINARY_DIR}/build/X264") -set(x264_install "${cdat_EXTERNALS}") -set(ENV{PATH} $ENV{PATH}:${cdat_EXTERNALS}/bin) - -find_program(YASM_BIN "yasm") - -if (NOT YASM_BIN) - set(x264_conf_args --disable-asm^^--enable-shared) -else() - set(x264_conf_args --enable-shared) -endif() - -ExternalProject_Add(X264 - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${x264_source} - INSTALL_DIR ${x264_install} - URL ${X264_URL}/${X264_GZ} - URL_MD5 ${X264_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${x264_conf_args} -DBASH_CONFIGURE=ON -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${X264_deps} - ${ep_log_options} - ) - -set(X264_INCLUDE_DIR ${x264_install}/include) diff --git a/CMake/cdat_modules/x264_pkg.cmake 
b/CMake/cdat_modules/x264_pkg.cmake deleted file mode 100644 index ba832b026a..0000000000 --- a/CMake/cdat_modules/x264_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(X264_DATE 20151006) -set(X264_TIME 2245) -set(X264_ADDENDUM "") -set(X264_URL ${LLNL_URL}) -set(X264_GZ x264-snapshot-${X264_DATE}-${X264_TIME}${X264_ADDENDUM}.tar.gz) -set(X264_MD5 e8f5a0fc8db878bcdd256715472fe379) - -set (nm X264) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_DATE}.${${nm}_TIME}) -set(X264_SOURCE ${X264_URL}/${X264_GZ}) - -add_cdat_package_dependent(X264 "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/xgks_external.cmake b/CMake/cdat_modules/xgks_external.cmake deleted file mode 100644 index 1dcf222970..0000000000 --- a/CMake/cdat_modules/xgks_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ - -set(xgks_source "${CMAKE_CURRENT_BINARY_DIR}/build/xgks") -set(xgks_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/xgks_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/xgks_configure_step.cmake - @ONLY) - -#cp -f build/xgks*/port/misc/udposix.h /home/partyd/Projects/uv-cdat/make-install/Externals/include - -ExternalProject_Add(xgks - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${xgks_source} - INSTALL_DIR ${xgks_install} - URL ${XGKS_URL}/${XGKS_GZ} - URL_MD5 ${XGKS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/xgks_configure_step.cmake - ${ep_log_options} -) diff --git a/CMake/cdat_modules/yasm_deps.cmake b/CMake/cdat_modules/yasm_deps.cmake deleted file mode 100644 index 86ac65b48b..0000000000 --- a/CMake/cdat_modules/yasm_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(YASM_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/yasm_external.cmake b/CMake/cdat_modules/yasm_external.cmake deleted file mode 100644 index 9c1744b2d7..0000000000 --- a/CMake/cdat_modules/yasm_external.cmake +++ /dev/null @@ -1,15 +0,0 @@ 
-set(YASM_source "${CMAKE_CURRENT_BINARY_DIR}/build/YASM") -set(YASM_install "${cdat_EXTERNALS}") - -ExternalProject_Add(YASM - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${YASM_source} - INSTALL_DIR ${YASM_install} - URL ${YASM_URL}/${YASM_GZ} - URL_MD5 ${YASM_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${YASM_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/yasm_pkg.cmake b/CMake/cdat_modules/yasm_pkg.cmake deleted file mode 100644 index d4669fe883..0000000000 --- a/CMake/cdat_modules/yasm_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(YASM_MAJOR_SRC 1) -set(YASM_MINOR_SRC 2) -set(YASM_PATCH_SRC 0) -set(YASM_URL ${LLNL_URL}) -set(YASM_GZ yasm-${YASM_MAJOR_SRC}.${YASM_MINOR_SRC}.${YASM_PATCH_SRC}.tar.gz) -set(YASM_MD5 4cfc0686cf5350dd1305c4d905eb55a6) -set(YASM_SOURCE ${YASM_URL}/${YASM_GZ}) - -set (nm YASM) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package(YASM "" "" OFF) - diff --git a/CMake/cdat_modules/zlib_deps.cmake b/CMake/cdat_modules/zlib_deps.cmake deleted file mode 100644 index 3f2626fb6b..0000000000 --- a/CMake/cdat_modules/zlib_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(zlib_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/zlib_external.cmake b/CMake/cdat_modules/zlib_external.cmake deleted file mode 100644 index 09b6fd533b..0000000000 --- a/CMake/cdat_modules/zlib_external.cmake +++ /dev/null @@ -1,55 +0,0 @@ - -# If Windows we use CMake otherwise ./configure -if(WIN32) - - set(zlib_source "${CMAKE_CURRENT_BINARY_DIR}/zlib") - set(zlib_binary "${CMAKE_CURRENT_BINARY_DIR}/zlib-build") - set(zlib_install "${cdat_EXTERNALS}") - - ExternalProject_Add(zlib - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${zlib_source} - BINARY_DIR ${zlib_build} - INSTALL_DIR ${zlib_install} - URL ${ZLIB_URL}/${ZLIB_GZ} - URL_MD5 
${ZLIB_MD5} - PATCH_COMMAND ${CMAKE_COMMAND} -E remove /zconf.h - CMAKE_CACHE_ARGS - -DCMAKE_CXX_FLAGS:STRING=${pv_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${pv_tpl_c_flags} - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR} - ${pv_tpl_compiler_args} - ${zlib_EXTRA_ARGS} - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - ${ep_log_options} - ) - -else() - - set(zlib_source "${CMAKE_CURRENT_BINARY_DIR}/build/zlib") - set(zlib_install "${cdat_EXTERNALS}") - set(CONFIGURE_ARGS --shared) - - ExternalProject_Add(zlib - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${zlib_source} - INSTALL_DIR ${zlib_install} - URL ${ZLIB_URL}/${ZLIB_GZ} - URL_MD5 ${ZLIB_MD5} - PATCH_COMMAND ${CMAKE_COMMAND} -E remove /zconf.h - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${CONFIGURE_ARGS} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cleanenv_configure_step.cmake - DEPENDS ${zlib_deps} - ${ep_log_options} - ) - -endif() - -set(ZLIB_INCLUDE_DIR ${zlib_install}/include) - -if(WIN32) - set(ZLIB_LIBRARY optimized ${zlib_install}/lib/zlib${_LINK_LIBRARY_SUFFIX} debug ${zlib_install}/lib/zlibd${_LINK_LIBRARY_SUFFIX}) -else() - set(ZLIB_LIBRARY ${ZLIB_LIBRARY_PATH}/libz${_LINK_LIBRARY_SUFFIX}) -endif() diff --git a/CMake/cdat_modules/zlib_pkg.cmake b/CMake/cdat_modules/zlib_pkg.cmake deleted file mode 100644 index a34c30885e..0000000000 --- a/CMake/cdat_modules/zlib_pkg.cmake +++ /dev/null @@ -1,24 +0,0 @@ -set(ZLIB_PATCH_SRC 5 CACHE INTEGER "Version of ZLIB to use") -MARK_AS_ADVANCED(ZLIB_PATCH_SRC) -if (ZLIB_PATCH_SRC EQUAL 3) - set(ZLIB_MD5 debc62758716a169df9f62e6ab2bc634) -elseif (ZLIB_PATCH_SRC EQUAL 5) - set(ZLIB_MD5 c735eab2d659a96e5a594c9e8541ad63) -elseif (ZLIB_PATCH_SRC EQUAL 7) - set(ZLIB_MD5 60df6a37c56e7c1366cca812414f7b85) -elseif (ZLIB_PATCH_SRC EQUAL 8) - set(ZLIB_MD5 44d667c142d7cda120332623eab69f40) -else () - message(FATAL_ERROR "error: invalid zlib patch number: '${ZLIB_PATCH_SRC}' valid: 3, 5, 7 or 8") -endif() - -set(ZLIB_MAJOR_SRC 1) 
-set(ZLIB_MINOR_SRC 2) -#ZLIB_PATH_SRC and md5 is configured in CMakeLists.txt because on some RedHat system we need to change it ; # I don't believe this is true anymore durack1 23 Nov 2014 -#set(ZLIB_PATCH_SRC 8) -set(ZLIB_VERSION ${ZLIB_MAJOR_SRC}.${ZLIB_MINOR_SRC}.${ZLIB_PATCH_SRC}) -set(ZLIB_URL ${LLNL_URL}) -set(ZLIB_GZ zlib-${ZLIB_VERSION}.tar.gz) -set(ZLIB_SOURCE ${ZLIB_URL}/${ZLIB_GZ}) - -add_cdat_package(zlib "" "" OFF) diff --git a/CMake/cdat_modules/zmq_deps.cmake b/CMake/cdat_modules/zmq_deps.cmake deleted file mode 100644 index 1ef560a28f..0000000000 --- a/CMake/cdat_modules/zmq_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ZMQ_deps ${sphinx_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/zmq_external.cmake b/CMake/cdat_modules/zmq_external.cmake deleted file mode 100644 index c4637b4b46..0000000000 --- a/CMake/cdat_modules/zmq_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(zmq_source "${CMAKE_CURRENT_BINARY_DIR}/build/ZMQ") -set(zmq_install "${cdat_EXTERNALS}") - -ExternalProject_Add(ZMQ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${zmq_source} - INSTALL_DIR ${zmq_install} - URL ${ZMQ_URL}/${ZMQ_GZ} - URL_MD5 ${ZMQ_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${ZMQ_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/zmq_pkg.cmake b/CMake/cdat_modules/zmq_pkg.cmake deleted file mode 100644 index 26776a9f93..0000000000 --- a/CMake/cdat_modules/zmq_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(ZMQ_MAJOR 4) -set(ZMQ_MINOR 0) -set(ZMQ_PATCH 4) -set(ZMQ_VERSION ${ZMQ_MAJOR}.${ZMQ_MINOR}.${ZMQ_PATCH}) -set(ZMQ_URL ${LLNL_URL}) -set(ZMQ_GZ zeromq-${ZMQ_VERSION}.tar.gz) -set(ZMQ_MD5 f3c3defbb5ef6cc000ca65e529fdab3b) -set(ZMQ_SOURCE ${ZMQ_URL}/${ZMQ_GZ}) - -add_cdat_package(ZMQ "" "" OFF) diff --git a/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in 
b/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in deleted file mode 100644 index cd9a7e5394..0000000000 --- a/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in +++ /dev/null @@ -1,38 +0,0 @@ - -if(WIN32) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/SRC/Release/lapack@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/BLAS/SRC/Release/blas@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/F2CLIBS/libf2c/Release/libf2c@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_directory "@clapack_source@/include" "@clapack_install@/include" - ) - -else() - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/SRC/liblapack@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/BLAS/SRC/libblas@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/F2CLIBS/libf2c/libf2c@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_directory "@clapack_source@/include" "@clapack_install@/include" - ) - -endif() diff --git a/CMake/cdat_modules_extra/ESMF_install_step.cmake.in b/CMake/cdat_modules_extra/ESMF_install_step.cmake.in deleted file mode 100644 index 8e754914ff..0000000000 --- a/CMake/cdat_modules_extra/ESMF_install_step.cmake.in +++ /dev/null @@ -1,35 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(ENV{ESMF_DIR} @ESMF_source@/esmf) -set(ENV{ESMF_INSTALL_PREFIX} @ESMF_install@) -set(ENV{ESMF_PTHREADS} @ESMF_pthreads@) -set(ENV{ESMF_OS} @ESMF_os@) -set(ENV{ESMF_COMPILER} @ESMF_compiler@) -set(ENV{ESMF_COMM} 
@ESMF_comm@) -set(ENV{ESMF_ABI} @ESMF_abi@) -set(ENV{ESMF_OPENMP} @ESMF_openmp@) -set(ENV{ESMF_MOAB} OFF) -set(ENV{ESMF_ARRAYLITE} TRUE) -set(ENV{CFLAGS} ${cdat_osx_flags_fortran}) -set(ENV{CXXFLAGS} ${cdat_osx_cxxflags_fortran}) -set(ENV{CPPFLAGS} ${cdat_osx_flags_fortran}) -set(ENV{FFLAGS} ${cdat_osx_flags_fortran}) - -execute_process( - COMMAND make install - WORKING_DIRECTORY @ESMF_source@/esmf - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -set(ESMF_source @ESMF_source@) -set(ESMF_install @ESMF_install@) -set(ESMF_COMM @mpiuni@) -set(ESMF_pthreads @ESMF_pthreads@) - -if(NOT ${res} EQUAL 0) - message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Install") -endif() -message("Install succeeded.") diff --git a/CMake/cdat_modules_extra/ESMF_make_step.cmake.in b/CMake/cdat_modules_extra/ESMF_make_step.cmake.in deleted file mode 100644 index 2240671640..0000000000 --- a/CMake/cdat_modules_extra/ESMF_make_step.cmake.in +++ /dev/null @@ -1,45 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(ENV{ESMF_DIR} @ESMF_source@/esmf) -set(ENV{ESMF_INSTALL_PREFIX} @ESMF_install@) -set(ENV{ESMF_PTHREADS} @ESMF_pthreads@) -set(ENV{ESMF_OS} @ESMF_os@) -set(ENV{ESMF_COMPILER} @ESMF_compiler@) -set(ENV{ESMF_COMM} @ESMF_comm@) -set(ENV{ESMF_ABI} @ESMF_abi@) -set(ENV{ESMF_OPENMP} @ESMF_openmp@) -set(ENV{ESMF_MOAB} OFF) -set(ENV{ESMF_ARRAYLITE} TRUE) -set(ENV{CFLAGS} ${cdat_osx_flags_fortran}) -set(ENV{CXXFLAGS} ${cdat_osx_cxxflags_fortran}) -set(ENV{CPPFLAGS} ${cdat_osx_flags_fortran}) -set(ENV{FFLAGS} ${cdat_osx_flags_fortran}) - - - -## Store the configuration used to build ESMF -set(outfile @ESMF_source@/set_esmf_env_ser.sh) -file(WRITE ${outfile} "# ESMF compiled with these environment variables\n\n") -file(APPEND ${outfile} "export ESMF_DIR=@ESMF_source@/esmf\n") -file(APPEND ${outfile} "export ESMF_INSTALL_PREFIX=@ESMF_install@\n") -file(APPEND ${outfile} "export 
ESMF_THREADS=@ESMF_pthreads@\n") -file(APPEND ${outfile} "export ESMF_COMM=@ESMF_COMM@\n") -file(APPEND ${outfile} "\n") -file(APPEND ${outfile} "# Full information regarding the install is found in:\n") -file(GLOB_RECURSE ESMF_mkfile "@ESMF_install@/lib/libO/e*.mk") -file(APPEND ${outfile} "# "${ESMF_mkfile}"\n") - -# make should be detected by CMAKE at some point -execute_process( - COMMAND make - WORKING_DIRECTORY @ESMF_source@/esmf - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make:\n" ${res}) -endif() -message("Make succeeded.") diff --git a/CMake/cdat_modules_extra/ESMP_install_step.cmake.in b/CMake/cdat_modules_extra/ESMP_install_step.cmake.in deleted file mode 100644 index 3d5d01f42a..0000000000 --- a/CMake/cdat_modules_extra/ESMP_install_step.cmake.in +++ /dev/null @@ -1,34 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -file(GLOB_RECURSE ESMP_esmfmkfile "@cdat_EXTERNALS@/lib/libO/*.mk") - -set(ENV{ESMFMKFILE} ${ESMP_esmfmkfile}) -foreach( item ${ESMP_esmfmkfile}) - message("item " ${item}) -endforeach( item ${ESMP_esmfmkfile}) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" generateESMP_Config.py - WORKING_DIRECTORY @ESMP_source@ - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Install") -endif() - -if(NOT EXISTS @ESMP_source@/src/ESMP_Config.py) - message(FATAL_ERROR "ESMP_Config.py not created") -endif() - -file(INSTALL @ESMP_source@ DESTINATION @PYTHON_SITE_PACKAGES@) - -if(NOT EXISTS @PYTHON_SITE_PACKAGES@/ESMP/src/ESMP_Config.py) - message(FATAL_ERROR "Install process failed") -endif() - - -message("Install succeeded.") diff --git a/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in 
b/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in deleted file mode 100644 index 823fcdb8a6..0000000000 --- a/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in +++ /dev/null @@ -1,9 +0,0 @@ -# Patch ESMP_LoadESMF.py to allow relative loading of config file -if(NOT WIN32) - execute_process( - WORKING_DIRECTORY @ESMP_source@/src - COMMAND patch -p1 - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/ESMP.patch - ) -endif() - diff --git a/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in deleted file mode 100644 index cee8497ed3..0000000000 --- a/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in +++ /dev/null @@ -1,42 +0,0 @@ -message("Configuring NUMPY:\n@NUMPY_binary@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -# As explained in site.cfg.example - See http://projects.scipy.org/numpy/browser/trunk/site.cfg.example -# Directories listed in variable such as library_dirs or source_dirs should be separated using os.pathsep -# On windows, the separator is ";" and ":" on unix-like platform -set(path_sep ":") -if(WIN32) - set(path_sep ";") -endif() - -# As explained in site.cfg.example, the library name without the prefix "lib" should be used. 
-# Nevertheless, on windows, only "libf2c" leads to a successful configuration and -# installation of NUMPY -set(f2c_libname "f2c") -if(WIN32) - set(f2c_libname "libf2c") -endif() - -# setup the site.cfg file -file(WRITE "@NUMPY_binary@/site.cfg" -" -[blas] -library_dirs = @cdat_EXTERNALS@/lib${path_sep}@cdat_EXTERNALS@/lib -libraries = blas,${f2c_libname} - -[lapack] -library_dirs = @cdat_EXTERNALS@/lib -lapack_libs = lapack -") - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py config - WORKING_DIRECTORY "@NUMPY_binary@" - RESULT_VARIABLE res - ) - -if(NOT ${res} EQUAL 0) - message(FATAL_ERROR "Error in config of NUMPY") -endif() -message("Numpy config worked.") diff --git a/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in deleted file mode 100644 index feac845d50..0000000000 --- a/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in +++ /dev/null @@ -1,19 +0,0 @@ -message("Installing NUMPY:\n@NUMPY_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@NUMPY_binary@" - RESULT_VARIABLE res -) - -if(NOT ${res} EQUAL 0) - message("NUMPY Errors detected: \n${NUMPY_OUT}\n${NUMPY_ERR}") - message(FATAL_ERROR "Error in config of NUMPY") -endif() -message("Numpy install succeeded.") diff --git a/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in deleted file mode 100644 index 94b92ebd2e..0000000000 --- a/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -if(APPLE) - set(ENV{CFLAGS} 
"@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") - set(ENV{LDFLAGS} "$ENV{LDFLAGS}") -else() - set(ENV{LDFLAGS} "$ENV{LDFLAGS} -shared") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@NUMPY_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE NUMPY_OUT - OUTPUT_VARIABLE NUMPY_ERR) - -if(NOT ${res} EQUAL 0) - message("[ERROR] NUMPY Errors detected: \n${NUMPY_OUT}\n${NUMPY_ERR}") - message(FATAL_ERROR "[ERROR] Error in config of NUMPY") -endif() diff --git a/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in b/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in deleted file mode 100644 index 4a6e827621..0000000000 --- a/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -message("Installing PYLIBXML2:\n@PYLIBXML2_PREFIX_ARGS@") -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@PYLIBXML2_binary@/python" - RESULT_VARIABLE res - OUTPUT_VARIABLE PYLIBXML2_OUT - OUTPUT_VARIABLE PYLIBXML2_ERR -) - -if(NOT ${res} EQUAL 0) - message("libxml2-python bindings Errors detected: \n${PYLIBXML2_OUT}\n${PYLIBXML2_ERR}") - message(FATAL_ERROR "Error in config of PYLIBXML2") -endif() -message("libxml2-python bindings install succeeded.") diff --git a/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in b/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in deleted file mode 100644 index 562cb24ff1..0000000000 --- a/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in +++ /dev/null @@ -1,24 +0,0 @@ -message("Building libxml2 python bindings:\n@PYLIBXML2_binary@") 
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -set(cdat_EXTERNALS @cdat_EXTERNALS@) -configure_file(@cdat_CMAKE_SOURCE_DIR@/cdat_modules_extra/PYLIBXML2_setup.py.in - @cdat_BINARY_DIR@/build/PYLIBXML2/python/setup.py) - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@PYLIBXML2_binary@/python" - RESULT_VARIABLE res - OUTPUT_VARIABLE PYLIBXML2_OUT - OUTPUT_VARIABLE PYLIBXML2_ERR) - -if(NOT ${res} EQUAL 0) - message("libxml2-python bindings Errors detected: \n${PYLIBXML2_OUT}\n${PYLIBXML2_ERR}") - message(FATAL_ERROR "Error in config of PYLIBXML2") -endif() -message("libxml2_python bindings build worked.") diff --git a/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in b/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in deleted file mode 100755 index 22fdbfb9be..0000000000 --- a/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/python -u -# -# Setup script for libxml2 and libxslt if found -# -import sys, os -from distutils.core import setup, Extension - -# Below ROOT, we expect to find include, include/libxml2, lib and bin. -# On *nix, it is not needed (but should not harm), -# on Windows, it is set by configure.js. -ROOT = r'/usr' - -# Thread-enabled libxml2 -with_threads = 1 - -# If this flag is set (windows only), -# a private copy of the dlls are included in the package. -# If this flag is not set, the libxml2 and libxslt -# dlls must be found somewhere in the PATH at runtime. 
-WITHDLLS = 1 and sys.platform.startswith('win') - -def missing(file): - if os.access(file, os.R_OK) == 0: - return 1 - return 0 - -try: - HOME = os.environ['HOME'] -except: - HOME="C:" - -if WITHDLLS: - # libxml dlls (expected in ROOT/bin) - dlls = [ 'iconv.dll','libxml2.dll','libxslt.dll','libexslt.dll' ] - dlls = map(lambda dll: os.path.join(ROOT,'bin',dll),dlls) - - # create __init__.py for the libxmlmods package - if not os.path.exists("libxmlmods"): - os.mkdir("libxmlmods") - open("libxmlmods/__init__.py","w").close() - - def altImport(s): - s = s.replace("import libxml2mod","from libxmlmods import libxml2mod") - s = s.replace("import libxsltmod","from libxmlmods import libxsltmod") - return s - -if sys.platform.startswith('win'): - libraryPrefix = 'lib' - platformLibs = [] -else: - libraryPrefix = '' - platformLibs = ["m","z"] - -# those are examined to find -# - libxml2/libxml/tree.h -# - iconv.h -# - libxslt/xsltconfig.h -includes_dir = [ -"/usr/include", -"/usr/local/include", -"/opt/include", -os.path.join(ROOT,'include'), -HOME, -"@cdat_EXTERNALS@/include" -]; - -xml_includes="" -for dir in includes_dir: - if not missing(dir + "/libxml2/libxml/tree.h"): - xml_includes=dir + "/libxml2" - break; - -if xml_includes == "": - print "failed to find headers for libxml2: update includes_dir" - sys.exit(1) - -iconv_includes="" -for dir in includes_dir: - if not missing(dir + "/iconv.h"): - iconv_includes=dir - break; - -if iconv_includes == "": - print "failed to find headers for libiconv: update includes_dir" - sys.exit(1) - -# those are added in the linker search path for libraries -libdirs = [ -os.path.join(ROOT,'lib'), -] - -xml_files = ["libxml2-api.xml", "libxml2-python-api.xml", - "libxml.c", "libxml.py", "libxml_wrap.h", "types.c", - "xmlgenerator.py", "README", "TODO", "drv_libxml2.py"] - -xslt_files = ["libxslt-api.xml", "libxslt-python-api.xml", - "libxslt.c", "libxsl.py", "libxslt_wrap.h", - "xsltgenerator.py"] - -if missing("libxml2-py.c") or 
missing("libxml2.py"): - try: - try: - import xmlgenerator - except: - import generator - except: - print "failed to find and generate stubs for libxml2, aborting ..." - print sys.exc_type, sys.exc_value - sys.exit(1) - - head = open("libxml.py", "r") - generated = open("libxml2class.py", "r") - result = open("libxml2.py", "w") - for line in head.readlines(): - if WITHDLLS: - result.write(altImport(line)) - else: - result.write(line) - for line in generated.readlines(): - result.write(line) - head.close() - generated.close() - result.close() - -with_xslt=0 -if missing("libxslt-py.c") or missing("libxslt.py"): - if missing("xsltgenerator.py") or missing("libxslt-api.xml"): - print "libxslt stub generator not found, libxslt not built" - else: - try: - import xsltgenerator - except: - print "failed to generate stubs for libxslt, aborting ..." - print sys.exc_type, sys.exc_value - else: - head = open("libxsl.py", "r") - generated = open("libxsltclass.py", "r") - result = open("libxslt.py", "w") - for line in head.readlines(): - if WITHDLLS: - result.write(altImport(line)) - else: - result.write(line) - for line in generated.readlines(): - result.write(line) - head.close() - generated.close() - result.close() - with_xslt=1 -else: - with_xslt=1 - -if with_xslt == 1: - xslt_includes="" - for dir in includes_dir: - if not missing(dir + "/libxslt/xsltconfig.h"): - xslt_includes=dir + "/libxslt" - break; - - if xslt_includes == "": - print "failed to find headers for libxslt: update includes_dir" - with_xslt = 0 - - -descr = "libxml2 package" -modules = [ 'libxml2', 'drv_libxml2' ] -if WITHDLLS: - modules.append('libxmlmods.__init__') -c_files = ['libxml2-py.c', 'libxml.c', 'types.c' ] -includes= [xml_includes, iconv_includes] -libs = [libraryPrefix + "xml2"] + platformLibs -macros = [] -if with_threads: - macros.append(('_REENTRANT','1')) -if with_xslt == 1: - descr = "libxml2 and libxslt package" - if not sys.platform.startswith('win'): - # - # We are gonna build 2 
identical shared libs with merge initializing - # both libxml2mod and libxsltmod - # - c_files = c_files + ['libxslt-py.c', 'libxslt.c'] - xslt_c_files = c_files - macros.append(('MERGED_MODULES', '1')) - else: - # - # On windows the MERGED_MODULE option is not needed - # (and does not work) - # - xslt_c_files = ['libxslt-py.c', 'libxslt.c', 'types.c'] - libs.insert(0, libraryPrefix + 'exslt') - libs.insert(0, libraryPrefix + 'xslt') - includes.append(xslt_includes) - modules.append('libxslt') - - -extens=[Extension('libxml2mod', c_files, include_dirs=includes, - library_dirs=libdirs, - libraries=libs, define_macros=macros)] -if with_xslt == 1: - extens.append(Extension('libxsltmod', xslt_c_files, include_dirs=includes, - library_dirs=libdirs, - libraries=libs, define_macros=macros)) - -if missing("MANIFEST"): - - manifest = open("MANIFEST", "w") - manifest.write("setup.py\n") - for file in xml_files: - manifest.write(file + "\n") - if with_xslt == 1: - for file in xslt_files: - manifest.write(file + "\n") - manifest.close() - -if WITHDLLS: - ext_package = "libxmlmods" - if sys.version >= "2.2": - base = "lib/site-packages/" - else: - base = "" - data_files = [(base+"libxmlmods",dlls)] -else: - ext_package = None - data_files = [] - -setup (name = "libxml2-python", - # On *nix, the version number is created from setup.py.in - # On windows, it is set by configure.js - version = "2.7.8", - description = descr, - author = "Daniel Veillard", - author_email = "veillard@redhat.com", - url = "http://xmlsoft.org/python.html", - licence="MIT Licence", - py_modules=modules, - ext_modules=extens, - ext_package=ext_package, - data_files=data_files, - ) - -sys.exit(0) - diff --git a/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in deleted file mode 100644 index 00d3814972..0000000000 --- a/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in +++ /dev/null @@ -1,62 +0,0 @@ -message("Configuring 
SCIPY:\n@SCIPY_binary@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{PYTHONPATH} "@PYTHONPATH@") -set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}") - -# As explained in site.cfg.example - See http://projects.scipy.org/scipy/browser/trunk/site.cfg.example -# Directories listed in variable such as library_dirs or source_dirs should be separated using os.pathsep -# On windows, the separator is ";" and ":" on unix-like platform -set(path_sep ":") -if(WIN32) - set(path_sep ";") -endif() - -# As explained in site.cfg.example, the library name without the prefix "lib" should be used. -# Nevertheless, on windows, only "libf2c" leads to a successful configuration and -# installation of SCIPY -set(f2c_libname "f2c") -if(WIN32) - set(f2c_libname "libf2c") -endif() - -set(_blas_dirs) -set(_lapack_dirs) - -if (CDAT_USE_SYSTEM_LAPACK) - foreach(_path ${BLAS_LIBRARIES}) - get_filename_component(_dir ${_path} PATH) - list(APPEND _blas_dirs ${_dir}) - endforeach() - - foreach(_path ${LAPACK_LIBRARIES}) - get_filename_component(_dir ${_path} PATH) - list(APPEND _lapack_dirs ${_dir}) - endforeach() -else() - set(_blas_dirs @cdat_EXTERNALS@/lib) - set(_lapack_dirs @cdat_EXTERNALS@/lib) -endif() - -# setup the site.cfg file -file(WRITE "@SCIPY_binary@/site.cfg" -" -[blas] -library_dirs = ${_blas_dirs} -libraries = blas,${f2c_libname} - -[lapack] -library_dirs = ${_lapack_dirs} -lapack_libs = lapack -") - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py config - WORKING_DIRECTORY "@SCIPY_binary@" - RESULT_VARIABLE res - ) - -if(NOT ${res} EQUAL 0) - message(FATAL_ERROR "Error in config of SCIPY") -endif() -message("Scipy config worked.") diff --git a/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in deleted file mode 100644 index 34a3e9edae..0000000000 --- a/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in +++ /dev/null @@ -1,21 
+0,0 @@ -message("Installing SCIPY:\n@SCIPY_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{PYTHONPATH} "@PYTHONPATH@") -set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@SCIPY_binary@" - RESULT_VARIABLE res -) - -if(NOT ${res} EQUAL 0) - message("SCIPY Errors detected: \n${SCIPY_OUT}\n${SCIPY_ERR}") - message(FATAL_ERROR "Error in config of SCIPY") -endif() -message("Scipy install succeeded.") diff --git a/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in deleted file mode 100644 index c8d533cb18..0000000000 --- a/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in +++ /dev/null @@ -1,22 +0,0 @@ -message("Building SCIPY:\n@SCIPY_binary@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{PYTHONPATH} "@PYTHONPATH@") -set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@SCIPY_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE SCIPY_OUT - OUTPUT_VARIABLE SCIPY_ERR) - -if(NOT ${res} EQUAL 0) - message("SCIPY Errors detected: \n${SCIPY_OUT}\n${SCIPY_ERR}") - message(FATAL_ERROR "Error in config of SCIPY") -endif() -message("Scipy build worked.") diff --git a/CMake/cdat_modules_extra/basemap_install_step.cmake.in b/CMake/cdat_modules_extra/basemap_install_step.cmake.in deleted file mode 100644 index 95cb49de75..0000000000 --- a/CMake/cdat_modules_extra/basemap_install_step.cmake.in +++ /dev/null @@ -1,20 +0,0 @@ -message("Installing 
basemap:\n@basemap_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{GEOS_DIR} "@cdat_EXTERNALS@") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@basemap_binary@" - RESULT_VARIABLE res -) - -if(NOT ${res} EQUAL 0) - message("basemap Errors detected: \n${basemap_OUT}\n${basemap_ERR}") - message(FATAL_ERROR "Error in config of basemap") -endif() -message("Numpy install succeeded.") diff --git a/CMake/cdat_modules_extra/basemap_make_step.cmake.in b/CMake/cdat_modules_extra/basemap_make_step.cmake.in deleted file mode 100644 index 0789e4ddab..0000000000 --- a/CMake/cdat_modules_extra/basemap_make_step.cmake.in +++ /dev/null @@ -1,20 +0,0 @@ -message("Building basemap:\n@basemap_binary@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{GEOS_DIR} "@cdat_EXTERNALS@") -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@basemap_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE basemap_OUT - OUTPUT_VARIABLE basemap_ERR) - -if(NOT ${res} EQUAL 0) - message("basemap Errors detected: \n${basemap_OUT}\n${basemap_ERR}") - message(FATAL_ERROR "Error in config of basemap") -endif() -message("basemap build worked.") diff --git a/CMake/cdat_modules_extra/cdat.in b/CMake/cdat_modules_extra/cdat.in deleted file mode 100755 index 7bfcf620b3..0000000000 --- a/CMake/cdat_modules_extra/cdat.in +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -# source is not portable whereas . is -. 
"@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh" -python@PYVER@ "$@" diff --git a/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in b/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in deleted file mode 100644 index 5f5674fc18..0000000000 --- a/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in +++ /dev/null @@ -1,18 +0,0 @@ - -if(NOT APPLE) - include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -endif() - -execute_process( - COMMAND make "${BUILD_ARGS}" - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make") -endif() -message("Make succeeded.") - diff --git a/CMake/cdat_modules_extra/cdat_common_environment.cmake.in b/CMake/cdat_modules_extra/cdat_common_environment.cmake.in deleted file mode 100644 index 7a29f80050..0000000000 --- a/CMake/cdat_modules_extra/cdat_common_environment.cmake.in +++ /dev/null @@ -1,39 +0,0 @@ -message("[INFO] ADDITIONAL CFLAGS ${ADDITIONAL_CFLAGS}") -set(ENV{PATH} "@SB_BIN_DIR@:@cdat_EXTERNALS@/bin:$ENV{PATH}") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:@cdat_EXTERNALS@/lib64:@cdat_EXTERNALS@/lib/paraview-@PARAVIEW_MAJOR@.@PARAVIEW_MINOR@:$ENV{@LIBRARY_PATH@}") -if (NOT DEFINED SKIP_LDFLAGS) - set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib -L@cdat_EXTERNALS@/lib64 @cdat_external_link_directories@ -Wl,-rpath,@cdat_EXTERNALS@/lib64 @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib") -endif() -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include 
-I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_cxxflags@ ${ADDITIONAL_CXXFLAGS}") -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") -set(ENV{FC} "") -set(ENV{FCFLAGS} "") -set(ENV{FCLIBS} "") -set(ENV{F77} "") -set(ENV{FFLAGS} "") -set(ENV{FLIBS} "") -set(ENV{LD_X11} "") # for xgks -set(ENV{PYTHONPATH} @PYTHONPATH@) -set(ENV{CC} @CMAKE_C_COMPILER@) - -if(APPLE) - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") -endif() - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -set(PYTHONUSERBASE @PYTHON_SITE_PACKAGES_PREFIX@) -#if ("@EGG_INSTALLER@" STREQUAL "PIP") -# # Set python userbase so that pip install packages locally -# set(PYTHONUSERBASE @CMAKE_INSTALL_PREFIX@) -# set(EGG_CMD env @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PYTHONUSERBASE=${PYTHONUSERBASE} @PIP_BINARY@ install --user -v --download-cache @CDAT_PACKAGE_CACHE_DIR@ ) -# if (NOT "${PIP_CERTIFICATE}" STREQUAL "") -# set(EGG_CMD ${EGG_CMD} --cert=${PIP_CERTIFICATE}) -# endif() -#else() -# set(EGG_CMD env @LD_LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} @EASY_INSTALL_BINARY@ ) -#endif() - diff --git a/CMake/cdat_modules_extra/cdat_configure_step.cmake.in b/CMake/cdat_modules_extra/cdat_configure_step.cmake.in deleted file mode 100644 index 32ecb43f0d..0000000000 --- a/CMake/cdat_modules_extra/cdat_configure_step.cmake.in +++ /dev/null @@ -1,30 +0,0 @@ -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") - -if (BASH_CONFIGURE) - set(CONFIGURE_SHELL "bash") -else() - set(CONFIGURE_SHELL "sh") -endif() - -if (CONF_PATH_XTRA) - message("[INFO] configure is in subdirectory: ${CONF_PATH_XTRA}") -else() - set(CONF_PATH_XTRA ".") -endif() 
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}") -message("LD_ARGS IS $ENV{@LIBRARY_PATH@}") -message("CFLAGS : $ENV{CFLAGS}") - -execute_process( - COMMAND env CC=$ENV{CC} CFLAGS=$ENV{CFLAGS} LD_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} DYLD_FALLBACK_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PKG_CONFIG=$ENV{PKG_CONFIG} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} ${CONFIGURE_SHELL} ${CONF_PATH_XTRA}/configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in b/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in deleted file mode 100644 index f8cf8e0fa8..0000000000 --- a/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in +++ /dev/null @@ -1,10 +0,0 @@ -file(READ @cdat_SOURCE_DIR@/Packages/dat/files.txt SAMPLE_FILES) -string(REPLACE "\n" ";" SAMPLE_LIST ${SAMPLE_FILES}) -foreach(SAMPLE_FILE ${SAMPLE_LIST}) - STRING(REPLACE " " ";" DOWNLOAD_LIST ${SAMPLE_FILE}) - LIST(GET DOWNLOAD_LIST 0 MD5) - LIST(GET DOWNLOAD_LIST 1 FILE_NM) - message("[INFO] Attempting to download http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM} WITH MD5 ${MD5} to @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM}") - file(DOWNLOAD http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM} @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM} EXPECTED_MD5=${MD5}) -endforeach() -set(res 0) diff --git a/CMake/cdat_modules_extra/cdat_install_step.cmake.in b/CMake/cdat_modules_extra/cdat_install_step.cmake.in deleted file mode 100644 index 62fe3fa78c..0000000000 --- a/CMake/cdat_modules_extra/cdat_install_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - 
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND make install ${INSTALL_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in install") -endif() -message("Install succeeded.") diff --git a/CMake/cdat_modules_extra/cdat_make_step.cmake.in b/CMake/cdat_modules_extra/cdat_make_step.cmake.in deleted file mode 100644 index 59a4f113e2..0000000000 --- a/CMake/cdat_modules_extra/cdat_make_step.cmake.in +++ /dev/null @@ -1,18 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -if(NOT ${make}) - set(make make) -endif() -execute_process( - COMMAND env CFLAGS=$ENV{CFLAGS} LDFLAGS=$ENV{LDFLAGS} CPPFLAGS=$ENV{CPPFLAGS} CXXFLAGS=$ENV{CXXFLAG} ${make} -j ${BUILD_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("[ERROR] Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make") -endif() - diff --git a/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in b/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in deleted file mode 100644 index 87855421a0..0000000000 --- a/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in +++ /dev/null @@ -1,30 +0,0 @@ - -if(NOT APPLE) - set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -endif() - -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CPPFLAGS@") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CXXFLAGS@") -set(ENV{CFLAGS} "-w -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CFLAGS@") -set(ENV{LOCNCCONFIG} "@cdat_EXTERNALS@/bin/nc-config") -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") -set(ENV{PKG_CONFIG_PATH} 
"@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") - -message("Running cdat install with path: " $ENV{PATH}) - -if(APPLE) - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" @cdat_CMAKE_SOURCE_DIR@/install.py @cdat_xtra_flags@ --enable-pp @SAMPLE_DATA@ @PYTHON_EXTRA_PREFIX@ @CDMS_ONLY@ - WORKING_DIRECTORY "@WORKING_DIR@" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Python Install. ${res}") -endif() -message("Make succeeded.") - diff --git a/CMake/cdat_modules_extra/cdat_python_step.cmake.in b/CMake/cdat_modules_extra/cdat_python_step.cmake.in deleted file mode 100644 index cf32905a1d..0000000000 --- a/CMake/cdat_modules_extra/cdat_python_step.cmake.in +++ /dev/null @@ -1,18 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -if(NOT ${make}) - set(make make) -endif() - -execute_process( - COMMAND ${make} ${BUILD_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make") -endif() -message("Make succeeded.") - diff --git a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in b/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in deleted file mode 100644 index b8dd0172d1..0000000000 --- a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in +++ /dev/null @@ -1,22 +0,0 @@ -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - 
-set(ENV{CC} mpicc) -set(ENV{CXX} mpicxx) - -message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}") -message("LD_ARGS IS $ENV{@LIBRARY_PATH@}") -message("CFLAGS : $ENV{CFLAGS}") - -execute_process( - COMMAND env CC=$ENV{CC} CFLAGS=$ENV{CFLAGS} LD_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} DYLD_FALLBACK_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PKG_CONFIG=$ENV{PKG_CONFIG} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/checked_get.sh.in b/CMake/cdat_modules_extra/checked_get.sh.in deleted file mode 100755 index 7a38feab45..0000000000 --- a/CMake/cdat_modules_extra/checked_get.sh.in +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env bash - -# Does an md5 check between local and remote resource -# returns 0 (success) iff there is no match and thus indicating that -# an update is available. -# USAGE: checked_for_update [file] http://www.foo.com/file -# -check_for_update() { - local local_file - local remote_file - if (( $# == 1 )); then - remote_file=${1} - local_file=$(readlink -f ${1##*/}) - elif (( $# == 2 )); then - local_file="../sources/"${1} - remote_file=${2} - else - echo "function \"checked_for_update\": Called with incorrect number of args! (fatal)" - exit 1 - fi - echo "Local file is:",${local_file} - [ ! -e ${local_file} ] && echo " WARNING: Could not find local file ${local_file}" && return 0 - diff <(@MD5PRG@ ${local_file} | tr -s " " | cut -d " " -f @MD5CNT@ ) <(more ../md5s.txt | grep ${1} | tr -s " " | cut -d " " -f 1) >& /dev/null - [ $? != 0 ] && echo " Update Available @ ${remote_file}" && return 0 - echo " ==> ${local_file} is up to date" - return 1 -} - -# If an update is available then pull it down... then check the md5 sums again! 
-# -# Yes, this results in 3 network calls to pull down a file, but it -# saves total bandwidth and it also allows the updating from the -# network process to be cronttab-able while parsimonious with -# resources. It is also very good practice to make sure that code -# being executed is the RIGHT code! -# -# NOTE: Has multiple return values test for (( $? > 1 )) when looking or errors -# A return value fo 1 only means that the file is up-to-date and there -# Is no reason to fetch it. -# -# USAGE: checked_get [file] http://www.foo.com/file -# -checked_get() { - check_for_update $@ - [ $? != 0 ] && return 1 - - local local_file - local remote_file - if (( $# == 1 )); then - remote_file=${1} - local_file=${1##*/} - elif (( $# == 2 )); then - local_file="../sources/"${1} - remote_file=${2} - else - echo "function \"checked_get\": Called with incorrect number of args! (fatal)" - exit 1 - fi - echo "Local file 2 :",${local_file} - if [ -e ${local_file} ]; then - cp -v ${local_file} ${local_file}.bak - chmod 600 ${local_file}.bak -# return 0 - fi - @HASWGET@ -O ${local_file} ${remote_file} - [ $? != 0 ] && echo " ERROR: Problem pulling down [${remote_file}]" && return 2 - diff <(@MD5PRG@ ${local_file} | tr -s " " | cut -d " " -f @MD5CNT@ ) <(more ../md5s.txt | grep ${1} | tr -s " " | cut -d " " -f 1) >& /dev/null -# diff <(md5sum ${local_file} | tr -s " " | cut -d " " -f 1) <(curl ${remote_file}.md5 | tr -s " " | cut -d " " -f 1) >& /dev/null - [ $? != 0 ] && echo " WARNING: Could not verify this file!" 
&& return 3 - echo "[VERIFIED]" - return 0 -} - -checked_get $@ -echo ${1}" is where i get the tared stuff" -tar xzf "../sources/"${1} diff --git a/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in new file mode 100755 index 0000000000..dc57305463 --- /dev/null +++ b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in @@ -0,0 +1,5 @@ + +#!/usr/bin/env bash + +source activate root +@CONDA@ remove --all -y -n @CONDA_ENVIRONMENT_NAME@ diff --git a/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in b/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in deleted file mode 100644 index a3872f3733..0000000000 --- a/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in +++ /dev/null @@ -1,17 +0,0 @@ -#set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -#set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") - -#include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -#message(CONGIFURE_ARGS IS ${CONFIGURE_ARGS}) - -execute_process( - COMMAND sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/configobj_build_step.cmake.in b/CMake/cdat_modules_extra/configobj_build_step.cmake.in deleted file mode 100644 index 5edd0af433..0000000000 --- a/CMake/cdat_modules_extra/configobj_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@configobj_source_dir@" -) diff --git a/CMake/cdat_modules_extra/curses_patch_step.cmake.in 
b/CMake/cdat_modules_extra/curses_patch_step.cmake.in deleted file mode 100644 index 04c28afdc3..0000000000 --- a/CMake/cdat_modules_extra/curses_patch_step.cmake.in +++ /dev/null @@ -1,5 +0,0 @@ -execute_process( - WORKING_DIRECTORY @curses_source@ - COMMAND patch -Np1 - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/curses_gcc5.patch -) diff --git a/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in b/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in deleted file mode 100644 index 7e0987908b..0000000000 --- a/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in +++ /dev/null @@ -1,78 +0,0 @@ -# EZGet Makefile -# -# Usage: -# -# Change DEBUG as needed. -# Check the paths specified here for whether they are right for you. -# Provide a path to fcddrs.h, or copy it (it's in cdat/libcdms/include) -# make -# -#-------------------------------------------------------------------- -LIBNAME = ezget -#jfp was CDMSLIB = /usr/local/lib -#jfp was CDMSINC = /usr/local/include -CDMSLIB = @cdat_EXTERNALS@/lib -CDMSINC = @cdat_EXTERNALS@/include -DEBUG = -O -# DEBUG = -g -save-temps -O0 -# Requires Absoft FORTRAN -FC = gfortran -CC = gcc -#ARCHOPT = -arch x86_64 -#ARCHOPT = -arch i386 -ARCHOPT = -m64 -mtune=native -# FOPTS = -fcray-pointer $(ARCHOPT) -W -FOPTS = -fcray-pointer $(ARCHOPT) -W -Dgfortran -Dsun -D__linux -D__linux_gfortran -fpic -fPIC -I ../../libdrs/lib -I ../include -I @cdat_EXTERNALS@/include -FFLAGS = $(DEBUG) $(FOPTS) -INSTALL_LIB = @cdat_EXTERNALS@/lib -INSTALL_INC = @cdat_EXTERNALS@/include -CPPFLAGS = $(ARCHOPT) -CPP = cpp - -FOBJECTS = Src/$(LIBNAME).o -FINCLUDES = drsdef.h drscom.h cycle.h -# FINCLUDES = -FSOURCES = $(FOBJECTS:.o=.F) - -COBJECTS = -CINCLUDES = drscdf.h -CSOURCES = $(COBJECTS:.o=.c) - -OBJECTS = $(FOBJECTS) $(COBJECTS) -SOURCES = $(FSOURCES) $(CSOURCES) -INCLUDES = $(FINCLUDES) $(CINCLUDES) -#-------------------------------------------------------------------- - -all: lib$(LIBNAME).a -#lib$(LIBNAME).so - -shared: drsdef.h lib$(LIBNAME).so - 
-lib$(LIBNAME).a: $(OBJECTS) - ar rv lib$(LIBNAME).a $? - -lib$(LIBNAME).so: $(OBJECTS) - $(CC) $(ARCHOPT) -lgfortran -L@cdat_EXTERNALS@/lib -L$(CDMSLIB) -I$(CDMSINC) -lcdms -shared -o lib$(LIBNAME).so $(OBJECTS) - -#-------------------------------------------------------------------- - -install: lib$(LIBNAME).a - cp lib$(LIBNAME).a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/lib$(LIBNAME).a -# cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h - -#-------------------------------------------------------------------------- - -# Miscellaneous junk - -tags: - etags $(SOURCES) $(INCLUDES) - -clean: - -rm -f Src/*.o - -rm -f *~ - -rm -f core - -.SUFFIXES: .F .o - -.F.o: - cd Src ; $(FC) $(FFLAGS) -c ../$< diff --git a/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake b/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake deleted file mode 100644 index 996ae0a281..0000000000 --- a/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake +++ /dev/null @@ -1,25 +0,0 @@ -# used variables: -# FILE_URL The url where the file is available -# FILE_PATH The destination for the file -# FILE_MD5 The expected md5 - -# check if the file already exists -if(EXISTS "${FILE_PATH}") - # check md5sum - file(MD5 "${FILE_PATH}" output_md5) - - if(${output_md5} STREQUAL ${FILE_MD5}) - return() # nothing to do - endif() -endif() - -# add a build target to download the file -file(DOWNLOAD "${FILE_URL}" "${FILE_PATH}" STATUS stat) -list(GET stat 0 exit_code) -list(GET stat 1 msg) - -# fail on error -if(NOT exit_code EQUAL 0) - file(REMOVE "${FILE_PATH}") - message(FATAL_ERROR "Error downloading: ${msg}") -endif() diff --git a/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in b/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in deleted file mode 100644 index d0ef31f298..0000000000 --- a/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in +++ /dev/null @@ -1,14 +0,0 @@ - -if(APPLE) - set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") - set(ENV{CFLAGS} 
"@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -set(ENV{PATH} $ENV{PATH}:@cdat_EXTERNALS@/bin) - -EXECUTE_PROCESS( - # COMMAND sh configure --disable-static --disable-network --disable-zlib --disable-ffserver --disable-ffplay --disable-decoders --enable-shared --enable-swscale --prefix=@ffmpeg_install@ - COMMAND make - WORKING_DIRECTORY "@ffmpeg_source@" - RESULT_VARIABLE rv - ) diff --git a/CMake/cdat_modules_extra/git_clone.sh.in b/CMake/cdat_modules_extra/git_clone.sh.in deleted file mode 100755 index 05bb4d3fdb..0000000000 --- a/CMake/cdat_modules_extra/git_clone.sh.in +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -cd "@CMAKE_INSTALL_PREFIX@" -"@GIT_EXECUTABLE@" clone --no-checkout --depth 1 -b @BRANCH@ @GIT_URL@ "@GIT_TARGET@" -cd "@GIT_TARGET@" -if "@GIT_EXECUTABLE@" rev-parse --symbolic-full-name @BRANCH@ | grep -q '^refs/tags/'; then - "@GIT_EXECUTABLE@" checkout @BRANCH@ -else - "@GIT_EXECUTABLE@" checkout origin/@BRANCH@ -fi diff --git a/CMake/cdat_modules_extra/git_update.sh.in b/CMake/cdat_modules_extra/git_update.sh.in deleted file mode 100755 index a8b3b7954a..0000000000 --- a/CMake/cdat_modules_extra/git_update.sh.in +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -cd "@SOURCE_DIR@" -"@GIT_EXECUTABLE@" fetch origin --prune -if "@GIT_EXECUTABLE@" rev-parse --symbolic-full-name @BRANCH@ | grep -q '^refs/tags/'; then - "@GIT_EXECUTABLE@" checkout -f @BRANCH@ -else - "@GIT_EXECUTABLE@" checkout -f origin/@BRANCH@ -fi diff --git a/CMake/cdat_modules_extra/gsw_build_step.cmake.in b/CMake/cdat_modules_extra/gsw_build_step.cmake.in deleted file mode 100644 index 1a344eb810..0000000000 --- a/CMake/cdat_modules_extra/gsw_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@gsw_source_dir@" -) diff --git a/CMake/cdat_modules_extra/h5py_build_step.cmake.in 
b/CMake/cdat_modules_extra/h5py_build_step.cmake.in deleted file mode 100644 index 47e7400283..0000000000 --- a/CMake/cdat_modules_extra/h5py_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@h5py_source_dir@" -) diff --git a/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in b/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in deleted file mode 100644 index 588e26535f..0000000000 --- a/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in +++ /dev/null @@ -1,10 +0,0 @@ -execute_process( - COMMAND - "@CMAKE_COMMAND@" -E copy_if_different @cdat_SOURCE_DIR@/exsrc/src/h5diff_correct_ansi.c ${WORKING_DIR}/tools/lib/h5diff.c - RESULT_VARIABLE errcode -) -if("${errcode}" STREQUAL "0") - message(STATUS "h5diff.c replaced") -else() - message(FATAL_ERROR "Replacing h5diff.c failed: ${errcode}") -endif() diff --git a/CMake/cdat_modules_extra/install.py.in b/CMake/cdat_modules_extra/install.py.in deleted file mode 100644 index 58398539d5..0000000000 --- a/CMake/cdat_modules_extra/install.py.in +++ /dev/null @@ -1,945 +0,0 @@ -import sys, getopt, os, shutil, string, glob, tempfile, hashlib -from distutils.core import setup - -build_dir = os.getcwd() -logdir = os.path.join(build_dir, 'logs').replace(" ","\ ") - -# Create logs directory if it does not exits -if not os.path.exists(logdir): - os.makedirs(logdir) - -base_build_dir = os.path.join(build_dir, '..') -os.environ['BUILD_DIR'] = build_dir - -current_dir = os.path.dirname(__file__) -src_dir = os.path.join(current_dir, '..') -installation_script_dir = os.path.join(src_dir, 'installation') -here = installation_script_dir - -sys.path.append(src_dir) -sys.path.append(build_dir) -sys.path.append(installation_script_dir) - -control_script_path = os.path.join(installation_script_dir, 'control.py') -execfile(control_script_path, globals(), globals()) - -global 
target_prefix -target_prefix = sys.prefix -for i in range(len(sys.argv)): - a = sys.argv[i] - if a=='--prefix': - target_prefix=sys.argv[i+1] - sp = a.split("--prefix=") - if len(sp)==2: - target_prefix=sp[1] - -try: - os.makedirs(os.path.join(target_prefix,'bin')) -except Exception,err: - pass -try: - os.makedirs(os.path.join(target_prefix,'include')) -except Exception,err: - pass -try: - os.makedirs(os.path.join(target_prefix,'lib')) -except Exception,err: - pass - -cdms_include_directory = os.path.join(target_prefix, 'include', 'cdms') -cdms_library_directory = os.path.join(target_prefix, 'lib') - -version_file_path = os.path.join(base_build_dir, 'version') -Version = open(version_file_path).read().strip() -version = Version.split(".") -for i in range(len(version)): - try: - version[i]=int(version[i]) - except: - version[i]=version[i].strip() - -def norm(path): - "normalize a path" - return os.path.normpath(os.path.abspath(os.path.expanduser(path))) - -def testlib (dir, name): - "Test if there is a library in a certain directory with basic name." 
- if os.path.isfile(os.path.join(dir, 'lib' + name + '.a')): - return 1 - if os.path.isfile(os.path.join(dir, 'lib' + name + '.so')): - return 1 - if os.path.isfile(os.path.join(dir, 'lib' + name + '.sl')): - return 1 - return 0 - -def configure (configuration_files): - global action, target_prefix - options={} - execfile(os.path.join(installation_script_dir, 'standard.py'), globals(), options) - for file in configuration_files: - print >>sys.stderr, 'Reading configuration:', file - execfile(os.path.join(src_dir, file), globals(), options) - - # Retrieve action - action = options['action'] - # Establish libraries and directories for CDUNIF/CDMS - netcdf_directory = norm(options.get('netcdf_directory',os.environ['EXTERNALS'])) - netcdf_include_directory = norm(options.get('netcdf_include_directory', - os.path.join(os.environ['EXTERNALS'],'include'))) - - #hdf5_library_directory = norm(os.path.join(os.environ.get('HDF5LOC',os.path.join(os.environ["EXTERNALS"])), 'lib')) - if (sys.platform in ['mac',]): - cdunif_library_directories = [cdms_library_directory,"/usr/X11R6/lib"] - else: - cdunif_library_directories = [cdms_library_directory] - - options['CDMS_INCLUDE_DAP']="yes" -## if options.get('CDMS_INCLUDE_DAP','no')=='yes': -## netcdf_include_directory=norm(os.path.join(options['CDMS_DAP_DIR'],'include','libnc-dap')) -## netcdf_library_directory=norm(os.path.join(options['CDMS_DAP_DIR'],'lib')) -## dap_include=[norm(os.path.join(options['CDMS_DAP_DIR'],'include','libdap'))] -## dap_lib_dir=[norm(os.path.join(options['CDMS_DAP_DIR'],'lib'))] -## ## dap_lib=['dap','stdc++','nc-dap','dap','curl','z','ssl','crypto','dl','z','xml2','rx','z'] -## ## if (sys.platform in ['linux2',]): -## ## dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','xml2'] -## ## elif (sys.platform in ['darwin',]): -## ## dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','pthread','xml2','z'] -## dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','pthread','xml2'] -## dap_lib = ['stdc++'] -## 
dap_lib_dir=[] -## Libs=os.popen(norm(os.path.join(options['CDMS_DAP_DIR'],'bin','ncdap-config'))+' --libs').readlines() -## Libs+=os.popen(norm(os.path.join(options['CDMS_DAP_DIR'],'bin','dap-config'))+' --client-libs').readlines() -## for libs in Libs: -## libs=libs.split() -## for l in libs: -## if l[:2]=='-l': -## dap_lib.append(l[2:]) -## elif l[:2]=='-L'and l[2:] not in dap_lib_dir: -## dap_lib_dir.append(l[2:]) -## dap_lib.append("dap") -## dap_lib.append("xml2") -## netcdfname='nc-dap' -## ## print 'daplib:',dap_lib -## else: - if 1: - ## dap_include = [os.path.join(hdf5path,"include"),os.path.join(os.environ['EXTERNALS'],'include')] - dap_include = [] - Dirs=os.popen('%s --cflags' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0] - for d in Dirs.split(): - if d[:2]=="-I": - dnm = d[2:] - if not dnm in dap_include: - dap_include.append(dnm) - dap_lib = ['stdc++'] - dap_lib = [] - dap_lib_dir=[] - ## Libs=os.popen(norm(os.path.join(os.environ['EXTERNALS'],'bin','nc-config'))+' --libs').readlines() - Libs=os.popen('%s --libs' % os.environ.get("LOCNCCONFIG","nc-config")).readlines() - for libs in Libs: - libs=libs.split() - for l in libs: - if l[:2]=='-l': - dap_lib.append(l[2:]) - elif l[:2]=='-L'and l[2:] not in dap_lib_dir: - if l[-3:]!='lib': - l+='/lib' - dap_lib_dir.append(l[2:]) - -## if enable_netcdf3==True: -## dap_include=[] -## dap_lib_dir=[] -## else: -## dap_include = [os.path.join(hdf5path,"include"),os.path.join(os.environ['EXTERNALS'],'include')] -## dap_lib_dir = [os.path.join(hdf5path,"lib"),os.path.join(os.environ['EXTERNALS'],'lib')] -## if enable_netcdf3 is True: -## daplib=[] -## else: -## dap_lib=['hdf5_hl','hdf5','m','z','dap','nc-dap','dapclient','curl','stdc++','xml2'] -## # for now turn off the dap crap -## dap_lib=['hdf5_hl','hdf5','m','z'] - netcdfname='netcdf' - - if options.get('CDMS_INCLUDE_HDF','no')=='yes': - hdf_libraries = ['mfhdf','df','jpeg','z'] - 
hdf_include=[norm(os.path.join(options['CDMS_HDF_DIR'],'include'))] - hdf_lib_dir=[norm(os.path.join(options['CDMS_HDF_DIR'],'lib'))] - else: - hdf_libraries = [] - hdf_include=[] - hdf_lib_dir=[] - - PNG_VERSION="" - if (sys.platform in ['darwin']): - PNG_VERSION="15" - - grib2_libraries = ["grib2c","png"+PNG_VERSION,"jasper"] - ## if netcdf_library_directory not in cdunif_library_directories: - ## cdunif_library_directories.append(netcdf_library_directory) - cdunif_include_directories = [cdms_include_directory] - ## if netcdf_include_directory not in cdunif_include_directories: - ## cdunif_include_directories.append(netcdf_include_directory) - - - if sys.platform == "sunos5": - cdunif_include_directories.append('/usr/include') - - drs_file = "@cdatEXTERNALS@/lib/libdrs.a" - - # Establish location of X11 include and library directories - if options['x11include'] or options['x11libdir']: - if options['x11include']: - options['x11include'] = norm(options['x11include']) - if options['x11libdir']: - options['x11libdir'] = norm(options['x11libdir']) - else: - for x in x11search: - if os.path.isdir(x): - if options['x11include']: - options['x11include'].append(os.path.join(x, 'include')) - options['x11libdir'].append(os.path.join(x, 'lib')) - else: - options['x11include']=[norm(os.path.join(x, 'include'))] - options['x11libdir']=[norm(os.path.join(x, 'lib'))] - else: - for w in x11OSF1lib: - if testlib(w, 'X11'): - if not options['x11libdir']: - options['x11libdir'] = [norm(w),] - else: - options['x11libdir'].append(norm(w)) - for w in x11OSF1include: - if os.path.isdir(w): - if not options['x11include']: - options['x11include'] = [norm(w),] - else: - options['x11include'].append(norm(w)) - # Check that we have both set correctly. - if not (options['x11include'] and \ - options['x11libdir'] - ): - print >>sys.stderr, """ -Failed to find X11 directories. Please see README.txt for instructions. 
-""" - print options - raise SystemExit, 1 - - # Write cdat_info.py - os.chdir(installation_script_dir) - print 'Version is: ',Version - f = open(os.path.join(build_dir, 'cdat_info.py'), 'w') - sys.path.append(build_dir) - print >> f,""" -Version = '%s' -ping_checked = False -check_in_progress = False -def version(): - return %s -""" % (Version,str(version)) - if options.get('CDMS_INCLUDE_DRS','no') == 'yes': - print >>f, """ -def get_drs_dirs (): - #import Pyfort, os - import os - #c = Pyfort.get_compiler('default') - drs_dir, junk = os.path.split(drs_file) - #return c.dirlist + [drs_dir] - return [drs_dir,"/usr/local/gfortran/lib","/usr/local/lib"] - -def get_drs_libs (): - #import Pyfort - #c = Pyfort.get_compiler('default') - return ['drs','gfortran'] + %s -""" % repr(options.get("COMPILER_EXTRA_LIBS",[])) - else: - print >>f, """ -def get_drs_dirs (): - return [] -def get_drs_libs(): - return [] -""" - - print >>f, """\ - -sleep=60 #minutes (int required) - -actions_sent = {} - -SOURCE = 'CDAT' - -def get_version(): - return Version - -def get_prefix(): - import os,sys - try: - uv_setup_pth = os.environ["UVCDAT_SETUP_PATH"] - if os.uname()[0] == "Darwin": - uv_setup_pth = os.path.join(uv_setup_pth, - "Library","Frameworks","Python.framework","Versions", - "%%i.%%i" %% (sys.version_info.major,sys.version_info.minor) - ) - return uv_setup_pth - except KeyError: - raise RuntimeError("UVCDAT environment not configured. Please source the setup_runtime script.") - -def get_sampledata_path(): - import os - try: - return os.path.join(os.environ["UVCDAT_SETUP_PATH"], - "share", "uvcdat", "sample_data") - except KeyError: - raise RuntimeError("UVCDAT environment not configured. 
Please source the setup_runtime script.") - -def runCheck(): - import cdat_info,os - if cdat_info.ping_checked is False: - check_in_progress = True - val = None - envanom = os.environ.get("UVCDAT_ANONYMOUS_LOG",None) - if envanom is not None: - if envanom.lower() in ['true','yes','y','ok']: - val = True - elif envanom.lower() in ['false','no','n','not']: - val = False - else: - import warnings - warnings.warn("UVCDAT logging environment variable UVCDAT_ANONYMOUS_LOG should be set to 'True' or 'False', you have it set to '%%s', will be ignored" %% envanom) - if val is None: # No env variable looking in .uvcdat - fanom = os.path.join(os.environ["HOME"],".uvcdat",".anonymouslog") - if os.path.exists(fanom): - f=open(fanom) - for l in f.readlines(): - sp = l.strip().split("UVCDAT_ANONYMOUS_LOG:") - if len(sp)>1: - try: - val = eval(sp[1]) - except: - pass - f.close() - - reload(cdat_info) - return val - -def askAnonymous(val): - import cdat_info,os - while cdat_info.ping_checked is False and not val in [True, False]: # couldn't get a valid value from env or file - val2 = raw_input("Allow anonymous logging usage to help improve UV-CDAT? 
(you can also set the environment variable UVCDAT_ANONYMOUS_LOG to yes or no) [yes/no]") - if val2.lower() in ['y','yes','ok']: - val = True - elif val2.lower() in ['n','no','not']: - val = False - if val in [True,False]: # store result for next time - try: - fanom = os.path.join(os.environ["HOME"],".uvcdat",".anonymouslog") - if not os.path.exists(os.path.join(os.environ["HOME"],".uvcdat")): - os.makedirs(os.path.join(os.environ["HOME"],".uvcdat")) - f=open(fanom,"w") - print >>f, "#Store information about allowing UVCDAT anonymous logging" - print >>f, "# Need sto be True or False" - print >>f, "UVCDAT_ANONYMOUS_LOG: %%s" %% val - f.close() - except Exception,err: - pass - else: - if cdat_info.ping_checked: - val = cdat_info.ping - cdat_info.ping = val - cdat_info.ping_checked = True - check_in_progress = False - -def pingPCMDIdb(*args,**kargs): - import cdat_info,os - while cdat_info.check_in_progress: - reload(cdat_info) - val = cdat_info.runCheck() - if val is False: - cdat_info.ping_checked = True - cdat_info.ping = False - return - try: - if not cdat_info.ping: - return - except: - pass - cdat_info.askAnonymous(val) - import threading - kargs['target']=pingPCMDIdbThread - kargs['args']=args - t = threading.Thread(**kargs) - t.start() - -def pingPCMDIdbThread(*args,**kargs): - import threading - kargs['target']=submitPing - kargs['args']=args - t = threading.Thread(**kargs) - t.start() - import time - time.sleep(5) # Lets wait 5 seconds top for this ping to work - if t.isAlive(): - try: - t._Thread__stop() - except: - pass -def submitPing(source,action,source_version=None): - try: - import urllib2,sys,os,cdat_info,hashlib,urllib - if source in ['cdat','auto',None]: - source = cdat_info.SOURCE - if cdat_info.ping: - if not source in actions_sent.keys(): - actions_sent[source]=[] - elif action in actions_sent[source]: - return - else: - actions_sent[source].append(action) - data={} - uname = os.uname() - data['platform']=uname[0] - 
data['platform_version']=uname[2] - data['hashed_hostname']=hashlib.sha1(uname[1]).hexdigest() - data['source']=source - if source_version is None: - data['source_version']=cdat_info.get_version() - else: - data['source_version']=source_version - data['action']=action - data['sleep']=cdat_info.sleep - data['hashed_username']=hashlib.sha1(os.getlogin()).hexdigest() - urllib2.urlopen('http://uv-cdat.llnl.gov/UVCDATUsage/log/add/',urllib.urlencode(data)) - except Exception,err: - pass - -CDMS_INCLUDE_DAP = %s -CDMS_DAP_DIR = %s -CDMS_HDF_DIR = %s -CDMS_GRIB2LIB_DIR = %s -CDMS_INCLUDE_GRIB2LIB = %s -CDMS_INCLUDE_DRS = %s -CDMS_INCLUDE_HDF = %s -CDMS_INCLUDE_PP = %s -CDMS_INCLUDE_QL = %s -drs_file = %s -netcdf_directory = %s -netcdf_include_directory = %s -cdunif_include_directories = %s + %s + %s -cdunif_library_directories = %s + %s + %s + get_drs_dirs() -cdunif_libraries = %s + %s + get_drs_libs() + %s + %s -x11include = %s -x11libdir = %s -mathlibs = %s -action = %s -externals = %s -""" % ( - repr(options.get('CDMS_INCLUDE_DAP','no')), - repr(options.get('CDMS_DAP_DIR','.')), - repr(options.get('CDMS_HDF_DIR','.')), - repr(options.get('CDMS_GRIB2LIB_DIR',os.environ['EXTERNALS'])), - repr(options.get('CDMS_INCLUDE_GRIB2LIB',"yes")), - repr(options['CDMS_INCLUDE_DRS']), - repr(options['CDMS_INCLUDE_HDF']), - repr(options['CDMS_INCLUDE_PP']), - repr(options['CDMS_INCLUDE_QL']), - repr(drs_file), - repr(netcdf_directory), - repr(netcdf_include_directory), - repr(cdunif_include_directories),repr(dap_include),repr(hdf_include), - repr(cdunif_library_directories),repr(dap_lib_dir),repr(hdf_lib_dir), - repr(['cdms', netcdfname]),repr(dap_lib),repr(hdf_libraries),repr(grib2_libraries), - repr(options['x11include']), - repr(options['x11libdir']), - repr(options['mathlibs']), - repr(options['action']), - repr(os.environ['EXTERNALS']), - ) - if enable_aqua: - print >> f,'enable_aqua = True' - else: - print >>f, 'enable_aqua = False' - f.close() - cdat_info_path = 
os.path.join(os.environ['BUILD_DIR'], 'cdat_info') - if not norun: - # Install the configuration - #would be best to add 'clean' but it gives stupid warning error - sys.argv[1:]=['-q', 'install', '--prefix=%s' % target_prefix] - setup (name="cdat_info", - version="0.0", - package_dir = { 'cdat_info' : os.path.dirname(cdat_info_path)}, - ) - os.system('/bin/rm -fr build') - - py_prefix = os.path.join(target_prefix,'lib','python%i.%i' % sys.version_info[:2],'site-packages') - cdat_info_src_path = os.path.join(build_dir, 'cdat_info.py') - cdat_info_dst_path = os.path.join(py_prefix, 'cdat_info.py') - if os.path.isfile(cdat_info_src_path): - shutil.copyfile(cdat_info_src_path, cdat_info_dst_path) - else: - print>>sys.stderr, 'Failed to copy %s to %s' % (cdat_info_src_path, cdat_info_dst_path) - - os.chdir(here) - print >>sys.stderr, 'Configuration installed.' - -def usage(): - f = open('HELP.txt') - lines = f.readlines() - f.close() - for line in lines[10:-9]: - sys.stdout.write(line) - print '\tDefault Packages' - print '\t----------------' - packages.append('\n\tContributed Packages\n\t--------------------') - #execfile('installation/contrib.py',globals(),globals()) - for p in packages: - print '\t\t',p - -def main(arglist): - global norun, echo, force, do_configure, silent, action, logdir, enable_aqua,target_prefix, enable_netcdf3, hdf5path,zpath - enable_aqua = False - enable_cdms1 = False - enable_netcdf3=False - optlist, control_names = getopt.getopt(arglist, - "c:defhnPl", - ["enable-cdms-only", - "configuration=", - "debug", - "prefix=", - "echo", - "force", - "help", - "with-externals=", - "norun", - "PCMDI", - "pcmdi", - "psql","enable-psql", - "enable-hdf4","enable-HDF4", - "with-HDF4=","with-hdf4=", - "disable-hdf4","disable-HDF4", - "disable-contrib", - "enable-pp", - "enable-drs","enable-DRS", - "disable-externals-build", - "disable-pp", - ## Bellow are the arguments that could be passed to exsrc, nothing done with them - "disable-R","disable-r", - 
#"disable-VTK","disable-vtk", - "disable-XGKS","disable-xgks", - "disable-Pyfort","disable-pyfort", - "disable-NetCDF","disable-netcdf","disable-NETCDF", - "disable-Numeric","disable-numeric", - "disable-gplot","disable-GPLOT","disable-Gplot", - "disable-gifsicle","disable-GIFSICLE", - "disable-gifmerge","disable-GIFMERGE", - "disable-pbmplus","disable-PBMPLUS", - "disable-netpbm","disable-NETPBM", - "disable-Pmw","disable-pmw", - "disable-ioapi", - "disable-cairo", - "disable-ffmpeg", - "disable-freetype", - "disable-sampledata", - "enable-ioapi", - "enable-R","enable-r", - "enable-numpy","disable-numpy", - "enable-scipy","disable-scipy", - "enable-ipython","disable-ipython", - #"enable-VTK","enable-vtk", - "enable-XGKS","enable-xgks", - "enable-Pyfort","enable-pyfort", - "enable-NetCDF","enable-netcdf","enable-NETCDF","enable-netcdf-fortran","enable-NETCDF-Fortran", - "enable-Numeric","enable-numeric", - "enable-gplot","enable-GPlot","enable-GPLOT", - "enable-gifsicle","enable-GIFSICLE", - "enable-gifmerge","enable-GIFMERGE", - "enable-pbmplus","enable-PBMPLUS", - "enable-netpbm","enable-NETPBM", - "enable-Pmw","enable-pmw", - "enable-aqua","enable-Aqua","enable-AQUA", - "enable-cairo", - "enable-ffmpeg", - "enable-freetype", - "enable-cdms1", - "enable-netcdf3", - "enable-spanlib", - "disable-spanlib" - "disable-tkbuild", - "enable-qt", - "enable-vcs-legacy", - "enable-qt-framework", - "with-qt=", - "with-qt-lib=", - "with-qt-inc=", - "with-qt-bin=", - "qt-debug", - "list", - ] - ) - configuration_files = [] - nodap=0 - nopp=0 - nohdf=0 - selfhdf=0 - selfdap=0 - selfpp=0 - showlist=0 - qtfw=False - qtinc=None - qtlib=None - qtbin=None - qt=False - control_names = ['contrib'] - sampleData = True -## prefix_target = sys.exec_prefix - externals = os.environ.get("EXTERNALS",os.path.join(sys.prefix,"Externals")) - hdf5path = None - zpath = None - - for i in range(len(optlist)): - letter=optlist[i][0] - if letter == "--enable-vcs-legacy": - qt=True - if letter == 
"--enable-qt": - qt=True - if letter == "--enable-qt-framework": - qtfw=True - if letter == "--with-qt": - qtinc=os.path.join(optlist[i][1],"include") - qtlib=os.path.join(optlist[i][1],"lib") - qtbin=os.path.join(optlist[i][1],"bin") - if letter == "--with-qt-inc": - qtinc=optlist[i][1] - if letter == "--with-qt-bin": - qtbin=optlist[i][1] - if letter == "--with-qt-lib": - qtlib=optlist[i][1] - if letter == "--enable-cdms-only": - control_names = ['cdmsonly']+control_names - if 'contrib' in control_names: - control_names.pop(control_names.index('contrib')) - elif letter == "--with-externals": - externals = optlist[i][1] - elif letter in ["-c", "--configuration"]: - m = False - n = optlist[i][1] - if os.path.isfile(n): - m = n - elif os.path.isfile(n + '.py'): - m = n + '.py' - elif os.path.isfile(os.path.join('installation', n)): - m = os.path.join('installation', n) - elif os.path.isfile(os.path.join('installation', n + '.py')): - m = os.path.join('installation', n + '.py') - if m: - configuration_files.append(m) - else: - print >>sys.stderr, "Cannot find configuration file", optlist[i][1] - force = 1 - do_configure = 1 - elif letter in ["-d", "--debug"]: - debug_file = os.path.join('installation','debug.py') - configuration_files.append(debug_file) - force = 1 - do_configure = 1 - elif letter in ["-e", "--echo"]: - echo = 1 - elif letter in ["--enable-cdms1"]: - enable_cdms1 = True - elif letter in ["--enable-netcdf3"]: - enable_netcdf3 = True - elif letter in ["--enable-aqua","--enable-Aqua","--enable-AQUA"]: - enable_aqua = True - elif letter in ["-f", "--force"]: - force = 1 - do_configure = 1 - elif letter in ["-h", "--help"]: - usage() - raise SystemExit, 1 - elif letter in ["-P", "--PCMDI", "--pcmdi"]: - configuration_files.append(os.path.join('installation', 'pcmdi.py')) - force=1 - do_configure=1 # Need libcdms built a certain way too. 
- elif letter in ["--psql", "--enable-psql"]: - configuration_files.append(os.path.join('installation', 'psql.py')) - do_configure=1 # Need libcdms built a certain way too. -## elif letter in ["--with-OpenDAP", "--with-opendap", "--with-OPENDAP","--enable-opendap","--enable-OpenDAP","--enable-OPENDAP"]: -## configuration_files.append(os.path.join('installation', 'DAP.py')) -## do_configure=1 # Need libcdms built a certain way too. -## selfdap=1 -## elif letter in ["--with-HDF4", "--with-hdf4",'--enable-hdf4','--enable-HDF4']: -## configuration_files.append(os.path.join('installation', 'HDF.py')) -## do_configure=1 # Need libcdms built a certain way too. -## selfhdf=1 - elif letter in ["--with-hdf5",]: - hdf5path = optlist[i][1] - elif letter in ["--with-z",]: - zpath = optlist[i][1] - elif letter in ["--prefix"]: - target_prefix = optlist[i][1] - elif letter in ['--enable-drs','--enable-DRS']: - configuration_files.append(os.path.join('installation', 'pcmdi.py')) - do_configure=1 # Need libcdms built a certain way too. - elif letter in ['--enable-pp','--enable-PP']: - configuration_files.append(os.path.join('installation', 'pp.py')) - do_configure=1 # Need libcdms built a certain way too. - selfpp=1 -## elif letter in ["--enable-NetCDF","--enable-NETCDF","--enable-netcdf", -## "--enable-netcdf-fortran", -## "--disable-opendap","--disable-OpenDAP","--disable-OPENDAP"]: -## nodap=1 -## elif letter in ["--disable-hdf4","--disable-HDF4"]: -## nohdf=1 - elif letter in ["--disable-pp","--disable-PP"]: - nohdf=1 - elif letter in ["--disable-sampledata",]: - sampleData = False - elif letter in ["-n", "--norun"]: - norun = 1 - elif letter in ['--list','-l']: - showlist=1 - elif letter in ['--disable-contrib']: - for i in range(len(control_names)): - if control_names[i]=='contrib': - control_names.pop(i) - i=i-1 - CDMS_INCLUDE_DAP='yes' - if nopp==1 and selfpp==1: - raise "Error you chose to both enable and disable PP support !" 
- if nohdf==1 and selfhdf==1: - raise "Error you chose to both enable and disable HDF !" -## if (nodap==0 and selfdap==0) and (sys.platform in ['linux2','darwin']): -## configuration_files.append(os.path.join('installation', 'DAP.py')) -## do_configure=1 # Need libcdms built a certain way too. -## if (nohdf==0 and selfhdf==0) and (sys.platform in ['linux2','darwin']): -## configuration_files.append(os.path.join('installation', 'HDF.py')) -## do_configure=1 # Need libcdms built a certain way too. - if (nopp==0 and selfpp==0) and (sys.platform in ['linux2','darwin']): - configuration_files.append(os.path.join('installation', 'pp.py')) - do_configure=1 # Need libcdms built a certain way too. - - if hdf5path is None: hdf5path= os.path.join(externals) - if zpath is None: zpath= externals - os.environ['EXTERNALS']=externals - - control_files = [] - for n in control_names: - m = '' - if os.path.isfile(n): - m = n - elif os.path.isfile(n + '.py'): - m = n + '.py' - elif os.path.isfile(os.path.join('installation', n)): - m = os.path.join('installation', n) - elif os.path.isfile(os.path.join('installation', n + '.py')): - m = os.path.join('installation', n + '.py') - elif os.path.isfile(os.path.join(src_dir, 'installation', n + '.py')): - m = os.path.join(src_dir, 'installation', n + '.py') - - if m: - control_files.append(m) - else: - print >>sys.stderr, 'Cannot find control file', n - raise SystemExit, 1 - - for control_file in control_files: - print 'Running:',control_file - execfile(control_file, globals(), globals()) - - if showlist: - print 'List of Packages that would be installed:' - for p in packages: - print p - sys.exit() - if force: - os.system('./scripts/clean_script') - - sys.path.insert(0,os.path.join(target_prefix,'lib','python%i.%i' % sys.version_info[:2],'site-packages')) - if do_configure: - force = 1 - if os.path.isfile(os.path.join(build_dir, 'cdat_info.py')): - os.unlink(os.path.join(build_dir, 'cdat_info.py')) - print >>sys.stderr, 'Configuring & 
installing scripts.' - configure(configuration_files) - images_path = os.path.join(src_dir, 'images') - os.chdir(images_path) - scripts = glob.glob('*') - for script in scripts: - if script[-1] == '~': continue - if script == "README.txt": continue - target = os.path.join(target_prefix, 'bin', script) - if os.path.isfile(target): os.unlink(target) - shutil.copy(script, target) - os.chdir(here) - else: - import cdat_info - action = cdat_info.action - - # Install CDMS - cdms_library_file = os.path.join(cdms_library_directory, 'libcdms.a') - #if force or not os.path.isfile(cdms_library_file): - # install('libcdms', action) - # if (sys.platform in ['darwin',]): - # os.system('ranlib '+os.path.join(target_prefix,'lib','libcdms.a')) - - # Install Packages - package_errors=0 - package_failed=[] - if enable_cdms1: - packages.append("Packages/regrid") - packages.append("Packages/cdms") - for p in packages: - h = os.getcwd() - oldcmd=action["setup.py"]+"" - action['setup.py'] = action['setup.py'].strip()[:-1]+" build -b "+ os.environ['BUILD_DIR']+"/"+p - try: - if p == "Packages/vcs": - action["setup.py"]=oldcmd.strip()[:-1]+" --old-and-unmanageable; " - if qtfw: - action["setup.py"]=oldcmd.strip()[:-1]+" --enable-qt-framework ; " - if qt: - action["setup.py"]=oldcmd.strip()[:-1]+" --enable-qt ; " - if qtinc is not None: - action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-inc=%s ; "%qtinc - if qtlib is not None: - action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-lib=%s ; "%qtlib - if qtbin is not None: - action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-bin=%s ; "%qtbin - install(p, action) - except: - package_errors+=1 - package_failed.append(p) - os.chdir(h) - print >>sys.stderr, 'Error: Installation of Package:',p,'FAILED' - action["setup.py"]=oldcmd - - # Celebrate - if echo: - print "Simulated build complete." 
- elif not silent: - print >>sys.stderr, finish - if package_errors!=0: - print >>sys.stderr, '\n --- WARNING ---\n' - print >>sys.stderr,package_errors,'Packages reported as FAILED, see logs\n' - for p in package_failed: - print >>sys.stderr,'\t\t',p - print >>sys.stderr - print >>sys.stderr, '******************************************************\n' - """ - ****************************************************** - CDAT has been installed in %s . - Please make sure all modules built successfully - (see above build messages) - ****************************************************** - """ %(target_prefix,) - -def _install(file, action): - h = os.getcwd() - absfile = os.path.abspath(file) - print 'absfile ', absfile - dirname, basename = os.path.split(absfile) - dirfinal = os.path.split(dirname)[-1] - os.chdir(dirname) - name, ext = os.path.splitext(basename) - if ext.lower() == ".pfp": - p1 = action['*.pfp'] - elif action.has_key(absfile): - p1 = action[absfile] - elif action.has_key(file): - p1 = action[file] - elif action.has_key(basename): - p1 = action[basename] - else: - print "Do not know what to do with", file, "in", dirname - print >>sys.stderr, "Do not know what to do with", file, "in", dirname - raise SystemExit, 1 - - if log: - logfile = os.path.join(logdir, dirfinal+".LOG") - if not silent: - print >>sys.stderr, "Processing", dirfinal + ', log =', logfile - else: - logfile = tempfile.mktemp() - if not silent: - print >>sys.stderr, "Processing", dirfinal - p1 = p1 % { 'filename': file } - sep = " > %s 2>&1 ; " % logfile - p = sep.join(p1.split(";")) -## os.environ["CFLAGS"]="%s -L%s/lib" % (os.environ.get("CFLAGS",""), os.environ["EXTERNALS"]) - add_lib = "-L%s/lib" % (os.environ["EXTERNALS"],) - cflags_current = os.environ.get("CFLAGS","") - if cflags_current.find(add_lib) == -1: - os.environ["CFLAGS"]="%s %s" % (cflags_current, add_lib) - p = 'env CFLAGS="%s" %s' % (os.environ["CFLAGS"],p) - if echo: - print >> sys.stderr, p - print norun - if norun: - r 
= 0 - else: - #print '====>executing: ', p - r = os.system(p) - if r: - print >>sys.stderr, "Install failed in directory", dirname - print >>sys.stderr, "Log=", logfile - raise SystemExit, 1 - elif not log and not norun: - os.unlink(logfile) - - f = open(os.path.join(build_dir, 'rebuild.py'), 'w') - print >>f, """ -import os -j = os.system(%s) -if j: - print 'Compilation failed' - raise SystemExit, 1 -""" % (repr(p1+ " 1>LOG.rebuild"),) - f.close() - os.chdir(h) - -def install (arg, action): - arg = os.path.normpath(arg) - installer = '' - arg = os.path.join(src_dir, arg) - if os.path.isdir(arg): - for x in (glob.glob(os.path.join(arg, '*.pfp')) + \ - ['autogen.sh', - 'install.py', - 'setup.py', - 'install_script', - 'Makefile', - 'makefile'] ): - name = os.path.join(arg,x) - if os.path.isfile(name): - installer = name - break - else: - print >>sys.stderr, "Cannot find installation instructions in", arg - raise SystemExit, 1 - elif os.path.isfile(arg): - installer = arg - designator, junk = os.path.split(arg) - else: - print >>sys.stderr, "Cannot find", arg - raise SystemExit - - _install(installer, action) - - -if __name__ == "__main__": - arglist = sys.argv[1:] - main(arglist) - ## This parts creates links from Externals... 
- try: - import cdat_info - externals = cdat_info.externals - except: - externals = os.path.join(sys.prefix,"Externals") - externals = os.environ.get("EXTERNALS",externals) - externals_path = os.path.join(externals,'bin') - files = os.listdir(externals_path) - for file in files: - fnm = os.path.join(sys.prefix,'bin',file) - if not os.path.exists(fnm) and not os.path.islink(fnm): - try: - os.symlink(os.path.join(externals_path,file),fnm) - except: - pass - diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in new file mode 100755 index 0000000000..a00fce8842 --- /dev/null +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor proj4>=4.9.2 vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy + +source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@ +for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do + cd @cdat_SOURCE_DIR@/Packages/${pkg} + rm -rf build + if [ ${pkg} == "vcs" ]; then + python setup.py install --old-and-unmanageable + else + python setup.py install + fi +done + diff --git a/CMake/cdat_modules_extra/jasper_configure_step.cmake.in b/CMake/cdat_modules_extra/jasper_configure_step.cmake.in deleted file mode 100644 index ff0cccad79..0000000000 --- a/CMake/cdat_modules_extra/jasper_configure_step.cmake.in +++ /dev/null @@ -1,11 +0,0 @@ -# On linux 64, gdal picks the static jasper library, make sure only shared libraries -# are built (Alex Pletzer) - -# Make sure to pick up image and other libraries built by the superbuild -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -EXECUTE_PROCESS( - COMMAND sh configure --enable-shared --disable-static --prefix=@jasper_install@ 
- WORKING_DIRECTORY "@jasper_source@" - RESULT_VARIABLE rv - ) diff --git a/CMake/cdat_modules_extra/jpeg_install_step.cmake.in b/CMake/cdat_modules_extra/jpeg_install_step.cmake.in deleted file mode 100644 index ab724cc8ae..0000000000 --- a/CMake/cdat_modules_extra/jpeg_install_step.cmake.in +++ /dev/null @@ -1,28 +0,0 @@ - -execute_process( - COMMAND make install ${INSTALL_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -#cp build/jpeg*/lib* /Users/partyd/Kitware/uv-cdat/make-file-install/Externals/lib -#cp build/jpeg*/*.h /Users/partyd/Kitware/uv-cdat/make-file-install/Externals/include - -file(GLOB jpeglibs "@jpeg_source@/lib*") -file(GLOB jpegheaders "@jpeg_source@/*.h") - - -foreach(lib ${jpeglibs}) - execute_process( - COMMAND "@CMAKE_COMMAND@" -E copy_if_different ${lib} @jpeg_install@/lib - RESULT_VARIABLE res - OUTPUT_VARIABLE CDAT_OUT - OUTPUT_VARIABLE CDAT_ERR) -endforeach() - -foreach(header ${jpegheaders}) - execute_process( - COMMAND "@CMAKE_COMMAND@" -E copy_if_different ${header} @jpeg_install@/include - RESULT_VARIABLE res - OUTPUT_VARIABLE CDAT_OUT - OUTPUT_VARIABLE CDAT_ERR) -endforeach() \ No newline at end of file diff --git a/CMake/cdat_modules_extra/lats_Makefile.gfortran.in b/CMake/cdat_modules_extra/lats_Makefile.gfortran.in deleted file mode 100644 index 566a6b5bc5..0000000000 --- a/CMake/cdat_modules_extra/lats_Makefile.gfortran.in +++ /dev/null @@ -1,208 +0,0 @@ -# Generated automatically from Makefile.in by configure. -# -*-Mode: indented-text;-*- -# Makefile: LATS makefile -# -# Copyright: 1996, Regents of the University of California -# This software may not be distributed to others without -# permission of the author. 
-# -# Author: Bob Drach, Lawrence Livermore National Laboratory -# drach@llnl.gov -# -# Version: $Id: Makefile.in,v 1.12 1996/10/29 00:20:44 drach Exp $ -# -# Revision History: -# -# $Log: Makefile.in,v $ -# Revision 1.12 1996/10/29 00:20:44 drach -# - Removed name conflicts with CDMS -# -# Revision 1.11 1996/10/22 19:04:57 fiorino -# latsgrib bug in .ctl creator -# -# Revision 1.10 1996/10/16 22:09:51 drach -# - Added automatic gribmap generation -# - Restricted LATS_GRADS_GRIB convention to one grid per file -# -# Revision 1.9 1996/09/30 18:54:46 drach -# - permit installation without the sources being present -# - separate FORTRAN debug flag, since -O doesn't work on the Cray -# -# Revision 1.8 1996/09/17 16:52:31 drach -# - Misc. cleanup -# -# Revision 1.7 1996/08/29 19:27:17 drach -# - Cleaned up configuration macros, Makefile.in for portability -# -# Revision 1.6 1996/08/27 19:39:03 drach -# - Added FORTRAN test -# - Ported to other UNIX platforms -# -# Revision 1.5 1996/07/12 00:36:21 drach -# - (GRIB) use undefined flag only when set via lats_miss_XX -# - (GRIB) use delta when checking for missing data -# - (GRIB) define maximum and default precision -# - fixed lats_vartab to work correctly. -# - Added report of routine names, vertical dimension types -# -# Revision 1.4 1996/06/27 19:19:34 drach -# - Misc. 
cleanup -# -# Revision 1.3 1996/06/27 01:32:49 drach -# - Fixed up file permissions on install -# -# Revision 1.2 1996/06/27 01:02:38 drach -# - Added installation directives -# -# Revision 1.1 1996/06/12 18:09:23 drach -# - Initial versions -# -# -# -# Note: to generate Makefile from Makefile.in: -# ./configure --cache-file=/dev/null \ -# [--with-ncinc=] \ -# [--with-nclib=] \ -# [--prefix=cdms link: -lcdms -libdrs.so: $(OBJECTS) - $(CC) $(ARCHOPT) -headerpad_max_install_names -L/usr/X11R6/lib -L/usr/local/gfortran/lib -lgfortran -L/usr/local/lib/ -L@cdat_EXTERNALS@/lib -lnetcdf -lgrib2c -lquadmath -lcdms -shared -lpng15 -ljasper -o libdrs.so $(OBJECTS) - -drsdef.h: drsdef.HH - $(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h -#-------------------------------------------------------------------- - -install: libdrs.a - cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a - cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h - cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h -# install -f $(INSTALL_LIB) -m 644 libdrs.a -# install -f $(INSTALL_INC) -m 644 drsdef.h -# install -f $(INSTALL_INC) -m 644 drscdf.h - -#-------------------------------------------------------------------------- - -# Miscellaneous junk - -tags: - etags $(SOURCES) $(INCLUDES) - -clean: - -rm -f *.o - -rm -f *~ - -rm -f core - -.SUFFIXES: .F .o - -.F.o: - $(FC) $(FFLAGS) -c $< diff --git a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in b/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in deleted file mode 100644 index d139f0b293..0000000000 --- a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in +++ /dev/null @@ -1,89 +0,0 @@ -# DRS library Makefile -# -# Usage: -# -# To make DRS library (libdrs.a) for Linux, with Absoft FORTRAN: -# % make -# This makefile is set up for a 64-bit Macintosh and gfortran/gcc 4.6.0 -# but see comments for how to use older Macs and older gfortran/gcc. 
-# -#-------------------------------------------------------------------- - -# DEBUG = -O -DEBUG = -g -O -save-temps -FC = /usr/local/bin/gfortran -CC = gcc -#ARCHOPT = -arch x86_64 -#ARCHOPT = -arch i386 -ARCHOPT = -m64 - -FOPTS = -fcray-pointer $(ARCHOPT) -W -# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -byteswapio -# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -Dgfortran -Dmac -FFLAGS = $(DEBUG) $(FOPTS) -Dsun -Dgfortran -D__linux -D__linux_gfortran -Dmac -Dmac64 -CFLAGS = $(DEBUG) $(ARCHOPT) -INSTALL_LIB = @cdat_EXTERNALS@/lib -INSTALL_INC = @cdat_EXTERNALS@/include -# Somehow CPPFLAGS ends out on the gcc lines... -#CPPFLAGS = -Dmac -Dsun -D__linux -D__linux_pgi $(ARCHOPT) -#CPPFLAGS = -Dmac $(ARCHOPT) -Dsun -byteswapio note that byteswapio is never referenced -#CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac $(ARCHOPT) -CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac -Dmac64 $(ARCHOPT) -CPP = cpp - -FOBJECTS = getdat.o idenc.o putdat.o clavdb.o getdim.o iflun.o setdim.o getnam.o mvnbc.o cluvdb.o getnd.o bnsrch.o drserr.o seterr.o getind.o compnm.o dictlk.o putvdm.o setnam.o setdat.o setvdm.o getrge.o savdb.o putdat1.o getdat1.o getvdim.o inqlun.o inqdict.o prdict.o rdtrans.o wrtrans.o setrep.o gettbuf.o getrge2.o getelemd.o setcdim.o getcdim.o getedim.o confnm.o putdic.o getpelem.o mimem.o redat.o wrdat.o cllun.o readhd.o writehd.o wrdic.o redic.o aslun.o drssync.o drsreadb.o drsautosync.o midate.o d_floor.o mac.o -# formerly in FOBJECTS, needed for Fortran->cdms link: cddrsfwrap.o -# .. cddrsfwrap.o is a Fortran wrapper for libcdms; not really part of libdrs. -FINCLUDES = drsdef.h drscom.h cycle.h -FSOURCES = $(FOBJECTS:.o=.F) - -COBJECTS = ctoi.o getslab.o drsc.o drstrunc.o macintosh.o -# formerly in COBJECTS, needed for Fortran->cdms link: cddrs_fc.o -# ... cddrs_fc.o is C code to support the Fortran wrapper for libcdms; not really part of libdrs. 
-CINCLUDES = drscdf.h -CSOURCES = $(COBJECTS:.o=.c) - -OBJECTS = $(FOBJECTS) $(COBJECTS) -SOURCES = $(FSOURCES) $(CSOURCES) -INCLUDES = $(FINCLUDES) $(CINCLUDES) -#-------------------------------------------------------------------- - -all: drsdef.h libdrs.a libdrs.so - -shared: drsdef.h libdrs.so - -libdrs.a: $(OBJECTS) - ar rv libdrs.a $? - -# formerly in libdrs.so, needed for Fortran->cdms link: -lcdms -libdrs.so: $(OBJECTS) - $(CC) $(ARCHOPT) -headerpad_max_install_names -L/usr/local/gfortran/lib -lgfortran -L/usr/local/lib/ -L@cdat_EXTERNALS@/lib -lnetcdf -lgrib2c -lquadmath -shared -o libdrs.so $(OBJECTS) - -drsdef.h: drsdef.HH - $(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h -#-------------------------------------------------------------------- - -install: libdrs.a - cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a - cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h - cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h -# install -f $(INSTALL_LIB) -m 644 libdrs.a -# install -f $(INSTALL_INC) -m 644 drsdef.h -# install -f $(INSTALL_INC) -m 644 drscdf.h - -#-------------------------------------------------------------------------- - -# Miscellaneous junk - -tags: - etags $(SOURCES) $(INCLUDES) - -clean: - -rm -f *.o - -rm -f *~ - -rm -f core - -.SUFFIXES: .F .o - -.F.o: - $(FC) $(FFLAGS) -c $< diff --git a/CMake/cdat_modules_extra/lxml_build_step.cmake.in b/CMake/cdat_modules_extra/lxml_build_step.cmake.in deleted file mode 100644 index dca0940b94..0000000000 --- a/CMake/cdat_modules_extra/lxml_build_step.cmake.in +++ /dev/null @@ -1,19 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CXXFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CPPFLAGS}") - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@LXML_SOURCE_DIR@" - 
RESULT_VARIABLE res - OUTPUT_VARIABLE LXML_OUT - OUTPUT_VARIABLE LXML_ERR) - -if(NOT ${res} EQUAL 0) - message("LXML Errors detected: \n${LXML_OUT}\n${LXML_ERR}") - message(FATAL_ERROR "Error in config of LXML") -endif() -message("lxml build worked.") - diff --git a/CMake/cdat_modules_extra/lxml_install_step.cmake.in b/CMake/cdat_modules_extra/lxml_install_step.cmake.in deleted file mode 100644 index 21651e44eb..0000000000 --- a/CMake/cdat_modules_extra/lxml_install_step.cmake.in +++ /dev/null @@ -1,14 +0,0 @@ -message("Installing LXML:\n@LXML_PREFIX_ARGS@") - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@LXML_BINARY_DIR@" - RESULT_VARIABLE res -) - -if(NOT ${res} EQUAL 0) - message("LXML Errors detected: \n${LXML_OUT}\n${LXML_ERR}") - message(FATAL_ERROR "Error in config of LXML") -endif() -message("lxml install succeeded.") - diff --git a/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in b/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in deleted file mode 100644 index bb0102cf5c..0000000000 --- a/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@matplotlib_source_dir@" -) diff --git a/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in b/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in deleted file mode 100644 index 6c28091a3a..0000000000 --- a/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in +++ /dev/null @@ -1,9 +0,0 @@ -set(INSTALL_DIR "@cdat_EXTERNALS@") - -configure_file( - "@cdat_CMAKE_SOURCE_DIR@/cdat_modules_extra/matplotlib_setup_cfg.in" - "@matplotlib_source_dir@/setup.cfg" - @ONLY -) - -set(ENV{LD_LIBRARY_PATH} "${INSTALL_DIR}/lib;$ENV{LD_LIBRARY_PATH}") diff --git a/CMake/cdat_modules_extra/matplotlib_setup_cfg.in 
b/CMake/cdat_modules_extra/matplotlib_setup_cfg.in deleted file mode 100644 index 5dc914fe20..0000000000 --- a/CMake/cdat_modules_extra/matplotlib_setup_cfg.in +++ /dev/null @@ -1,76 +0,0 @@ -# Rename this file to setup.cfg to modify matplotlib's -# build options. - -[egg_info] - -[directories] -# Uncomment to override the default basedir in setupext.py. -# This can be a single directory or a space-delimited list of directories. -# basedirlist = @INSTALL_DIR@ - -[status] -# To suppress display of the dependencies and their versions -# at the top of the build log, uncomment the following line: -#suppress = True -# -# Uncomment to insert lots of diagnostic prints in extension code -#verbose = True - -[provide_packages] -# By default, matplotlib checks for a few dependencies and -# installs them if missing. This feature can be turned off -# by uncommenting the following lines. Acceptible values are: -# True: install, overwrite an existing installation -# False: do not install -# auto: install only if the package is unavailable. This -# is the default behavior -# -## Date/timezone support: -#pytz = False -#dateutil = False - -[gui_support] -# Matplotlib supports multiple GUI toolkits, including Cocoa, -# GTK, Fltk, MacOSX, Qt, Qt4, Tk, and WX. Support for many of -# these toolkits requires AGG, the Anti-Grain Geometry library, -# which is provided by matplotlib and built by default. -# -# Some backends are written in pure Python, and others require -# extension code to be compiled. By default, matplotlib checks -# for these GUI toolkits during installation and, if present, -# compiles the required extensions to support the toolkit. GTK -# support requires the GTK runtime environment and PyGTK. Wx -# support requires wxWidgets and wxPython. Tk support requires -# Tk and Tkinter. The other GUI toolkits do not require any -# extension code, and can be used as long as the libraries are -# installed on your system. 
-# -# You can uncomment any the following lines if you know you do -# not want to use the GUI toolkit. Acceptible values are: -# True: build the extension. Exits with a warning if the -# required dependencies are not available -# False: do not build the extension -# auto: build if the required dependencies are available, -# otherwise skip silently. This is the default -# behavior -# -gtk = False -gtkagg = False -tkagg = False -macosx = False -qt5agg = False - -[rc_options] -# User-configurable options -# -# Default backend, one of: Agg, Cairo, CocoaAgg, GTK, GTKAgg, GTKCairo, -# FltkAgg, MacOSX, Pdf, Ps, QtAgg, Qt4Agg, SVG, TkAgg, WX, WXAgg. -# -# The Agg, Ps, Pdf and SVG backends do not require external -# dependencies. Do not choose GTK, GTKAgg, GTKCairo, MacOSX, TkAgg or WXAgg -# if you have disabled the relevent extension modules. Agg will be used -# by default. -# -backend = @MATPLOTLIB_BACKEND@ -backend.qt4 = PyQt4 -# diff --git a/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in b/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in deleted file mode 100644 index 582bbbf9e8..0000000000 --- a/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -message("Installing mpi4py:\n@mpi4py_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@mpi4py_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE mpi4py_OUT - OUTPUT_VARIABLE mpi4py_ERR -) - -if(NOT ${res} EQUAL 0) - message("mpi4py Errors detected: \n${mpi4py_OUT}\n${mpi4py_ERR}") - message(FATAL_ERROR "Error in config of mpi4py") -endif() -message("Mpi4py install succeeded.") diff --git 
a/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in b/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in deleted file mode 100644 index 96f160201c..0000000000 --- a/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in +++ /dev/null @@ -1,20 +0,0 @@ -message("Building Mpi4py:\n@mpi4py_binary@") -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@mpi4py_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE mpi4py_OUT - OUTPUT_VARIABLE mpi4py_ERR) - -if(NOT ${res} EQUAL 0) - message("Mpi4py Errors detected: \n${mpi4py_OUT}\n${mpi4py_ERR}") - message(FATAL_ERROR "Error in config of mpi4py") -endif() -message("mpi4py build worked.") diff --git a/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in b/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in deleted file mode 100644 index e16a54148d..0000000000 --- a/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ - execute_process( - WORKING_DIRECTORY @netcdf_source@ - COMMAND patch -p1 - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/netcdf_clang.patch - ) - diff --git a/CMake/cdat_modules_extra/paraview_download.sh.in b/CMake/cdat_modules_extra/paraview_download.sh.in deleted file mode 100755 index dee9d7f795..0000000000 --- a/CMake/cdat_modules_extra/paraview_download.sh.in +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -cd @CMAKE_CURRENT_BINARY_DIR@/build/ - -"@GIT_EXECUTABLE@" clone @PARAVIEW_SOURCE@ -cd ParaView -"@GIT_EXECUTABLE@" checkout @paraview_branch@ -"@GIT_EXECUTABLE@" submodule init - -SUBMODULES=`git submodule status | sed 's/.* //' | sed ':a;N;$!ba;s/\n/ /g'` - -for SUBMODULE in $SUBMODULES -do - tmp=`git config --get 
submodule.$SUBMODULE.url` - tmp=`echo $tmp | sed 's/@REPLACE_GIT_PROTOCOL_PREFIX@/@GIT_PROTOCOL_PREFIX@/g'` - git config "submodule.$SUBMODULE.url" $tmp -done - -"@GIT_EXECUTABLE@" submodule update --recursive diff --git a/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in b/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in deleted file mode 100644 index aafa3a9715..0000000000 --- a/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in +++ /dev/null @@ -1,25 +0,0 @@ - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib") -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_flags@ @cdat_external_include_directories@") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_cppflags@ @cdat_external_include_directories@") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_cxxflags@") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @ParaView_binary@/Utilities/VTKPythonWrapping - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in VTK Python Install") -endif() - -message("Install succeeded.") - diff --git a/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in b/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in deleted file mode 100644 index 09cbc2ad85..0000000000 --- a/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in +++ /dev/null @@ -1,9 +0,0 @@ - -set(EXTERNALS @pbmplus_install@) -configure_file(@cdat_external_patch_dir@/src/pbmplus/Makefile.in - @pbmplus_source@/Makefile - @ONLY) - 
-configure_file(@cdat_external_patch_dir@/src/pbmplus/pnm/Makefile.in - ${pbmplus_source}/pnm/Makefile - @ONLY) diff --git a/CMake/cdat_modules_extra/pmw_install_step.cmake.in b/CMake/cdat_modules_extra/pmw_install_step.cmake.in deleted file mode 100644 index 769aa7454f..0000000000 --- a/CMake/cdat_modules_extra/pmw_install_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @Pmw_source@/src - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Install") -endif() -message("Install succeeded.") diff --git a/CMake/cdat_modules_extra/pmw_make_step.cmake.in b/CMake/cdat_modules_extra/pmw_make_step.cmake.in deleted file mode 100644 index a1d3f9759c..0000000000 --- a/CMake/cdat_modules_extra/pmw_make_step.cmake.in +++ /dev/null @@ -1,15 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY @Pmw_source@/src - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make") -endif() -message("Make succeeded.") diff --git a/CMake/cdat_modules_extra/predownload.py.in b/CMake/cdat_modules_extra/predownload.py.in deleted file mode 100755 index 3a3af91046..0000000000 --- a/CMake/cdat_modules_extra/predownload.py.in +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/python - -import shlex -import subprocess -import urllib2 -import os - -fileName = "@PARTS_BUILT_INFO@" -fetched_data = "@cdat_BINARY_DIR@/fetched_for_offline" -try: - os.makedirs(fetched_data) -except: - pass -try: - os.makedirs(os.path.join(fetched_data,"contrib")) -except: - pass - - -def fetch(url,md5=None): - try: - import 
hashlib - HAS_HASHLIB=True - except: - HAS_HASHLIB=False - if md5 is None: - HAS_HASHLIB=False - - print "Fetching: ",url - if HAS_HASHLIB: - print "Will control md5" - u = urllib2.urlopen(url) - nm = os.path.join(fetched_data,url.split("/")[-1]) - f=open(nm,'w') - sz = 65536 - if HAS_HASHLIB: - hsh =hashlib.md5() - buf = u.read(sz) - while len(buf)>0: - f.write(buf) - if HAS_HASHLIB: - hsh.update(buf) - buf=u.read(sz) - f.close() - if HAS_HASHLIB and hsh.hexdigest()!=md5: - raise Exception,"Error downloading file: %s, md5 does not match" % nm - -def processFile(name): - f=open(name) - for ln in f.xreadlines(): - sp = ln.split() - nm = sp[0] - ver = sp[1] - try: - url = sp[2] - except: - url = None - try: - md5 = sp[3] - except: - md5 = None - try: - url2 = sp[4] - except: - url2 = None - try: - md5b = sp[5] - except: - md5b = None - if url=="N/A": - continue - elif url.find("git://")>-1 or url.strip()[-4:]==".git": - if md5 is None: - md5 = "master" - nm = url.split("/")[-1][:-4] - cmd = "git clone --depth 1 -b %s %s %s/%s " % (md5,url,fetched_data,nm) - subprocess.Popen(shlex.split(cmd)) - elif url is not None: - fetch(url,md5) - if url2 is not None: - fetch(url2,md5b) - ## Ok now does the git submodules - for c in ["eof2","windfield","sciMake","windspharm","eofs"]: - cmd = "cp -rf @cdat_SOURCE_DIR@/contrib/%s %s/contrib" % (c,fetched_data) - subprocess.Popen(shlex.split(cmd)) -if __name__ == "__main__": - processFile(fileName) - diff --git a/CMake/cdat_modules_extra/preofflinebuild.sh.in b/CMake/cdat_modules_extra/preofflinebuild.sh.in deleted file mode 100755 index b42dacfdec..0000000000 --- a/CMake/cdat_modules_extra/preofflinebuild.sh.in +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/sh - -fetched_data="fetched_for_offline" -cp ${fetched_data}/*gz ${fetched_data}/*bz2 ${fetched_data}/*zip @cdat_BINARY_DIR@ -cp -r ${fetched_data}/contrib @cdat_SOURCE_DIR@ -cp -rf ${fetched_data}/vistrails @CMAKE_INSTALL_PREFIX@ -cp -rf ${fetched_data}/paraview-*/* 
@cdat_BINARY_DIR@/build/ParaView -tar -xf @cdat_BINARY_DIR@/visit*.gz -C @cdat_BINARY_DIR@ -rm -rf @cdat_BINARY_DIR@/build/VisIt -mv @cdat_BINARY_DIR@/src @cdat_BINARY_DIR@/build/VisIt - diff --git a/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in b/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in deleted file mode 100644 index 910bef7e26..0000000000 --- a/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @PyOpenGL_source@ - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in b/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in deleted file mode 100644 index 41fe74e840..0000000000 --- a/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY @PyOpenGL_source@ - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in b/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in deleted file mode 100644 index 54bf52d5fc..0000000000 --- a/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ - execute_process( - WORKING_DIRECTORY @pyspharm_source@ - COMMAND patch - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/pyspharm_setup.patch - ) - diff --git a/CMake/cdat_modules_extra/python_configure_step.cmake.in 
b/CMake/cdat_modules_extra/python_configure_step.cmake.in deleted file mode 100644 index 27f6532751..0000000000 --- a/CMake/cdat_modules_extra/python_configure_step.cmake.in +++ /dev/null @@ -1,42 +0,0 @@ -CMAKE_POLICY(SET CMP0012 NEW) - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}") -set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib @cdat_osx_ld_flags@ $ENV{LDFLAGS}") -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS} $ENV{CFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS} $ENV{CPPFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS} $ENV{CXXFLAGS}") -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") -set(ENV{FC} "") -set(ENV{FCFLAGS} "") -set(ENV{FCLIBS} "") -set(ENV{F77} "") -set(ENV{FFLAGS} "") -set(ENV{FLIBS} "") -set(ENV{LD_X11} "") # for xgks -set(ENV{OPT} "") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -if(APPLE) - set(ENV{AQUA_CDAT} "no") - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") - unset(ENV{MAKEFLAGS}) - if(@_CURRENT_OSX_SDK_VERSION@ VERSION_LESS "10.11") - set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --with-system-expat --enable-framework=@CMAKE_INSTALL_PREFIX@/Library/Frameworks) - else() - set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --enable-framework=@CMAKE_INSTALL_PREFIX@/Library/Frameworks) - endif() -elseif(UNIX) - set(library_param 
--prefix=@CMAKE_INSTALL_PREFIX@ --enable-shared --enable-unicode=ucs4) -endif() - -EXECUTE_PROCESS( - COMMAND sh configure ${library_param} - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) diff --git a/CMake/cdat_modules_extra/python_install_step.cmake.in b/CMake/cdat_modules_extra/python_install_step.cmake.in deleted file mode 100644 index 74a63d1815..0000000000 --- a/CMake/cdat_modules_extra/python_install_step.cmake.in +++ /dev/null @@ -1,51 +0,0 @@ - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -# During install for what ever reason python will fail if these are set. - -set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}") -if(NOT APPLE) - set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -endif() -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib @cdat_osx_ld_flags@") -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS}") -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") -set(ENV{FC} "") -set(ENV{FCFLAGS} "") -set(ENV{FCLIBS} "") -set(ENV{F77} "") -set(ENV{FFLAGS} "") -set(ENV{FLIBS} "") -set(ENV{LD_X11} "") # for xgks -set(ENV{OPT} "") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -if(APPLE) - set(ENV{AQUA_CDAT} "no") - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") - unset(ENV{MAKEFLAGS}) - - EXECUTE_PROCESS( - COMMAND make frameworkinstallunixtools - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) - - 
EXECUTE_PROCESS( - COMMAND make frameworkinstall - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) - -else() - - EXECUTE_PROCESS( - COMMAND make install - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) - -endif() diff --git a/CMake/cdat_modules_extra/python_make_step.cmake.in b/CMake/cdat_modules_extra/python_make_step.cmake.in deleted file mode 100644 index 674463f893..0000000000 --- a/CMake/cdat_modules_extra/python_make_step.cmake.in +++ /dev/null @@ -1,34 +0,0 @@ - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}") -set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib @cdat_osx_ld_flags@ $ENV{LDFLAGS}") -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS} $ENV{CFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS} $ENV{CPPFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS} $ENV{CXXFLAGS}") -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") -set(ENV{FC} "") -set(ENV{FCFLAGS} "") -set(ENV{FCLIBS} "") -set(ENV{F77} "") -set(ENV{FFLAGS} "") -set(ENV{FLIBS} "") -set(ENV{LD_X11} "") # for xgks -set(ENV{OPT} "") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -if(APPLE) - set(ENV{AQUA_CDAT} "no") - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") - unset(ENV{MAKEFLAGS}) -endif() - -EXECUTE_PROCESS( - COMMAND make - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) diff --git a/CMake/cdat_modules_extra/python_patch_step.cmake.in 
b/CMake/cdat_modules_extra/python_patch_step.cmake.in deleted file mode 100644 index ff2843efb5..0000000000 --- a/CMake/cdat_modules_extra/python_patch_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -execute_process( - COMMAND - "@CMAKE_COMMAND@" -E copy_if_different "@cdat_SOURCE_DIR@/pysrc/src/setup-@PYTHON_VERSION@.py" "@python_SOURCE_DIR@/setup.py" - RESULT_VARIABLE errcode -) -if("${errcode}" STREQUAL "0") - message(STATUS "setup.py replaced") -else() - message(FATAL_ERROR "Replacing setup.py failed: ${errcode}") -endif() - -execute_process( - COMMAND - "@CMAKE_COMMAND@" -E copy_if_different "@cdat_SOURCE_DIR@/pysrc/src/site-@PYTHON_VERSION@.py" "@python_SOURCE_DIR@/Lib/site.py" - RESULT_VARIABLE errcode -) -if("${errcode}" STREQUAL "0") - message(STATUS "site.py replaced") -else() - message(FATAL_ERROR "Replacing site.py failed: ${errcode}") -endif() diff --git a/CMake/cdat_modules_extra/python_setup.py.in b/CMake/cdat_modules_extra/python_setup.py.in deleted file mode 100644 index 106853088e..0000000000 --- a/CMake/cdat_modules_extra/python_setup.py.in +++ /dev/null @@ -1,1918 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision: 78785 $" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine - -from distutils import log -from distutils import sysconfig -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib - -# This global variable is used to hold the list of modules to be disabled. 
-disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. 
- for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? - raise ValueError("No source directory; cannot proceed.") - - # Figure out the location of the source code for extension modules - # (This logic is copied in distutils.test.test_sysconfig, - # so building in a separate directory does not break test_distutils.) 
- moddir = os.path.join(os.getcwd(), srcdir, 'Modules') - moddir = os.path.normpath(moddir) - srcdir, tail = os.path.split(moddir) - srcdir = os.path.normpath(srcdir) - moddir = os.path.normpath(moddir) - - moddirlist = [moddir] - incdirlist = ['./Include'] - - # Platform-dependent module source and include directories - platform = self.get_platform() - if platform in ('darwin', 'mac') and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(os.getcwd(), srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append('./Mac/Include') - - alldirlist = moddirlist + incdirlist - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = glob("Include/*.h") + ["pyconfig.h"] - - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, alldirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - ext.include_dirs.append( '.' ) # to get config.h - for incdir in incdirlist: - ext.include_dirs.append( os.path.join(srcdir, incdir) ) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - if platform != 'mac': - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print "Failed to find the necessary bits to build these modules:" - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, sys.exc_info()[1])) - 
self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if self.get_platform() == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if self.get_platform() == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def get_platform(self): - # Get value of sys.platform - for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: - if sys.platform.startswith(platform): - return platform - return sys.platform - - def detect_modules(self): - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). - for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. 
- env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr': - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. - lib_dirs = self.compiler.library_dirs + [ - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - platform = self.get_platform() - (srcdir,) = sysconfig.get_config_vars('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - if platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them 
here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. - cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if platform in ['darwin', 'beos', 'mac']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. - # - - # Some modules that are normally always on: - exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c'], - libraries=math_libs) ) - - # math library functions, e.g. 
sin() - exts.append( Extension('math', ['mathmodule.c'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - exts.append( Extension('operator', ['operator.c']) ) - # Python 3.0 _fileio module - exts.append( Extension("_fileio", ["_fileio.c"]) ) - # Python 3.0 _bytesio module - exts.append( Extension("_bytesio", ["_bytesio.c"]) ) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not 
None: - locale_libs = ['intl'] - else: - locale_libs = [] - if platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) - - # fcntl(2) and ioctl(2) - exts.append( Extension('fcntl', ['fcntlmodule.c']) ) - if platform not in ['mac']: - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - else: - missing.extend(['pwd', 'grp', 'spwd']) - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). - if platform not in ['atheos', 'mac']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - if platform not in ['mac']: - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - else: - missing.append('syslog') - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. 
From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. - exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - if platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (broken) dynamic library in /usr/lib. 
- readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if self.compiler.find_library_file(lib_dirs, - 'ncursesw'): - readline_libs.append('ncursesw') - elif self.compiler.find_library_file(lib_dirs, - 'ncurses'): - readline_libs.append('ncurses') - elif self.compiler.find_library_file(lib_dirs, 'curses'): - readline_libs.append('curses') - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - if platform not in ['mac']: - # crypt module. - - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - else: - missing.append('crypt') - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c'], - depends = ['socketmodule.h']) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - 
'^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - for ssl_inc_dir in inc_dirs + search_for_ssl_incs_in: - name = os.path.join(ssl_inc_dir, 'openssl', 'opensslv.h') - if os.path.isfile(name): - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - break - except IOError: - pass - - # first version found is what we'll use (as the compiler should) - if openssl_ver: - break - - #print 'openssl_ver = 0x%08x' % openssl_ver - - if (ssl_incs is not None and - ssl_libs is not None and - openssl_ver >= 0x00907000): - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - # these aren't strictly missing since they are unneeded. - #missing.extend(['_sha', '_md5']) - else: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - missing.append('_hashlib') - - if (openssl_ver < 0x00908000): - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. 
The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (4, 7) - min_db_ver = (3, 3) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib 
directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - ] - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - # Look for a version specific db-X.Y before an ambiguoius dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). - exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? 
- - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - for d in inc_dirs + sqlite_inc_paths: - f = os.path.join(d, "sqlite3.h") - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if sys.platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - - if sys.platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. - sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. 
Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. - f = "/usr/include/db.h" - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - # The standard Unix dbm module: - if platform not in ['cygwin']: - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't - if self.compiler.find_library_file(lib_dirs, 'ndbm'): - ndbm_libs = ['ndbm'] - else: - ndbm_libs = [] - exts.append( Extension('dbm', ['dbmmodule.c'], - define_macros=[('HAVE_NDBM_H',None)], - libraries = ndbm_libs ) ) - elif self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - exts.append( Extension( - 'dbm', ['dbmmodule.c'], - 
define_macros=[('HAVE_GDBM_NDBM_H',None)], - libraries = gdbm_libs ) ) - elif find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - exts.append( Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[('HAVE_GDBM_DASH_NDBM_H',None)], - libraries = gdbm_libs ) ) - else: - missing.append('dbm') - elif db_incs is not None: - exts.append( Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[('HAVE_BERKDB_H',None), - ('DB_DBM_HSEARCH',None)], - libraries=dblibs)) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if (self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if platform not in ['mac', 'win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. - panel_library = 'panel' - if (self.compiler.find_library_file(lib_dirs, 'ncursesw')): - curses_libs = ['ncursesw'] - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. 
- panel_library = 'panelw' - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif (self.compiler.find_library_file(lib_dirs, 'ncurses')): - curses_libs = ['ncurses'] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif (self.compiler.find_library_file(lib_dirs, 'curses') - and platform != 'darwin'): - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if sys.platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if sys.platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a - # group of developers on SourceForge; see www.libexpat.org for - # more information. The pyexpat module was written by Paul - # Prescod after a prototype by Jack Jansen. 
The Expat source - # is included in Modules/expat/. Usage of a system - # shared libexpat.so/expat.dll is not advised. - # - # More information on Expat can be found at www.libexpat.org. - # - expatinc = os.path.join(os.getcwd(), srcdir, 'Modules', 'expat') - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = [expatinc], - sources = ['pyexpat.c', - 'expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c', - ], - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = [expatinc], - sources = ['_elementtree.c'], - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. - if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif platform == 'darwin': # Mac OSX - macros = dict( - HAVE_SEM_OPEN=1, - HAVE_SEM_TIMEDWAIT=0, - 
HAVE_FD_TRANSFER=1, - HAVE_BROKEN_SEM_GETVALUE=1 - ) - libraries = [] - - elif platform == 'cygwin': # Cygwin - macros = dict( - HAVE_SEM_OPEN=1, - HAVE_SEM_TIMEDWAIT=1, - HAVE_FD_TRANSFER=0, - HAVE_BROKEN_SEM_UNLINK=1 - ) - libraries = [] - - elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. (as of June 2008) - macros = dict( # FreeBSD - HAVE_SEM_OPEN=0, - HAVE_SEM_TIMEDWAIT=0, - HAVE_FD_TRANSFER=1, - ) - libraries = [] - - elif platform.startswith('openbsd'): - macros = dict( # OpenBSD - HAVE_SEM_OPEN=0, # Not implemented - HAVE_SEM_TIMEDWAIT=0, - HAVE_FD_TRANSFER=1, - ) - libraries = [] - - elif platform.startswith('netbsd'): - macros = dict( # at least NetBSD 5 - HAVE_SEM_OPEN=1, - HAVE_SEM_TIMEDWAIT=0, - HAVE_FD_TRANSFER=1, - HAVE_BROKEN_SEM_GETVALUE=1 - ) - libraries = [] - - else: # Linux and other unices - macros = dict( - HAVE_SEM_OPEN=1, - HAVE_SEM_TIMEDWAIT=1, - HAVE_FD_TRANSFER=1 - ) - libraries = ['rt'] - - if platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - - if macros.get('HAVE_SEM_OPEN', False): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if 
platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8'): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - for fw in 'Tcl', 'Tk': - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. - return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. 
In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. - platform = self.get_platform() - if (platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. 
- tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.4', '84']: - tklib = self.compiler.find_library_file(lib_dirs, 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in sys.platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - (srcdir,) = sysconfig.get_config_vars('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if sys.platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - (srcdir,) = sysconfig.get_config_vars('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [] - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. 
- cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - exec open(ffi_configfile) in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c', - '_ctypes/malloc_closure.c'] - depends = ['_ctypes/ctypes.h'] - - if sys.platform == 'darwin': - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif sys.platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. 
- extra_link_args.append('-mimpure-text') - - elif sys.platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if sys.platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. 
- - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. Ask around on comp.lang.python -- or just try compiling Python -yourself. 
-""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in b/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in deleted file mode 100644 index 96a849a056..0000000000 --- a/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in +++ /dev/null @@ -1,20 +0,0 @@ -message("Building PyZMQ:\n@pyzmq_binary@") -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py configure --zmq=@cdat_EXTERNALS@ - WORKING_DIRECTORY "@pyzmq_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE pyzmq_OUT - OUTPUT_VARIABLE pyzmq_ERR) - -if(NOT ${res} EQUAL 0) - message("PyZMQ errors detected: \n${pyzmq_OUT}\n${pyzmq_ERR}") - message(FATAL_ERROR "Error in config of pyzmq") -endif() -message("pyzmq build worked.") diff --git a/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in b/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in deleted file mode 100644 index da21d89c1e..0000000000 --- a/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -message("Installing pyzmq:\n@pyzmq_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install --zmq=@cdat_EXTERNALS@ 
--prefix=@PYTHON_SITE_PACKAGES_PREFIX@ - WORKING_DIRECTORY "@pyzmq_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE pyzmq_OUT - OUTPUT_VARIABLE pyzmq_ERR -) - -if(NOT ${res} EQUAL 0) - message("pyzmq Errors detected: \n${pyzmq_OUT}\n${pyzmq_ERR}") - message(FATAL_ERROR "Error in config of pyzmq") -endif() -message("pyzmq install succeeded.") diff --git a/CMake/cdat_modules_extra/reset_runtime.csh.in b/CMake/cdat_modules_extra/reset_runtime.csh.in deleted file mode 100644 index c4ed68faac..0000000000 --- a/CMake/cdat_modules_extra/reset_runtime.csh.in +++ /dev/null @@ -1,24 +0,0 @@ -# First of all reset variables -foreach v ( PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH prompt ) - set tmp="UVCDAT_ORIGINAL_"${v} - if ( `eval echo \$\?$tmp` ) then - echo ${v}" env variable reset" - set vv=`eval echo \$$tmp` - setenv ${v} ${vv} - unsetenv ${tmp} - endif -end - -# Now variables for which we may have changed value or created -foreach v ( OPAL_PREFIX LIBOVERLAY_SCROLLBAR ) - set tmp="UVCDAT_ORIGINAL_"${v} - if ( `eval echo \$\?$tmp` ) then - echo ${v}" env variable reset" - set vv=`eval echo \$$tmp` - setenv ${v} ${vv} - else - unsetenv ${tmp} - endif -end -unsetenv UVCDAT_PROMPT_STRING -unsetenv UVCDAT_SETUP_PATH diff --git a/CMake/cdat_modules_extra/reset_runtime.sh.in b/CMake/cdat_modules_extra/reset_runtime.sh.in deleted file mode 100644 index 37f9577278..0000000000 --- a/CMake/cdat_modules_extra/reset_runtime.sh.in +++ /dev/null @@ -1,16 +0,0 @@ -# First of all reset variables -for v in PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH PS1 OPAL_PREFIX LIBOVERLAY_SCROLLBAR ; do - tmp="UVCDAT_ORIGINAL_"${v} - if [ -n "${!tmp}" ] ; then - echo ${v}" env variable reset" - if [ "${!tmp}" != " " ] ; then - export ${v}=${!tmp} - else - unset ${v} - fi - unset ${tmp} - fi -done -unset UVCDAT_PROMPT_STRING -unset UVCDAT_SETUP_PATH -return 0 diff --git a/CMake/cdat_modules_extra/runpytest.in b/CMake/cdat_modules_extra/runpytest.in deleted file mode 100755 index 
42fe55e682..0000000000 --- a/CMake/cdat_modules_extra/runpytest.in +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -# glue script to playback a recorded uvcdat vistrail and compare the result -# with a known good baseline image. -# takes three arguments: the name of the vistrail.vt:tagname to playback -# a set of aliases for that trail (to replace filenames for example) -# the filename of the image to compare against - -# setup uvcdat run time environment -. @CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh - -# play back the requested vistrail and make an image -"@PYTHON_EXECUTABLE@" \ - @CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py \ - -R \ - -S $1\ - -b $2\ - -a $3\ - -e @CMAKE_BINARY_DIR@/Testing/Temporary - -# compare that image with the baseline(s) for it -"@PYTHON_EXECUTABLE@" \ - @cdat_SOURCE_DIR@/testing/checkimage.py \ - @CMAKE_BINARY_DIR@/Testing/Temporary/$4 \ - $5/$4 \ - $6 diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in index 972a674adb..4946cf488e 100755 --- a/CMake/cdat_modules_extra/runtest.in +++ b/CMake/cdat_modules_extra/runtest.in @@ -1,4 +1,9 @@ #!/bin/bash -# source is not portable whereas . is -. 
"@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh" -$@ +echo "ACTIVATING ENV:"@CONDA_ENVIRONMENT_NAME@ +source activate @CONDA_ENVIRONMENT_NAME@ +export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"` +echo "Python:" `which python` +echo "Running: "$* +python -c "import vtk;print 'VTK_VERSION:',vtk.VTK_VERSION" +python -c "import numpy;print 'NUMPY_VERSION:',numpy.version.version" +$* diff --git a/CMake/cdat_modules_extra/seawater_build_step.cmake.in b/CMake/cdat_modules_extra/seawater_build_step.cmake.in deleted file mode 100644 index 7118a8eb25..0000000000 --- a/CMake/cdat_modules_extra/seawater_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@seawater_source_dir@" -) diff --git a/CMake/cdat_modules_extra/setup_runtime.csh.in b/CMake/cdat_modules_extra/setup_runtime.csh.in deleted file mode 100755 index 8a7f1c83b0..0000000000 --- a/CMake/cdat_modules_extra/setup_runtime.csh.in +++ /dev/null @@ -1,117 +0,0 @@ -# Main install prefix set by user or post install script: -# UVCDAT_INSTALL_PREFIX - -# First reset any existing UVCDAT env -. 
@CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh - -# Now store existing env var that we will be tweaking -foreach v ( PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH OPAL_PREFIX LIBOVERLAY_SCROLLBAR ) - if ( `eval echo \$\?$v` ) then - set vv=`eval echo \$$v` - setenv UVCDAT_ORIGINAL_${v} ${vv} - else - setenv UVCDAT_ORIGINAL_${v} " " - endif -end - -setenv UVCDAT_PROMPT_STRING @UVCDAT_PROMPT_STRING@ -if ( $?UVCDAT_PROMPT_BEGINNING ) then - setenv UVCDAT_ORIGINAL_prompt ${prompt} - set prompt = "[@UVCDAT_PROMPT_STRING@]${prompt}" -else if ( $?UVCDAT_PROMPT_END ) then - setenv UVCDAT_ORIGINAL_prompt ${prompt} - set prompt = "${prompt}[@UVCDAT_PROMPT_STRING@]" -endif - -# If unset, use the value configured by cmake by default. - -# Everything beyond this point will be determined relatively -# from this path. -if ( $?UVCDAT_INSTALL_PREFIX ) then - set install_prefix=${UVCDAT_INSTALL_PREFIX} -else - set install_prefix=@CMAKE_INSTALL_PREFIX@ -endif - -# Try to prevent the user from sourcing twice, -# which can lead to errors. -if ( $?UVCDAT_SETUP_PATH ) then - if ( ${UVCDAT_SETUP_PATH} == ${install_prefix} ) then - echo 'Nothing to do since UVCDAT is already setup at '${UVCDAT_SETUP_PATH} - exit 0 - else - echo 'ERROR: UVCDAT setup was previously sourced at '${UVCDAT_SETUP_PATH} - echo 'ERROR: There is no need to run setup_runtime manually anymore.' - echo 'ERROR: Open a new shell in order to use a different install location.' - echo 'ERROR: Or execute source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh.' - exit 1 - endif -endif - -# Check that the install prefix exists, otherwise stop. -if ( ! -d ${install_prefix} ) then - echo 'ERROR: '${install_prefix}' is not a directory.' - exit 1 -endif - -if ( ! $?LD_LIBRARY_PATH ) then - setenv LD_LIBRARY_PATH '' -endif - -if ( ! $?PYTHONPATH ) then - setenv PYTHONPATH '' -endif - -if ( ! 
$?PATH ) then - setenv PATH '' -endif - -if ( '@QT_LIB_DIR@' != '' ) then - if ( -d @QT_LIB_DIR@ ) then - setenv LD_LIBRARY_PATH @QT_LIB_DIR@:${LD_LIBRARY_PATH} - endif -endif - -foreach d ( @SETUP_LIBRARY_PATHS@ ) - set f=${install_prefix}/${d} - if ( -d ${f} ) then - setenv LD_LIBRARY_PATH ${f}:${LD_LIBRARY_PATH} - endif -end - -if ( `uname` == 'Darwin' ) then - setenv LD_LIBRARY_PATH /usr/X11R6/lib:/usr/lib:${LD_LIBRARY_PATH} - setenv DYLD_FALLBACK_LIBRARY_PATH ${LD_LIBRARY_PATH} -endif - -foreach d ( @SETUP_PYTHON_PATHS@ ) - set f=${install_prefix}/${d} - if ( -d ${f} ) then - setenv PYTHONPATH ${f}:${PYTHONPATH} - endif -end - -foreach d ( @SETUP_EXECUTABLE_PATHS@ ) - set f=${install_prefix}/${d} - if ( -d ${f} ) then - setenv PATH ${f}:${PATH} - endif -end - -if ( -d ${install_prefix}/Externals/lib/R ) then - setenv R_HOME ${install_prefix}/Externals/lib/R -endif - -setenv GDAL_DATA ${install_prefix}/Externals/share/gdal -setenv OPAL_PREFIX ${install_prefix}/Externals -setenv LIBOVERLAY_SCROLLBAR 0 - -setenv UVCDAT_SETUP_PATH ${install_prefix} - -unset install_prefix - -echo 'Successfully updated your environment to use UVCDAT' -echo '(changes are valid for this session/terminal only)' -echo 'Version: '${UVCDAT_PROMPT_STRING} -echo 'Location: '${UVCDAT_SETUP_PATH} -echo 'Reset these changes by running: source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh' diff --git a/CMake/cdat_modules_extra/setup_runtime.sh.in b/CMake/cdat_modules_extra/setup_runtime.sh.in deleted file mode 100755 index 0476b092bf..0000000000 --- a/CMake/cdat_modules_extra/setup_runtime.sh.in +++ /dev/null @@ -1,111 +0,0 @@ -# Everything beyond this point will be determined relatively -# from this path. -install_prefix="@CMAKE_INSTALL_PREFIX@" -# Reset previous uvcdat env messing up -. 
@CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh - -# Saves what we will mess with -for v in PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH OPAL_PREFIX LIBOVERLAY_SCROLLBAR ; do - tmp="${v}" - if [ -n "${!tmp}" ] ; then - export UVCDAT_ORIGINAL_${v}=${!v} - else - export UVCDAT_ORIGINAL_${v}=" " - fi -done - -function cleanup { - unset cleanup install_prefix library_paths python_paths executable_paths -} - -# Try to prevent the user from sourcing twice, -# which can lead to errors. -if [ -n "${UVCDAT_SETUP_PATH}" ] ; then - if [ "${UVCDAT_SETUP_PATH}" = "${install_prefix}" ] ; then - echo "Nothing to do since UVCDAT is already setup at: ${UVCDAT_SETUP_PATH}" 1>&2 - cleanup - return 0 - else - echo "ERROR: UVCDAT setup was previously sourced at: ${UVCDAT_SETUP_PATH}" 1>&2 - echo "ERROR: There is no need to run setup_runtime manually anymore." 1>&2 - echo "ERROR: Open a new shell in order to use a different install location." 1>&2 - echo "ERROR: Or execute source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh." 1>&2 - cleanup - return 1 - fi -fi - -# Check that the install prefix exists, otherwise stop. -if [ ! -d "${install_prefix}" ] ; then - echo "ERROR: ${install_prefix} is not a directory." 
1>&2 - cleanup - return 1 -fi - -# cmake set variables -library_paths=( @SETUP_LIBRARY_PATHS@ ) -python_paths=( @SETUP_PYTHON_PATHS@ ) -executable_paths=( @SETUP_EXECUTABLE_PATHS@ ) - -export UVCDAT_PROMPT_STRING=@UVCDAT_PROMPT_STRING@ -if [ "$UVCDAT_ENABLE_PROMPT_BEGINNING" ] ; then - export UVCDAT_ORIGINAL_PS1=${PS1}" " - export PS1="[@UVCDAT_PROMPT_STRING@]$PS1" - -elif [ "$UVCDAT_ENABLE_PROMPT_END" ] ; then - export UVCDAT_ORIGINAL_PS1=${PS1}" " - export PS1="$PS1[@UVCDAT_PROMPT_STRING@]" -fi - -if [ -d '@QT_LIB_DIR@' ] ; then - LD_LIBRARY_PATH='@QT_LIB_DIR@:'"${LD_LIBRARY_PATH}" -fi - -for d in "${library_paths[@]}" ; do - f="${install_prefix}/${d}" - if [ -d "${f}" ] ; then - LD_LIBRARY_PATH="${f}:${LD_LIBRARY_PATH}" - fi -done - -if [ `uname` = 'Darwin' ] ; then - LD_LIBRARY_PATH="/usr/X11R6/lib:/usr/lib:${LD_LIBRARY_PATH}" - export DYLD_FALLBACK_LIBRARY_PATH="${LD_LIBRARY_PATH}" -fi - -for d in "${python_paths[@]}" ; do - f="${install_prefix}/${d}" - if [ -d "${f}" ] ; then - PYTHONPATH="${f}:${PYTHONPATH}" - fi - unset f -done - -for d in "${executable_paths[@]}" ; do - f="${install_prefix}/${d}" - if [ -d "${f}" ] ; then - PATH="${f}:${PATH}" - fi - unset f -done - -if [ -d "${install_prefix}/Externals/lib/R" ] ; then - export R_HOME="${install_prefix}/Externals/lib/R" -fi - -export GDAL_DATA="${install_prefix}/Externals/share/gdal" -export OPAL_PREFIX="${install_prefix}/Externals" -export LIBOVERLAY_SCROLLBAR=0 - -export PATH -export LD_LIBRARY_PATH -export PYTHONPATH - -export UVCDAT_SETUP_PATH="${install_prefix}" -cleanup -echo "Successfully updated your environment to use UVCDAT" 1>&2 -echo "(changes are valid for this session/terminal only)" 1>&2 -echo "Version: ${UVCDAT_PROMPT_STRING}" 1>&2 -echo "Location: ${UVCDAT_SETUP_PATH}" 1>&2 -echo "Reset these changes by running: source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh" 1>&2 -return 0 diff --git a/CMake/cdat_modules_extra/setuptools_install_step.cmake.in 
b/CMake/cdat_modules_extra/setuptools_install_step.cmake.in deleted file mode 100644 index 0e5f477c54..0000000000 --- a/CMake/cdat_modules_extra/setuptools_install_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ LD_LIBRARY_PATH=@LD_LIBRARY_PATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @setuptools_source@ - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/setuptools_make_step.cmake.in b/CMake/cdat_modules_extra/setuptools_make_step.cmake.in deleted file mode 100644 index 7ddaec6a0c..0000000000 --- a/CMake/cdat_modules_extra/setuptools_make_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ LD_LIBRARY_PATH=@LD_LIBRARY_PATH@ "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY @setuptools_source@ - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/site.cfg.in b/CMake/cdat_modules_extra/site.cfg.in deleted file mode 100644 index 1a250deb70..0000000000 --- a/CMake/cdat_modules_extra/site.cfg.in +++ /dev/null @@ -1,4 +0,0 @@ -[DEFAULT] -library_dirs = @EXTERNALS@/lib -include_dirs = @EXTERNALS@/include - diff --git a/CMake/cdat_modules_extra/udunits2_apple_configure.in b/CMake/cdat_modules_extra/udunits2_apple_configure.in deleted file mode 100755 index 5bb7d2828c..0000000000 --- a/CMake/cdat_modules_extra/udunits2_apple_configure.in +++ /dev/null @@ -1,18006 +0,0 @@ -#! 
/bin/sh -# Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.68 for UDUNITS 2.2.17. -# -# Report bugs to . -# -# -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, -# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software -# Foundation, Inc. -# -# -# This configure script is free software; the Free Software Foundation -# gives unlimited permission to copy, distribute and modify it. -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. 
-if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! 
-f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -if test "x$CONFIG_SHELL" = x; then - as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which - # is contrary to our usage. Disable this feature. - alias -g '\${1+\"\$@\"}'='\"\$@\"' - setopt NO_GLOB_SUBST -else - case \`(set -o) 2>/dev/null\` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi -" - as_required="as_fn_return () { (exit \$1); } -as_fn_success () { as_fn_return 0; } -as_fn_failure () { as_fn_return 1; } -as_fn_ret_success () { return 0; } -as_fn_ret_failure () { return 1; } - -exitcode=0 -as_fn_success || { exitcode=1; echo as_fn_success failed.; } -as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } -as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } -as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } -if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : - -else - exitcode=1; echo positional parameters were not saved. 
-fi -test x\$exitcode = x0 || exit 1" - as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO - as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO - eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && - test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 -test \$(( 1 + 1 )) = 2 || exit 1 - - test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( - ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' - ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO - ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO - PATH=/empty FPATH=/empty; export PATH FPATH - test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ - || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1" - if (eval "$as_required") 2>/dev/null; then : - as_have_required=yes -else - as_have_required=no -fi - if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : - -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -as_found=false -for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - as_found=: - case $as_dir in #( - /*) - for as_base in sh bash ksh sh5; do - # Try only shells that exist, to save several forks. 
- as_shell=$as_dir/$as_base - if { test -f "$as_shell" || test -f "$as_shell.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : - CONFIG_SHELL=$as_shell as_have_required=yes - if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : - break 2 -fi -fi - done;; - esac - as_found=false -done -$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : - CONFIG_SHELL=$SHELL as_have_required=yes -fi; } -IFS=$as_save_IFS - - - if test "x$CONFIG_SHELL" != x; then : - # We cannot yet assume a decent shell, so we have to provide a - # neutralization value for shells without unset; and this also - # works around shells that cannot unset nonexistent variables. - # Preserve -v and -x to the replacement shell. - BASH_ENV=/dev/null - ENV=/dev/null - (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV - export CONFIG_SHELL - case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; - esac - exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"} -fi - - if test x$as_have_required = xno; then : - $as_echo "$0: This script requires a shell more modern than all" - $as_echo "$0: the shells that I found on your system." - if test x${ZSH_VERSION+set} = xset ; then - $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" - $as_echo "$0: be upgraded to zsh 4.3.4 or later." - else - $as_echo "$0: Please tell bug-autoconf@gnu.org and -$0: support-udunits@unidata.ucar.edu about your system, -$0: including any error possibly output before this -$0: message. Then install a modern shell, or manually run -$0: the script under such a shell if you do have one." - fi - exit 1 -fi -fi -fi -SHELL=${CONFIG_SHELL-/bin/sh} -export SHELL -# Unset more variables known to interfere with behavior of common tools. 
-CLICOLOR_FORCE= GREP_OPTIONS= -unset CLICOLOR_FORCE GREP_OPTIONS - -## --------------------- ## -## M4sh Shell Functions. ## -## --------------------- ## -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. 
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. 
-as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - - - as_lineno_1=$LINENO as_lineno_1a=$LINENO - as_lineno_2=$LINENO as_lineno_2a=$LINENO - eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && - test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { - # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) - sed -n ' - p - /[$]LINENO/= - ' <$as_myself | - sed ' - s/[$]LINENO.*/&-/ - t lineno - b - :lineno - N - :loop - s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ - t loop - s/-\n.*// - ' >$as_me.lineno && - chmod +x "$as_me.lineno" || - { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } - - # Don't try to exec as it changes $[0], causing all sort of problems - # (the dirname of $[0] is not the place where we might find the - # original and so on. Autoconf is especially sensitive to this). - . "./$as_me.lineno" - # Exit status is that of the last command. - exit -} - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! 
-f conf$$.exe || - as_ln_s='cp -p' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -p' - fi -else - as_ln_s='cp -p' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - -SHELL=${CONFIG_SHELL-/bin/sh} - - -test -n "$DJDIR" || exec 7<&0 &1 - -# Name of the host. -# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, -# so uname gets run too. -ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` - -# -# Initializations. -# -ac_default_prefix=/usr/local -ac_clean_files= -ac_config_libobj_dir=. -LIBOBJS= -cross_compiling=no -subdirs= -MFLAGS= -MAKEFLAGS= - -# Identity of this package. -PACKAGE_NAME='UDUNITS' -PACKAGE_TARNAME='udunits' -PACKAGE_VERSION='2.2.17' -PACKAGE_STRING='UDUNITS 2.2.17' -PACKAGE_BUGREPORT='support-udunits@unidata.ucar.edu' -PACKAGE_URL='' - -ac_unique_file="lib/converter.c" -# Factoring default headers for most tests. 
-ac_includes_default="\ -#include -#ifdef HAVE_SYS_TYPES_H -# include -#endif -#ifdef HAVE_SYS_STAT_H -# include -#endif -#ifdef STDC_HEADERS -# include -# include -#else -# ifdef HAVE_STDLIB_H -# include -# endif -#endif -#ifdef HAVE_STRING_H -# if !defined STDC_HEADERS && defined HAVE_MEMORY_H -# include -# endif -# include -#endif -#ifdef HAVE_STRINGS_H -# include -#endif -#ifdef HAVE_INTTYPES_H -# include -#endif -#ifdef HAVE_STDINT_H -# include -#endif -#ifdef HAVE_UNISTD_H -# include -#endif" - -ac_subst_vars='am__EXEEXT_FALSE -am__EXEEXT_TRUE -LTLIBOBJS -LIBOBJS -OTOOL64 -OTOOL -LIPO -NMEDIT -DSYMUTIL -MANIFEST_TOOL -RANLIB -ac_ct_AR -AR -DLLTOOL -OBJDUMP -NM -ac_ct_DUMPBIN -DUMPBIN -LD -FGREP -SED -host_os -host_vendor -host_cpu -host -build_os -build_vendor -build_cpu -build -LIBTOOL -HAVE_CUNIT_FALSE -HAVE_CUNIT_TRUE -LD_CUNIT -EGREP -GREP -ac_ct_FC -FCFLAGS -FC -LEXLIB -LEX_OUTPUT_ROOT -LEX -YFLAGS -YACC -LN_S -CPP -am__fastdepCC_FALSE -am__fastdepCC_TRUE -CCDEPMODE -AMDEPBACKSLASH -AMDEP_FALSE -AMDEP_TRUE -am__quote -am__include -DEPDIR -OBJEXT -EXEEXT -ac_ct_CC -CPPFLAGS -LDFLAGS -CFLAGS -CC -ENABLE_UDUNITS_1_FALSE -ENABLE_UDUNITS_1_TRUE -DEBUG_FALSE -DEBUG_TRUE -LIBS_COVERAGE -CFLAGS_COVERAGE -am__untar -am__tar -AMTAR -am__leading_dot -SET_MAKE -AWK -mkdir_p -MKDIR_P -INSTALL_STRIP_PROGRAM -STRIP -install_sh -MAKEINFO -AUTOHEADER -AUTOMAKE -AUTOCONF -ACLOCAL -VERSION -PACKAGE -CYGPATH_W -am__isrc -INSTALL_DATA -INSTALL_SCRIPT -INSTALL_PROGRAM -target_alias -host_alias -build_alias -LIBS -ECHO_T -ECHO_N -ECHO_C -DEFS -mandir -localedir -libdir -psdir -pdfdir -dvidir -htmldir -infodir -docdir -oldincludedir -includedir -localstatedir -sharedstatedir -sysconfdir -datadir -datarootdir -libexecdir -sbindir -bindir -program_transform_name -prefix -exec_prefix -PACKAGE_URL -PACKAGE_BUGREPORT -PACKAGE_STRING -PACKAGE_VERSION -PACKAGE_TARNAME -PACKAGE_NAME -PATH_SEPARATOR -SHELL' -ac_subst_files='' -ac_user_opts=' -enable_option_checking -enable_coverage 
-enable_debug -enable_udunits_1 -enable_dependency_tracking -enable_shared -enable_static -with_pic -enable_fast_install -with_gnu_ld -with_sysroot -enable_libtool_lock -' - ac_precious_vars='build_alias -host_alias -target_alias -CC -CFLAGS -LDFLAGS -LIBS -CPPFLAGS -CPP -YACC -YFLAGS -FC -FCFLAGS' - - -# Initialize some variables set by options. -ac_init_help= -ac_init_version=false -ac_unrecognized_opts= -ac_unrecognized_sep= -# The variables have the same names as the options, with -# dashes changed to underlines. -cache_file=/dev/null -exec_prefix=NONE -no_create= -no_recursion= -prefix=NONE -program_prefix=NONE -program_suffix=NONE -program_transform_name=s,x,x, -silent= -site= -srcdir= -verbose= -x_includes=NONE -x_libraries=NONE - -# Installation directory options. -# These are left unexpanded so users can "make install exec_prefix=/foo" -# and all the variables that are supposed to be based on exec_prefix -# by default will actually change. -# Use braces instead of parens because sh, perl, etc. also accept them. -# (The list follows the same order as the GNU Coding Standards.) -bindir='${exec_prefix}/bin' -sbindir='${exec_prefix}/sbin' -libexecdir='${exec_prefix}/libexec' -datarootdir='${prefix}/share' -datadir='${datarootdir}' -sysconfdir='${prefix}/etc' -sharedstatedir='${prefix}/com' -localstatedir='${prefix}/var' -includedir='${prefix}/include' -oldincludedir='/usr/include' -docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' -infodir='${datarootdir}/info' -htmldir='${docdir}' -dvidir='${docdir}' -pdfdir='${docdir}' -psdir='${docdir}' -libdir='${exec_prefix}/lib' -localedir='${datarootdir}/locale' -mandir='${datarootdir}/man' - -ac_prev= -ac_dashdash= -for ac_option -do - # If the previous option needs an argument, assign it. 
- if test -n "$ac_prev"; then - eval $ac_prev=\$ac_option - ac_prev= - continue - fi - - case $ac_option in - *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; - *=) ac_optarg= ;; - *) ac_optarg=yes ;; - esac - - # Accept the important Cygnus configure options, so we can diagnose typos. - - case $ac_dashdash$ac_option in - --) - ac_dashdash=yes ;; - - -bindir | --bindir | --bindi | --bind | --bin | --bi) - ac_prev=bindir ;; - -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) - bindir=$ac_optarg ;; - - -build | --build | --buil | --bui | --bu) - ac_prev=build_alias ;; - -build=* | --build=* | --buil=* | --bui=* | --bu=*) - build_alias=$ac_optarg ;; - - -cache-file | --cache-file | --cache-fil | --cache-fi \ - | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) - ac_prev=cache_file ;; - -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ - | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) - cache_file=$ac_optarg ;; - - --config-cache | -C) - cache_file=config.cache ;; - - -datadir | --datadir | --datadi | --datad) - ac_prev=datadir ;; - -datadir=* | --datadir=* | --datadi=* | --datad=*) - datadir=$ac_optarg ;; - - -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ - | --dataroo | --dataro | --datar) - ac_prev=datarootdir ;; - -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ - | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) - datarootdir=$ac_optarg ;; - - -disable-* | --disable-*) - ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? 
"invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=no ;; - - -docdir | --docdir | --docdi | --doc | --do) - ac_prev=docdir ;; - -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) - docdir=$ac_optarg ;; - - -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) - ac_prev=dvidir ;; - -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) - dvidir=$ac_optarg ;; - - -enable-* | --enable-*) - ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=\$ac_optarg ;; - - -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ - | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ - | --exec | --exe | --ex) - ac_prev=exec_prefix ;; - -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ - | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ - | --exec=* | --exe=* | --ex=*) - exec_prefix=$ac_optarg ;; - - -gas | --gas | --ga | --g) - # Obsolete; use --with-gas. 
- with_gas=yes ;; - - -help | --help | --hel | --he | -h) - ac_init_help=long ;; - -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) - ac_init_help=recursive ;; - -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) - ac_init_help=short ;; - - -host | --host | --hos | --ho) - ac_prev=host_alias ;; - -host=* | --host=* | --hos=* | --ho=*) - host_alias=$ac_optarg ;; - - -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) - ac_prev=htmldir ;; - -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ - | --ht=*) - htmldir=$ac_optarg ;; - - -includedir | --includedir | --includedi | --included | --include \ - | --includ | --inclu | --incl | --inc) - ac_prev=includedir ;; - -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ - | --includ=* | --inclu=* | --incl=* | --inc=*) - includedir=$ac_optarg ;; - - -infodir | --infodir | --infodi | --infod | --info | --inf) - ac_prev=infodir ;; - -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) - infodir=$ac_optarg ;; - - -libdir | --libdir | --libdi | --libd) - ac_prev=libdir ;; - -libdir=* | --libdir=* | --libdi=* | --libd=*) - libdir=$ac_optarg ;; - - -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ - | --libexe | --libex | --libe) - ac_prev=libexecdir ;; - -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ - | --libexe=* | --libex=* | --libe=*) - libexecdir=$ac_optarg ;; - - -localedir | --localedir | --localedi | --localed | --locale) - ac_prev=localedir ;; - -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) - localedir=$ac_optarg ;; - - -localstatedir | --localstatedir | --localstatedi | --localstated \ - | --localstate | --localstat | --localsta | --localst | --locals) - ac_prev=localstatedir ;; - -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ - | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) - localstatedir=$ac_optarg ;; - - 
-mandir | --mandir | --mandi | --mand | --man | --ma | --m) - ac_prev=mandir ;; - -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) - mandir=$ac_optarg ;; - - -nfp | --nfp | --nf) - # Obsolete; use --without-fp. - with_fp=no ;; - - -no-create | --no-create | --no-creat | --no-crea | --no-cre \ - | --no-cr | --no-c | -n) - no_create=yes ;; - - -no-recursion | --no-recursion | --no-recursio | --no-recursi \ - | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) - no_recursion=yes ;; - - -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ - | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ - | --oldin | --oldi | --old | --ol | --o) - ac_prev=oldincludedir ;; - -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ - | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ - | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) - oldincludedir=$ac_optarg ;; - - -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) - ac_prev=prefix ;; - -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) - prefix=$ac_optarg ;; - - -program-prefix | --program-prefix | --program-prefi | --program-pref \ - | --program-pre | --program-pr | --program-p) - ac_prev=program_prefix ;; - -program-prefix=* | --program-prefix=* | --program-prefi=* \ - | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) - program_prefix=$ac_optarg ;; - - -program-suffix | --program-suffix | --program-suffi | --program-suff \ - | --program-suf | --program-su | --program-s) - ac_prev=program_suffix ;; - -program-suffix=* | --program-suffix=* | --program-suffi=* \ - | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) - program_suffix=$ac_optarg ;; - - -program-transform-name | --program-transform-name \ - | --program-transform-nam | --program-transform-na \ - | --program-transform-n | --program-transform- \ - | --program-transform | 
--program-transfor \ - | --program-transfo | --program-transf \ - | --program-trans | --program-tran \ - | --progr-tra | --program-tr | --program-t) - ac_prev=program_transform_name ;; - -program-transform-name=* | --program-transform-name=* \ - | --program-transform-nam=* | --program-transform-na=* \ - | --program-transform-n=* | --program-transform-=* \ - | --program-transform=* | --program-transfor=* \ - | --program-transfo=* | --program-transf=* \ - | --program-trans=* | --program-tran=* \ - | --progr-tra=* | --program-tr=* | --program-t=*) - program_transform_name=$ac_optarg ;; - - -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) - ac_prev=pdfdir ;; - -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) - pdfdir=$ac_optarg ;; - - -psdir | --psdir | --psdi | --psd | --ps) - ac_prev=psdir ;; - -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) - psdir=$ac_optarg ;; - - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - silent=yes ;; - - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) - ac_prev=sbindir ;; - -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ - | --sbi=* | --sb=*) - sbindir=$ac_optarg ;; - - -sharedstatedir | --sharedstatedir | --sharedstatedi \ - | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ - | --sharedst | --shareds | --shared | --share | --shar \ - | --sha | --sh) - ac_prev=sharedstatedir ;; - -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ - | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ - | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ - | --sha=* | --sh=*) - sharedstatedir=$ac_optarg ;; - - -site | --site | --sit) - ac_prev=site ;; - -site=* | --site=* | --sit=*) - site=$ac_optarg ;; - - -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) - ac_prev=srcdir ;; - -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) - srcdir=$ac_optarg ;; - - 
-sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ - | --syscon | --sysco | --sysc | --sys | --sy) - ac_prev=sysconfdir ;; - -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ - | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) - sysconfdir=$ac_optarg ;; - - -target | --target | --targe | --targ | --tar | --ta | --t) - ac_prev=target_alias ;; - -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) - target_alias=$ac_optarg ;; - - -v | -verbose | --verbose | --verbos | --verbo | --verb) - verbose=yes ;; - - -version | --version | --versio | --versi | --vers | -V) - ac_init_version=: ;; - - -with-* | --with-*) - ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=\$ac_optarg ;; - - -without-* | --without-*) - ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=no ;; - - --x) - # Obsolete; use --with-x. 
- with_x=yes ;; - - -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ - | --x-incl | --x-inc | --x-in | --x-i) - ac_prev=x_includes ;; - -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ - | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) - x_includes=$ac_optarg ;; - - -x-libraries | --x-libraries | --x-librarie | --x-librari \ - | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) - ac_prev=x_libraries ;; - -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ - | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) - x_libraries=$ac_optarg ;; - - -*) as_fn_error $? "unrecognized option: \`$ac_option' -Try \`$0 --help' for more information" - ;; - - *=*) - ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` - # Reject names that are not valid shell variable names. - case $ac_envvar in #( - '' | [0-9]* | *[!_$as_cr_alnum]* ) - as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; - esac - eval $ac_envvar=\$ac_optarg - export $ac_envvar ;; - - *) - # FIXME: should be removed in autoconf 3.0. - $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 - expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && - $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 - : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" - ;; - - esac -done - -if test -n "$ac_prev"; then - ac_option=--`echo $ac_prev | sed 's/_/-/g'` - as_fn_error $? "missing argument to $ac_option" -fi - -if test -n "$ac_unrecognized_opts"; then - case $enable_option_checking in - no) ;; - fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; - *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; - esac -fi - -# Check all directory arguments for consistency. 
-for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ - datadir sysconfdir sharedstatedir localstatedir includedir \ - oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir -do - eval ac_val=\$$ac_var - # Remove trailing slashes. - case $ac_val in - */ ) - ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` - eval $ac_var=\$ac_val;; - esac - # Be sure to have absolute directory names. - case $ac_val in - [\\/$]* | ?:[\\/]* ) continue;; - NONE | '' ) case $ac_var in *prefix ) continue;; esac;; - esac - as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" -done - -# There might be people who depend on the old broken behavior: `$host' -# used to hold the argument of --host etc. -# FIXME: To remove some day. -build=$build_alias -host=$host_alias -target=$target_alias - -# FIXME: To remove some day. -if test "x$host_alias" != x; then - if test "x$build_alias" = x; then - cross_compiling=maybe - $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host. - If a cross compiler is detected then cross compile mode will be used" >&2 - elif test "x$build_alias" != "x$host_alias"; then - cross_compiling=yes - fi -fi - -ac_tool_prefix= -test -n "$host_alias" && ac_tool_prefix=$host_alias- - -test "$silent" = yes && exec 6>/dev/null - - -ac_pwd=`pwd` && test -n "$ac_pwd" && -ac_ls_di=`ls -di .` && -ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || - as_fn_error $? "working directory cannot be determined" -test "X$ac_ls_di" = "X$ac_pwd_ls_di" || - as_fn_error $? "pwd does not report name of working directory" - - -# Find the source files, if location was not specified. -if test -z "$srcdir"; then - ac_srcdir_defaulted=yes - # Try the directory containing this script, then the parent directory. 
- ac_confdir=`$as_dirname -- "$as_myself" || -$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_myself" : 'X\(//\)[^/]' \| \ - X"$as_myself" : 'X\(//\)$' \| \ - X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_myself" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - srcdir=$ac_confdir - if test ! -r "$srcdir/$ac_unique_file"; then - srcdir=.. - fi -else - ac_srcdir_defaulted=no -fi -if test ! -r "$srcdir/$ac_unique_file"; then - test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." - as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" -fi -ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" -ac_abs_confdir=`( - cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" - pwd)` -# When building in place, set srcdir=. -if test "$ac_abs_confdir" = "$ac_pwd"; then - srcdir=. -fi -# Remove unnecessary trailing slashes from srcdir. -# Double slashes in file names in object file debugging info -# mess up M-x gdb in Emacs. -case $srcdir in -*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; -esac -for ac_var in $ac_precious_vars; do - eval ac_env_${ac_var}_set=\${${ac_var}+set} - eval ac_env_${ac_var}_value=\$${ac_var} - eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} - eval ac_cv_env_${ac_var}_value=\$${ac_var} -done - -# -# Report the --help message. -# -if test "$ac_init_help" = "long"; then - # Omit some internal or obsolete options to make the list less imposing. - # This message is too long to be a string in the A/UX 3.1 sh. - cat <<_ACEOF -\`configure' configures UDUNITS 2.2.17 to adapt to many kinds of systems. - -Usage: $0 [OPTION]... [VAR=VALUE]... - -To assign environment variables (e.g., CC, CFLAGS...), specify them as -VAR=VALUE. See below for descriptions of some of the useful variables. 
- -Defaults for the options are specified in brackets. - -Configuration: - -h, --help display this help and exit - --help=short display options specific to this package - --help=recursive display the short help of all the included packages - -V, --version display version information and exit - -q, --quiet, --silent do not print \`checking ...' messages - --cache-file=FILE cache test results in FILE [disabled] - -C, --config-cache alias for \`--cache-file=config.cache' - -n, --no-create do not create output files - --srcdir=DIR find the sources in DIR [configure dir or \`..'] - -Installation directories: - --prefix=PREFIX install architecture-independent files in PREFIX - [$ac_default_prefix] - --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX - [PREFIX] - -By default, \`make install' will install all the files in -\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify -an installation prefix other than \`$ac_default_prefix' using \`--prefix', -for instance \`--prefix=\$HOME'. - -For better control, use the options below. 
- -Fine tuning of the installation directories: - --bindir=DIR user executables [EPREFIX/bin] - --sbindir=DIR system admin executables [EPREFIX/sbin] - --libexecdir=DIR program executables [EPREFIX/libexec] - --sysconfdir=DIR read-only single-machine data [PREFIX/etc] - --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] - --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --libdir=DIR object code libraries [EPREFIX/lib] - --includedir=DIR C header files [PREFIX/include] - --oldincludedir=DIR C header files for non-gcc [/usr/include] - --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] - --datadir=DIR read-only architecture-independent data [DATAROOTDIR] - --infodir=DIR info documentation [DATAROOTDIR/info] - --localedir=DIR locale-dependent data [DATAROOTDIR/locale] - --mandir=DIR man documentation [DATAROOTDIR/man] - --docdir=DIR documentation root [DATAROOTDIR/doc/udunits] - --htmldir=DIR html documentation [DOCDIR] - --dvidir=DIR dvi documentation [DOCDIR] - --pdfdir=DIR pdf documentation [DOCDIR] - --psdir=DIR ps documentation [DOCDIR] -_ACEOF - - cat <<\_ACEOF - -Program names: - --program-prefix=PREFIX prepend PREFIX to installed program names - --program-suffix=SUFFIX append SUFFIX to installed program names - --program-transform-name=PROGRAM run sed PROGRAM on installed program names - -System types: - --build=BUILD configure for building on BUILD [guessed] - --host=HOST cross-compile to build programs to run on HOST [BUILD] -_ACEOF -fi - -if test -n "$ac_init_help"; then - case $ac_init_help in - short | recursive ) echo "Configuration of UDUNITS 2.2.17:";; - esac - cat <<\_ACEOF - -Optional Features: - --disable-option-checking ignore unrecognized --enable/--with options - --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) - --enable-FEATURE[=ARG] include FEATURE [ARG=yes] - --enable-coverage Turn on code-coverage support - --enable-debug Turn on debugging support - 
--disable-udunits-1 Turn off support for the UDUNITS-1 API - [default=enabled] - --disable-dependency-tracking speeds up one-time build - --enable-dependency-tracking do not reject slow dependency extractors - --enable-shared[=PKGS] build shared libraries [default=yes] - --enable-static[=PKGS] build static libraries [default=yes] - --enable-fast-install[=PKGS] - optimize for fast installation [default=yes] - --disable-libtool-lock avoid locking (might break parallel builds) - -Optional Packages: - --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] - --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) - --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] - --with-sysroot=DIR Search for dependent libraries within DIR - (or the compiler's sysroot if not specified). - -Some influential environment variables: - CC C compiler command - CFLAGS C compiler flags - LDFLAGS linker flags, e.g. -L if you have libraries in a - nonstandard directory - LIBS libraries to pass to the linker, e.g. -l - CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if - you have headers in a nonstandard directory - CPP C preprocessor - YACC The `Yet Another Compiler Compiler' implementation to use. - Defaults to the first program found out of: `bison -y', `byacc', - `yacc'. - YFLAGS The list of arguments that will be passed by default to $YACC. - This script will default YFLAGS to the empty string to avoid a - default value of `-d' given by some make applications. - FC Fortran compiler command - FCFLAGS Fortran compiler flags - -Use these variables to override the choices made by `configure' or to help -it to find libraries and programs with nonstandard names/locations. - -Report bugs to . -_ACEOF -ac_status=$? -fi - -if test "$ac_init_help" = "recursive"; then - # If there are subdirs, report their specific --help. 
- for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue - test -d "$ac_dir" || - { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || - continue - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - cd "$ac_dir" || { ac_status=$?; continue; } - # Check for guested configure. - if test -f "$ac_srcdir/configure.gnu"; then - echo && - $SHELL "$ac_srcdir/configure.gnu" --help=recursive - elif test -f "$ac_srcdir/configure"; then - echo && - $SHELL "$ac_srcdir/configure" --help=recursive - else - $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 - fi || ac_status=$? - cd "$ac_pwd" || { ac_status=$?; break; } - done -fi - -test -n "$ac_init_help" && exit $ac_status -if $ac_init_version; then - cat <<\_ACEOF -UDUNITS configure 2.2.17 -generated by GNU Autoconf 2.68 - -Copyright (C) 2010 Free Software Foundation, Inc. 
-This configure script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it. -_ACEOF - exit -fi - -## ------------------------ ## -## Autoconf initialization. ## -## ------------------------ ## - -# ac_fn_c_try_compile LINENO -# -------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_compile - -# ac_fn_c_try_cpp LINENO -# ---------------------- -# Try to preprocess conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_cpp () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if { { ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err - ac_status=$? 
- if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } > conftest.i && { - test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || - test ! -s conftest.err - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_cpp - -# ac_fn_c_try_link LINENO -# ----------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_link () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext conftest$ac_exeext - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && { - test "$cross_compiling" = yes || - $as_test_x conftest$ac_exeext - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information - # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would - # interfere with the next link command; also delete a directory that is - # left behind by Apple's compiler. We do this before executing the actions. 
- rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_link - -# ac_fn_fc_try_compile LINENO -# --------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. -ac_fn_fc_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_fc_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_fc_try_compile - -# ac_fn_c_try_run LINENO -# ---------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes -# that executables *can* be run. -ac_fn_c_try_run () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then : - ac_retval=0 -else - $as_echo "$as_me: program exited with status $ac_status" >&5 - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=$ac_status -fi - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_run - -# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES -# ------------------------------------------------------- -# Tests whether HEADER exists, giving a warning if it cannot be compiled using -# the include files in INCLUDES and setting the cache variable VAR -# accordingly. -ac_fn_c_check_header_mongrel () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if eval \${$3+:} false; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -else - # Is the header compilable? -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 -$as_echo_n "checking $2 usability... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$4 -#include <$2> -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_header_compiler=yes -else - ac_header_compiler=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 -$as_echo "$ac_header_compiler" >&6; } - -# Is the header present? -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 -$as_echo_n "checking $2 presence... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include <$2> -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - ac_header_preproc=yes -else - ac_header_preproc=no -fi -rm -f conftest.err conftest.i conftest.$ac_ext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 -$as_echo "$ac_header_preproc" >&6; } - -# So? What about this header? -case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( - yes:no: ) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 -$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 -$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} - ;; - no:yes:* ) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 -$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 -$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" 
>&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 -$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 -$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 -$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} -( $as_echo "## ----------------------------------------------- ## -## Report this to support-udunits@unidata.ucar.edu ## -## ----------------------------------------------- ##" - ) | sed "s/^/$as_me: WARNING: /" >&2 - ;; -esac - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - eval "$3=\$ac_header_compiler" -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_header_mongrel - -# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES -# ------------------------------------------------------- -# Tests whether HEADER exists and can be compiled using the include files in -# INCLUDES, setting the cache variable VAR accordingly. -ac_fn_c_check_header_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$4 -#include <$2> -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_header_compile - -# ac_fn_c_check_type LINENO TYPE VAR INCLUDES -# ------------------------------------------- -# Tests whether TYPE exists after having included INCLUDES, setting cache -# variable VAR accordingly. -ac_fn_c_check_type () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - eval "$3=no" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -int -main () -{ -if (sizeof ($2)) - return 0; - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -int -main () -{ -if (sizeof (($2))) - return 0; - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -else - eval "$3=yes" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_type - -# ac_fn_c_check_func LINENO FUNC VAR -# ---------------------------------- -# Tests whether FUNC exists, setting the cache variable VAR accordingly -ac_fn_c_check_func () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... 
" >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -/* Define $2 to an innocuous variant, in case declares $2. - For example, HP-UX 11i declares gettimeofday. */ -#define $2 innocuous_$2 - -/* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $2 (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ - -#ifdef __STDC__ -# include -#else -# include -#endif - -#undef $2 - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char $2 (); -/* The GNU C library defines this for functions which it implements - to always fail with ENOSYS. Some functions are actually named - something starting with __ and the normal name is an alias. */ -#if defined __stub_$2 || defined __stub___$2 -choke me -#endif - -int -main () -{ -return $2 (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_func - -# ac_fn_fc_try_link LINENO -# ------------------------ -# Try to link conftest.$ac_ext, and return whether this succeeded. 
-ac_fn_fc_try_link () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext conftest$ac_exeext - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_fc_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && { - test "$cross_compiling" = yes || - $as_test_x conftest$ac_exeext - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information - # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would - # interfere with the next link command; also delete a directory that is - # left behind by Apple's compiler. We do this before executing the actions. - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_fc_try_link -cat >config.log <<_ACEOF -This file contains any messages produced by compilers while -running configure, to aid debugging if configure makes a mistake. - -It was created by UDUNITS $as_me 2.2.17, which was -generated by GNU Autoconf 2.68. Invocation command line was - - $ $0 $@ - -_ACEOF -exec 5>>config.log -{ -cat <<_ASUNAME -## --------- ## -## Platform. 
## -## --------- ## - -hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` -uname -m = `(uname -m) 2>/dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` - -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` - -/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` -/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` -/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` -/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` - -_ASUNAME - -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - $as_echo "PATH: $as_dir" - done -IFS=$as_save_IFS - -} >&5 - -cat >&5 <<_ACEOF - - -## ----------- ## -## Core tests. ## -## ----------- ## - -_ACEOF - - -# Keep a trace of the command line. -# Strip out --no-create and --no-recursion so they do not pile up. -# Strip out --silent because we don't want to record it for future runs. -# Also quote any args containing shell meta-characters. -# Make two passes to allow for proper duplicate-argument suppression. 
-ac_configure_args= -ac_configure_args0= -ac_configure_args1= -ac_must_keep_next=false -for ac_pass in 1 2 -do - for ac_arg - do - case $ac_arg in - -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - continue ;; - *\'*) - ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - case $ac_pass in - 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; - 2) - as_fn_append ac_configure_args1 " '$ac_arg'" - if test $ac_must_keep_next = true; then - ac_must_keep_next=false # Got value, back to normal. - else - case $ac_arg in - *=* | --config-cache | -C | -disable-* | --disable-* \ - | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ - | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ - | -with-* | --with-* | -without-* | --without-* | --x) - case "$ac_configure_args0 " in - "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; - esac - ;; - -* ) ac_must_keep_next=true ;; - esac - fi - as_fn_append ac_configure_args " '$ac_arg'" - ;; - esac - done -done -{ ac_configure_args0=; unset ac_configure_args0;} -{ ac_configure_args1=; unset ac_configure_args1;} - -# When interrupted or exit'd, cleanup temporary files, and complete -# config.log. We remove comments because anyway the quotes in there -# would cause problems or look ugly. -# WARNING: Use '\'' to represent an apostrophe within the trap. -# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. -trap 'exit_status=$? - # Save into config.log some information that might help in debugging. - { - echo - - $as_echo "## ---------------- ## -## Cache variables. 
## -## ---------------- ##" - echo - # The following way of writing the cache mishandles newlines in values, -( - for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - (set) 2>&1 | - case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - sed -n \ - "s/'\''/'\''\\\\'\'''\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" - ;; #( - *) - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) - echo - - $as_echo "## ----------------- ## -## Output variables. ## -## ----------------- ##" - echo - for ac_var in $ac_subst_vars - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - - if test -n "$ac_subst_files"; then - $as_echo "## ------------------- ## -## File substitutions. ## -## ------------------- ##" - echo - for ac_var in $ac_subst_files - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - fi - - if test -s confdefs.h; then - $as_echo "## ----------- ## -## confdefs.h. 
## -## ----------- ##" - echo - cat confdefs.h - echo - fi - test "$ac_signal" != 0 && - $as_echo "$as_me: caught signal $ac_signal" - $as_echo "$as_me: exit $exit_status" - } >&5 - rm -f core *.core core.conftest.* && - rm -f -r conftest* confdefs* conf$$* $ac_clean_files && - exit $exit_status -' 0 -for ac_signal in 1 2 13 15; do - trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal -done -ac_signal=0 - -# confdefs.h avoids OS command line length limits that DEFS can exceed. -rm -f -r conftest* confdefs.h - -$as_echo "/* confdefs.h */" > confdefs.h - -# Predefined preprocessor variables. - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_NAME "$PACKAGE_NAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_TARNAME "$PACKAGE_TARNAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_VERSION "$PACKAGE_VERSION" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_STRING "$PACKAGE_STRING" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_URL "$PACKAGE_URL" -_ACEOF - - -# Let the site file select an alternate cache file if it wants to. -# Prefer an explicitly selected file to automatically selected ones. -ac_site_file1=NONE -ac_site_file2=NONE -if test -n "$CONFIG_SITE"; then - # We do not want a PATH search for config.site. 
- case $CONFIG_SITE in #(( - -*) ac_site_file1=./$CONFIG_SITE;; - */*) ac_site_file1=$CONFIG_SITE;; - *) ac_site_file1=./$CONFIG_SITE;; - esac -elif test "x$prefix" != xNONE; then - ac_site_file1=$prefix/share/config.site - ac_site_file2=$prefix/etc/config.site -else - ac_site_file1=$ac_default_prefix/share/config.site - ac_site_file2=$ac_default_prefix/etc/config.site -fi -for ac_site_file in "$ac_site_file1" "$ac_site_file2" -do - test "x$ac_site_file" = xNONE && continue - if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 -$as_echo "$as_me: loading site script $ac_site_file" >&6;} - sed 's/^/| /' "$ac_site_file" >&5 - . "$ac_site_file" \ - || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "failed to load site script $ac_site_file -See \`config.log' for more details" "$LINENO" 5; } - fi -done - -if test -r "$cache_file"; then - # Some versions of bash will fail to source /dev/null (special files - # actually), so we avoid doing that. DJGPP emulates it as a regular file. - if test /dev/null != "$cache_file" && test -f "$cache_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 -$as_echo "$as_me: loading cache $cache_file" >&6;} - case $cache_file in - [\\/]* | ?:[\\/]* ) . "$cache_file";; - *) . "./$cache_file";; - esac - fi -else - { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 -$as_echo "$as_me: creating cache $cache_file" >&6;} - >$cache_file -fi - -# Check that the precious variables saved in the cache have kept the same -# value. 
-ac_cache_corrupted=false -for ac_var in $ac_precious_vars; do - eval ac_old_set=\$ac_cv_env_${ac_var}_set - eval ac_new_set=\$ac_env_${ac_var}_set - eval ac_old_val=\$ac_cv_env_${ac_var}_value - eval ac_new_val=\$ac_env_${ac_var}_value - case $ac_old_set,$ac_new_set in - set,) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,set) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,);; - *) - if test "x$ac_old_val" != "x$ac_new_val"; then - # differences in whitespace do not lead to failure. - ac_old_val_w=`echo x $ac_old_val` - ac_new_val_w=`echo x $ac_new_val` - if test "$ac_old_val_w" != "$ac_new_val_w"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 -$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} - ac_cache_corrupted=: - else - { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 -$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} - eval $ac_var=\$ac_old_val - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 -$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 -$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} - fi;; - esac - # Pass precious variables to config.status. - if test "$ac_new_set" = set; then - case $ac_new_val in - *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; - *) ac_arg=$ac_var=$ac_new_val ;; - esac - case " $ac_configure_args " in - *" '$ac_arg' "*) ;; # Avoid dups. 
Use of quotes ensures accuracy. - *) as_fn_append ac_configure_args " '$ac_arg'" ;; - esac - fi -done -if $ac_cache_corrupted; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 -$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} - as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 -fi -## -------------------- ## -## Main body of script. ## -## -------------------- ## - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - -ac_aux_dir= -for ac_dir in build-aux "$srcdir"/build-aux; do - if test -f "$ac_dir/install-sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install-sh -c" - break - elif test -f "$ac_dir/install.sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install.sh -c" - break - elif test -f "$ac_dir/shtool"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/shtool install -c" - break - fi -done -if test -z "$ac_aux_dir"; then - as_fn_error $? "cannot find install-sh, install.sh, or shtool in build-aux \"$srcdir\"/build-aux" "$LINENO" 5 -fi - -# These three variables are undocumented and unsupported, -# and are intended to be withdrawn in a future Autoconf release. -# They can cause serious problems if a builder's source tree is in a directory -# whose full name contains unusual characters. -ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. -ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. -ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. - - - -am__api_version='1.11' - -# Find a good install program. 
We prefer a C program (faster), -# so one script is as good as another. But avoid the broken or -# incompatible versions: -# SysV /etc/install, /usr/sbin/install -# SunOS /usr/etc/install -# IRIX /sbin/install -# AIX /bin/install -# AmigaOS /C/install, which installs bootblocks on floppy discs -# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag -# AFS /usr/afsws/bin/install, which mishandles nonexistent args -# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" -# OS/2's system install, which has a completely different semantic -# ./install, which can be erroneously created by make from ./install.sh. -# Reject install programs that cannot install multiple files. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 -$as_echo_n "checking for a BSD-compatible install... " >&6; } -if test -z "$INSTALL"; then -if ${ac_cv_path_install+:} false; then : - $as_echo_n "(cached) " >&6 -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - # Account for people who put trailing slashes in PATH elements. -case $as_dir/ in #(( - ./ | .// | /[cC]/* | \ - /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ - ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ - /usr/ucb/* ) ;; - *) - # OSF1 and SCO ODT 3.0 have their own names for install. - # Don't use installbsd from OSF since it installs stuff as root - # by default. - for ac_prog in ginstall scoinst install; do - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then - if test $ac_prog = install && - grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # AIX install. It has an incompatible calling convention. 
- : - elif test $ac_prog = install && - grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # program-specific install script used by HP pwplus--don't use. - : - else - rm -rf conftest.one conftest.two conftest.dir - echo one > conftest.one - echo two > conftest.two - mkdir conftest.dir - if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && - test -s conftest.one && test -s conftest.two && - test -s conftest.dir/conftest.one && - test -s conftest.dir/conftest.two - then - ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" - break 3 - fi - fi - fi - done - done - ;; -esac - - done -IFS=$as_save_IFS - -rm -rf conftest.one conftest.two conftest.dir - -fi - if test "${ac_cv_path_install+set}" = set; then - INSTALL=$ac_cv_path_install - else - # As a last resort, use the slow shell script. Don't cache a - # value for INSTALL within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. - INSTALL=$ac_install_sh - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 -$as_echo "$INSTALL" >&6; } - -# Use test -z because SunOS4 sh mishandles braces in ${var-val}. -# It thinks the first close brace ends the variable substitution. -test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' - -test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' - -test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 -$as_echo_n "checking whether build environment is sane... " >&6; } -# Just in case -sleep 1 -echo timestamp > conftest.file -# Reject unsafe characters in $srcdir or the absolute working directory -# name. Accept space and tab only in the latter. -am_lf=' -' -case `pwd` in - *[\\\"\#\$\&\'\`$am_lf]*) - as_fn_error $? 
"unsafe absolute working directory name" "$LINENO" 5;; -esac -case $srcdir in - *[\\\"\#\$\&\'\`$am_lf\ \ ]*) - as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;; -esac - -# Do `set' in a subshell so we don't clobber the current shell's -# arguments. Must try -L first in case configure is actually a -# symlink; some systems play weird games with the mod time of symlinks -# (eg FreeBSD returns the mod time of the symlink's containing -# directory). -if ( - set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` - if test "$*" = "X"; then - # -L didn't work. - set X `ls -t "$srcdir/configure" conftest.file` - fi - rm -f conftest.file - if test "$*" != "X $srcdir/configure conftest.file" \ - && test "$*" != "X conftest.file $srcdir/configure"; then - - # If neither matched, then we have a broken ls. This can happen - # if, for instance, CONFIG_SHELL is bash and it inherits a - # broken ls alias from the environment. This has actually - # happened. Such a system could not be considered "sane". - as_fn_error $? "ls -t appears to fail. Make sure there is not a broken -alias in your environment" "$LINENO" 5 - fi - - test "$2" = conftest.file - ) -then - # Ok. - : -else - as_fn_error $? "newly created file is older than distributed files! -Check your system clock" "$LINENO" 5 -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -test "$program_prefix" != NONE && - program_transform_name="s&^&$program_prefix&;$program_transform_name" -# Use a double $ so make ignores it. -test "$program_suffix" != NONE && - program_transform_name="s&\$&$program_suffix&;$program_transform_name" -# Double any \ or $. -# By default was `s,x,x', remove it if useless. 
-ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' -program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` - -# expand $ac_aux_dir to an absolute path -am_aux_dir=`cd $ac_aux_dir && pwd` - -if test x"${MISSING+set}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; - *) - MISSING="\${SHELL} $am_aux_dir/missing" ;; - esac -fi -# Use eval to expand $SHELL -if eval "$MISSING --run true"; then - am_missing_run="$MISSING --run " -else - am_missing_run= - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5 -$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} -fi - -if test x"${install_sh}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; - *) - install_sh="\${SHELL} $am_aux_dir/install-sh" - esac -fi - -# Installed binaries are usually stripped using `strip' when the user -# run `make install-strip'. However `strip' might not be the right -# tool to use in cross-compilation environments, therefore Automake -# will honor the `STRIP' environment variable to overrule this program. -if test "$cross_compiling" != no; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. -set dummy ${ac_tool_prefix}strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$STRIP"; then - ac_cv_prog_STRIP="$STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_STRIP="${ac_tool_prefix}strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -STRIP=$ac_cv_prog_STRIP -if test -n "$STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 -$as_echo "$STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_STRIP"; then - ac_ct_STRIP=$STRIP - # Extract the first word of "strip", so it can be a program name with args. -set dummy strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_STRIP"; then - ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_STRIP="strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP -if test -n "$ac_ct_STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 -$as_echo "$ac_ct_STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_STRIP" = x; then - STRIP=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - STRIP=$ac_ct_STRIP - fi -else - STRIP="$ac_cv_prog_STRIP" -fi - -fi -INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 -$as_echo_n "checking for a thread-safe mkdir -p... " >&6; } -if test -z "$MKDIR_P"; then - if ${ac_cv_path_mkdir+:} false; then : - $as_echo_n "(cached) " >&6 -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_prog in mkdir gmkdir; do - for ac_exec_ext in '' $ac_executable_extensions; do - { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue - case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( - 'mkdir (GNU coreutils) '* | \ - 'mkdir (coreutils) '* | \ - 'mkdir (fileutils) '4.1*) - ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext - break 3;; - esac - done - done - done -IFS=$as_save_IFS - -fi - - test -d ./--version && rmdir ./--version - if test "${ac_cv_path_mkdir+set}" = set; then - MKDIR_P="$ac_cv_path_mkdir -p" - else - # As a last resort, use the slow shell script. Don't cache a - # value for MKDIR_P within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. - MKDIR_P="$ac_install_sh -d" - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 -$as_echo "$MKDIR_P" >&6; } - -mkdir_p="$MKDIR_P" -case $mkdir_p in - [\\/$]* | ?:[\\/]*) ;; - */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; -esac - -for ac_prog in gawk mawk nawk awk -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_AWK+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$AWK"; then - ac_cv_prog_AWK="$AWK" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_AWK="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -AWK=$ac_cv_prog_AWK -if test -n "$AWK"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 -$as_echo "$AWK" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$AWK" && break -done - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 -$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } -set x ${MAKE-make} -ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` -if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat >conftest.make <<\_ACEOF -SHELL = /bin/sh -all: - @echo '@@@%%%=$(MAKE)=@@@%%%' -_ACEOF -# GNU make sometimes prints "make[1]: Entering ...", which would confuse us. -case `${MAKE-make} -f conftest.make 2>/dev/null` in - *@@@%%%=?*=@@@%%%*) - eval ac_cv_prog_make_${ac_make}_set=yes;; - *) - eval ac_cv_prog_make_${ac_make}_set=no;; -esac -rm -f conftest.make -fi -if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - SET_MAKE= -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - SET_MAKE="MAKE=${MAKE-make}" -fi - -rm -rf .tst 2>/dev/null -mkdir .tst 2>/dev/null -if test -d .tst; then - am__leading_dot=. -else - am__leading_dot=_ -fi -rmdir .tst 2>/dev/null - -if test "`cd $srcdir && pwd`" != "`pwd`"; then - # Use -I$(srcdir) only when $(srcdir) != ., so that make's output - # is not polluted with repeated "-I." - am__isrc=' -I$(srcdir)' - # test to see if srcdir already configured - if test -f $srcdir/config.status; then - as_fn_error $? 
"source directory already configured; run \"make distclean\" there first" "$LINENO" 5 - fi -fi - -# test whether we have cygpath -if test -z "$CYGPATH_W"; then - if (cygpath --version) >/dev/null 2>/dev/null; then - CYGPATH_W='cygpath -w' - else - CYGPATH_W=echo - fi -fi - - -# Define the identity of the package. - PACKAGE='udunits' - VERSION='2.2.17' - - -cat >>confdefs.h <<_ACEOF -#define PACKAGE "$PACKAGE" -_ACEOF - - -cat >>confdefs.h <<_ACEOF -#define VERSION "$VERSION" -_ACEOF - -# Some tools Automake needs. - -ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} - - -AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} - - -AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} - - -AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} - - -MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} - -# We need awk for the "check" target. The system "awk" is bad on -# some platforms. -# Always define AMTAR for backward compatibility. - -AMTAR=${AMTAR-"${am_missing_run}tar"} - -am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -' - - - - - -ac_config_headers="$ac_config_headers config.h" - - -CFLAGS_COVERAGE='' -LIBS_COVERAGE='' - -# Check whether --enable-coverage was given. -if test "${enable_coverage+set}" = set; then : - enableval=$enable_coverage; case "${enableval}" in - yes) CFLAGS_COVERAGE='--coverage' - LIBS_COVERAGE=-lgcov - coverage_enabled=true;; - no) ;; - *) as_fn_error $? "bad value ${enableval} for --enable-coverage" "$LINENO" 5 ;; -esac -fi - - - - -# Check whether --enable-debug was given. -if test "${enable_debug+set}" = set; then : - enableval=$enable_debug; case "${enableval}" in - yes) - CFLAGS="-g${CFLAGS:+ $CFLAGS}" - debug=true ;; - no) - CFLAGS="-O${CFLAGS:+ $CFLAGS}" - debug=false ;; - *) as_fn_error $? 
"bad value ${enableval} for --enable-debug" "$LINENO" 5 ;; -esac -else - if test "$coverage_enabled" = true; then - CFLAGS="-g${CFLAGS:+ $CFLAGS}" - debug=true -else - debug=false -fi - -fi - - if test x$debug = xtrue; then - DEBUG_TRUE= - DEBUG_FALSE='#' -else - DEBUG_TRUE='#' - DEBUG_FALSE= -fi - - - if true; then - ENABLE_UDUNITS_1_TRUE= - ENABLE_UDUNITS_1_FALSE='#' -else - ENABLE_UDUNITS_1_TRUE='#' - ENABLE_UDUNITS_1_FALSE= -fi - -# Check whether --enable-udunits-1 was given. -if test "${enable_udunits_1+set}" = set; then : - enableval=$enable_udunits_1; case "${enableval}" in - no) if false; then - ENABLE_UDUNITS_1_TRUE= - ENABLE_UDUNITS_1_FALSE='#' -else - ENABLE_UDUNITS_1_TRUE='#' - ENABLE_UDUNITS_1_FALSE= -fi - ;; - yes) ;; - *) as_fn_error $? "bad value ${enableval} for --enable-udunits-1" "$LINENO" 5 ;; - esac -fi - - -# Ensure that compilation is optimized and with assertions disabled by default. -CFLAGS=${CFLAGS:--O} -CPPFLAGS=${CPPFLAGS:--DNDEBUG} - -# The default absolute pathname of the installed units database. "pkgdatadir" -# isn't a configure-variable in the normal sense: it doesn't appear in -# "config.status" yet appears in "Makefile"; consequently, the following -# nonsense just to avoid defining the pathname in the makefile so that Eclipse -# is happy. -pkgdatadir=$(eval echo $(eval echo `echo ${datadir}`/${PACKAGE})) - -cat >>confdefs.h <<_ACEOF -#define DEFAULT_UDUNITS2_XML_PATH "${pkgdatadir}/udunits2.xml" -_ACEOF - - -# Checks for programs. -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. -set dummy ${ac_tool_prefix}gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_CC="${ac_tool_prefix}gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_CC"; then - ac_ct_CC=$CC - # Extract the first word of "gcc", so it can be a program name with args. -set dummy gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_CC="gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -else - CC="$ac_cv_prog_CC" -fi - -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. -set dummy ${ac_tool_prefix}cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_CC="${ac_tool_prefix}cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - fi -fi -if test -z "$CC"; then - # Extract the first word of "cc", so it can be a program name with args. -set dummy cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else - ac_prog_rejected=no -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then - ac_prog_rejected=yes - continue - fi - ac_cv_prog_CC="cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -if test $ac_prog_rejected = yes; then - # We found a bogon in the path, so make sure we never use it. - set dummy $ac_cv_prog_CC - shift - if test $# != 0; then - # We chose a different compiler from the bogus one. - # However, it has the same basename, so the bogon will be chosen - # first if we set CC to just the basename; use the full file name. 
- shift - ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" - fi -fi -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - for ac_prog in cl.exe - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_CC="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$CC" && break - done -fi -if test -z "$CC"; then - ac_ct_CC=$CC - for ac_prog in cl.exe -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
-else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_CC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_CC" && break -done - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -fi - -fi - - -test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "no acceptable C compiler found in \$PATH -See \`config.log' for more details" "$LINENO" 5; } - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... 
- 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done - -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" -# Try to create an executable without -o first, disregard a.out. -# It will help us diagnose broken compilers, and finding out an intuition -# of exeext. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 -$as_echo_n "checking whether the C compiler works... " >&6; } -ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` - -# The possible output files: -ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" - -ac_rmfiles= -for ac_file in $ac_files -do - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - * ) ac_rmfiles="$ac_rmfiles $ac_file";; - esac -done -rm -f $ac_rmfiles - -if { { ac_try="$ac_link_default" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link_default") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. -# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' -# in a Makefile. We should not override ac_cv_exeext if it was cached, -# so that the user can short-circuit this test for compilers unknown to -# Autoconf. 
-for ac_file in $ac_files '' -do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) - ;; - [ab].out ) - # We found the default executable, but exeext='' is most - # certainly right. - break;; - *.* ) - if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; - then :; else - ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - fi - # We set ac_cv_exeext here because the later test for it is not - # safe: cross compilers may not add the suffix if given an `-o' - # argument, so we may need to know it at that point already. - # Even if this section looks crufty: it has the advantage of - # actually working. - break;; - * ) - break;; - esac -done -test "$ac_cv_exeext" = no && ac_cv_exeext= - -else - ac_file='' -fi -if test -z "$ac_file"; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -$as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error 77 "C compiler cannot create executables -See \`config.log' for more details" "$LINENO" 5; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 -$as_echo_n "checking for C compiler default output file name... " >&6; } -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 -$as_echo "$ac_file" >&6; } -ac_exeext=$ac_cv_exeext - -rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 -$as_echo_n "checking for suffix of executables... 
" >&6; } -if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # If both `conftest.exe' and `conftest' are `present' (well, observable) -# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will -# work properly (i.e., refer to `conftest.exe'), while it won't with -# `rm'. -for ac_file in conftest.exe conftest conftest.*; do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - break;; - * ) break;; - esac -done -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of executables: cannot compile and link -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest conftest$ac_cv_exeext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 -$as_echo "$ac_cv_exeext" >&6; } - -rm -f conftest.$ac_ext -EXEEXT=$ac_cv_exeext -ac_exeext=$EXEEXT -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -int -main () -{ -FILE *f = fopen ("conftest.out", "w"); - return ferror (f) || fclose (f) != 0; - - ; - return 0; -} -_ACEOF -ac_clean_files="$ac_clean_files conftest.out" -# Check that the compiler produces executables we can run. If not, either -# the compiler is broken, or we cross compile. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 -$as_echo_n "checking whether we are cross compiling... 
" >&6; } -if test "$cross_compiling" != yes; then - { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if { ac_try='./conftest$ac_cv_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - cross_compiling=no - else - if test "$cross_compiling" = maybe; then - cross_compiling=yes - else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot run C compiled programs. -If you meant to cross compile, use \`--host'. -See \`config.log' for more details" "$LINENO" 5; } - fi - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 -$as_echo "$cross_compiling" >&6; } - -rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 -$as_echo_n "checking for suffix of object files... " >&6; } -if ${ac_cv_objext+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.o conftest.obj -if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>&5 - ac_status=$? 
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - for ac_file in conftest.o conftest.obj conftest.*; do - test -f "$ac_file" || continue; - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; - *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` - break;; - esac -done -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of object files: cannot compile -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest.$ac_cv_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 -$as_echo "$ac_cv_objext" >&6; } -OBJEXT=$ac_cv_objext -ac_objext=$OBJEXT -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 -$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } -if ${ac_cv_c_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -#ifndef __GNUC__ - choke me -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_c_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 -$as_echo "$ac_cv_c_compiler_gnu" >&6; } -if test $ac_compiler_gnu = yes; then - GCC=yes -else - GCC= -fi -ac_test_CFLAGS=${CFLAGS+set} -ac_save_CFLAGS=$CFLAGS -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 -$as_echo_n "checking whether $CC accepts -g... 
" >&6; } -if ${ac_cv_prog_cc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_save_c_werror_flag=$ac_c_werror_flag - ac_c_werror_flag=yes - ac_cv_prog_cc_g=no - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -else - CFLAGS="" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -else - ac_c_werror_flag=$ac_save_c_werror_flag - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - ac_c_werror_flag=$ac_save_c_werror_flag -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 -$as_echo "$ac_cv_prog_cc_g" >&6; } -if test "$ac_test_CFLAGS" = set; then - CFLAGS=$ac_save_CFLAGS -elif test $ac_cv_prog_cc_g = yes; then - if test "$GCC" = yes; then - CFLAGS="-g -O2" - else - CFLAGS="-g" - fi -else - if test "$GCC" = yes; then - CFLAGS="-O2" - else - CFLAGS= - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 -$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } -if ${ac_cv_prog_cc_c89+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_cv_prog_cc_c89=no -ac_save_CC=$CC -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#include -#include -/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ -struct buf { int x; }; -FILE * (*rcsopen) (struct buf *, struct stat *, int); -static char *e (p, i) - char **p; - int i; -{ - return p[i]; -} -static char *f (char * (*g) (char **, int), char **p, ...) -{ - char *s; - va_list v; - va_start (v,p); - s = g (p, va_arg (v,int)); - va_end (v); - return s; -} - -/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has - function prototypes and stuff, but not '\xHH' hex character constants. - These don't provoke an error unfortunately, instead are silently treated - as 'x'. The following induces an error, until -std is added to get - proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an - array size at least. It's necessary to write '\x00'==0 to get something - that's true only with -std. */ -int osf4_cc_array ['\x00' == 0 ? 1 : -1]; - -/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters - inside strings and character constants. */ -#define FOO(x) 'x' -int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; - -int test (int i, double x); -struct s1 {int (*f) (int a);}; -struct s2 {int (*f) (double a);}; -int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); -int argc; -char **argv; -int -main () -{ -return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; - ; - return 0; -} -_ACEOF -for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ - -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" -do - CC="$ac_save_CC $ac_arg" - if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_c89=$ac_arg -fi -rm -f core conftest.err conftest.$ac_objext - test "x$ac_cv_prog_cc_c89" != "xno" && break -done -rm -f conftest.$ac_ext -CC=$ac_save_CC - -fi -# AC_CACHE_VAL -case "x$ac_cv_prog_cc_c89" in - x) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 -$as_echo "none needed" >&6; } ;; - xno) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 -$as_echo "unsupported" >&6; } ;; - *) - CC="$CC $ac_cv_prog_cc_c89" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 -$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; -esac -if test "x$ac_cv_prog_cc_c89" != xno; then : - -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -DEPDIR="${am__leading_dot}deps" - -ac_config_commands="$ac_config_commands depfiles" - - -am_make=${MAKE-make} -cat > confinc << 'END' -am__doit: - @echo this is the am__doit target -.PHONY: am__doit -END -# If we don't find an include directive, just comment out the code. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 -$as_echo_n "checking for style of include used by $am_make... " >&6; } -am__include="#" -am__quote= -_am_result=none -# First try GNU make style include. -echo "include confinc" > confmf -# Ignore all kinds of additional output from `make'. 
-case `$am_make -s -f confmf 2> /dev/null` in #( -*the\ am__doit\ target*) - am__include=include - am__quote= - _am_result=GNU - ;; -esac -# Now try BSD make style include. -if test "$am__include" = "#"; then - echo '.include "confinc"' > confmf - case `$am_make -s -f confmf 2> /dev/null` in #( - *the\ am__doit\ target*) - am__include=.include - am__quote="\"" - _am_result=BSD - ;; - esac -fi - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 -$as_echo "$_am_result" >&6; } -rm -f confinc confmf - -# Check whether --enable-dependency-tracking was given. -if test "${enable_dependency_tracking+set}" = set; then : - enableval=$enable_dependency_tracking; -fi - -if test "x$enable_dependency_tracking" != xno; then - am_depcomp="$ac_aux_dir/depcomp" - AMDEPBACKSLASH='\' -fi - if test "x$enable_dependency_tracking" != xno; then - AMDEP_TRUE= - AMDEP_FALSE='#' -else - AMDEP_TRUE='#' - AMDEP_FALSE= -fi - - - -depcc="$CC" am_compiler_list= - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 -$as_echo_n "checking dependency style of $depcc... " >&6; } -if ${am_cv_CC_dependencies_compiler_type+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then - # We make a subdir and do the tests there. Otherwise we can end up - # making bogus files that we don't know about and never remove. For - # instance it was reported that on HP-UX the gcc test will end up - # making a dummy file named `D' -- because `-MD' means `put the output - # in D'. - mkdir conftest.dir - # Copy depcomp to subdir because otherwise we won't find it if we're - # using a relative directory. - cp "$am_depcomp" conftest.dir - cd conftest.dir - # We will build objects and dependencies in a subdirectory because - # it helps to detect inapplicable dependency modes. 
For instance - # both Tru64's cc and ICC support -MD to output dependencies as a - # side effect of compilation, but ICC will put the dependencies in - # the current directory while Tru64 will put them in the object - # directory. - mkdir sub - - am_cv_CC_dependencies_compiler_type=none - if test "$am_compiler_list" = ""; then - am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` - fi - am__universal=false - case " $depcc " in #( - *\ -arch\ *\ -arch\ *) am__universal=true ;; - esac - - for depmode in $am_compiler_list; do - # Setup a source with many dependencies, because some compilers - # like to wrap large dependency lists on column 80 (with \), and - # we should not choose a depcomp mode which is confused by this. - # - # We need to recreate these files for each test, as the compiler may - # overwrite some of them when testing with obscure command lines. - # This happens at least with the AIX C compiler. - : > sub/conftest.c - for i in 1 2 3 4 5 6; do - echo '#include "conftst'$i'.h"' >> sub/conftest.c - # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with - # Solaris 8's {/usr,}/bin/sh. - touch sub/conftst$i.h - done - echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf - - # We check with `-c' and `-o' for the sake of the "dashmstdout" - # mode. It turns out that the SunPro C++ compiler does not properly - # handle `-M -o', and we need to detect this. Also, some Intel - # versions had trouble with output in subdirs - am__obj=sub/conftest.${OBJEXT-o} - am__minus_obj="-o $am__obj" - case $depmode in - gcc) - # This depmode causes a compiler race in universal mode. 
- test "$am__universal" = false || continue - ;; - nosideeffect) - # after this tag, mechanisms are not by side-effect, so they'll - # only be used when explicitly requested - if test "x$enable_dependency_tracking" = xyes; then - continue - else - break - fi - ;; - msvisualcpp | msvcmsys) - # This compiler won't grok `-c -o', but also, the minuso test has - # not run yet. These depmodes are late enough in the game, and - # so weak that their functioning should not be impacted. - am__obj=conftest.${OBJEXT-o} - am__minus_obj= - ;; - none) break ;; - esac - if depmode=$depmode \ - source=sub/conftest.c object=$am__obj \ - depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ - $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ - >/dev/null 2>conftest.err && - grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && - grep $am__obj sub/conftest.Po > /dev/null 2>&1 && - ${MAKE-make} -s -f confmf > /dev/null 2>&1; then - # icc doesn't choke on unknown options, it will just issue warnings - # or remarks (even with -Werror). So we grep stderr for any message - # that says an option was ignored or not supported. - # When given -MP, icc 7.0 and 7.1 complain thusly: - # icc: Command line warning: ignoring option '-M'; no argument required - # The diagnosis changed in icc 8.0: - # icc: Command line remark: option '-MP' not supported - if (grep 'ignoring option' conftest.err || - grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else - am_cv_CC_dependencies_compiler_type=$depmode - break - fi - fi - done - - cd .. 
- rm -rf conftest.dir -else - am_cv_CC_dependencies_compiler_type=none -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 -$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } -CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type - - if - test "x$enable_dependency_tracking" != xno \ - && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then - am__fastdepCC_TRUE= - am__fastdepCC_FALSE='#' -else - am__fastdepCC_TRUE='#' - am__fastdepCC_FALSE= -fi - - -if test "x$CC" != xcc; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5 -$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5 -$as_echo_n "checking whether cc understands -c and -o together... " >&6; } -fi -set dummy $CC; ac_cc=`$as_echo "$2" | - sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -# Make sure it works both with $CC and with simple cc. -# We do the test twice because some compilers refuse to overwrite an -# existing .o file with -o, though they will create one. -ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5' -rm -f conftest2.* -if { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && - test -f conftest2.$ac_objext && { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; -then - eval ac_cv_prog_cc_${ac_cc}_c_o=yes - if test "x$CC" != xcc; then - # Test first that cc exists at all. - if { ac_try='cc -c conftest.$ac_ext >&5' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5' - rm -f conftest2.* - if { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && - test -f conftest2.$ac_objext && { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; - then - # cc works too. - : - else - # cc exists but doesn't like -o. 
- eval ac_cv_prog_cc_${ac_cc}_c_o=no - fi - fi - fi -else - eval ac_cv_prog_cc_${ac_cc}_c_o=no -fi -rm -f core conftest* - -fi -if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - -$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h - -fi - -# FIXME: we rely on the cache variable name because -# there is no other way. -set dummy $CC -am_cc=`echo $2 | sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -if test "$am_t" != yes; then - # Losing compiler, so override with the script. - # FIXME: It is wrong to rewrite CC. - # But if we don't then we get into trouble of one sort or another. - # A longer-term fix would be to have automake use am__CC in this case, - # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" - CC="$am_aux_dir/compile $CC" -fi - - -#if test "$ac_cv_prog_cc_${ac_cc}_c_o" = yes; then -# case "$AM_CFLAGS" in -# "-g") ;; -# *) AM_CFLAGS="${AM_CFLAGS:+$AM_CFLAGS }-g";; -# esac -#fi -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 -$as_echo_n "checking how to run the C preprocessor... " >&6; } -# On Suns, sometimes $CPP names a directory. -if test -n "$CPP" && test -d "$CPP"; then - CPP= -fi -if test -z "$CPP"; then - if ${ac_cv_prog_CPP+:} false; then : - $as_echo_n "(cached) " >&6 -else - # Double quotes because CPP needs to be expanded - for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" - do - ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. 
- # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - -else - # Broken: fails on valid input. -continue -fi -rm -f conftest.err conftest.i conftest.$ac_ext - - # OK, works on sane cases. Now check whether nonexistent headers - # can be detected and how. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - # Broken: success on invalid input. -continue -else - # Passes both tests. -ac_preproc_ok=: -break -fi -rm -f conftest.err conftest.i conftest.$ac_ext - -done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.i conftest.err conftest.$ac_ext -if $ac_preproc_ok; then : - break -fi - - done - ac_cv_prog_CPP=$CPP - -fi - CPP=$ac_cv_prog_CPP -else - ac_cv_prog_CPP=$CPP -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 -$as_echo "$CPP" >&6; } -ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. - # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - -else - # Broken: fails on valid input. -continue -fi -rm -f conftest.err conftest.i conftest.$ac_ext - - # OK, works on sane cases. 
Now check whether nonexistent headers - # can be detected and how. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - # Broken: success on invalid input. -continue -else - # Passes both tests. -ac_preproc_ok=: -break -fi -rm -f conftest.err conftest.i conftest.$ac_ext - -done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.i conftest.err conftest.$ac_ext -if $ac_preproc_ok; then : - -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "C preprocessor \"$CPP\" fails sanity check -See \`config.log' for more details" "$LINENO" 5; } -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 -$as_echo_n "checking whether ln -s works... " >&6; } -LN_S=$as_ln_s -if test "$LN_S" = "ln -s"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 -$as_echo "no, using $LN_S" >&6; } -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 -$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } -set x ${MAKE-make} -ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` -if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat >conftest.make <<\_ACEOF -SHELL = /bin/sh -all: - @echo '@@@%%%=$(MAKE)=@@@%%%' -_ACEOF -# GNU make sometimes prints "make[1]: Entering ...", which would confuse us. 
-case `${MAKE-make} -f conftest.make 2>/dev/null` in - *@@@%%%=?*=@@@%%%*) - eval ac_cv_prog_make_${ac_make}_set=yes;; - *) - eval ac_cv_prog_make_${ac_make}_set=no;; -esac -rm -f conftest.make -fi -if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - SET_MAKE= -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - SET_MAKE="MAKE=${MAKE-make}" -fi - -for ac_prog in 'bison -y' byacc -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_YACC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$YACC"; then - ac_cv_prog_YACC="$YACC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_YACC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -YACC=$ac_cv_prog_YACC -if test -n "$YACC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $YACC" >&5 -$as_echo "$YACC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$YACC" && break -done -test -n "$YACC" || YACC="yacc" - - -for ac_prog in flex lex -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_LEX+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$LEX"; then - ac_cv_prog_LEX="$LEX" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_LEX="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -LEX=$ac_cv_prog_LEX -if test -n "$LEX"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LEX" >&5 -$as_echo "$LEX" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$LEX" && break -done -test -n "$LEX" || LEX=":" - -if test "x$LEX" != "x:"; then - cat >conftest.l <<_ACEOF -%% -a { ECHO; } -b { REJECT; } -c { yymore (); } -d { yyless (1); } -e { yyless (input () != 0); } -f { unput (yytext[0]); } -. { BEGIN INITIAL; } -%% -#ifdef YYTEXT_POINTER -extern char *yytext; -#endif -int -main (void) -{ - return ! yylex () + ! yywrap (); -} -_ACEOF -{ { ac_try="$LEX conftest.l" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$LEX conftest.l") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking lex output file root" >&5 -$as_echo_n "checking lex output file root... " >&6; } -if ${ac_cv_prog_lex_root+:} false; then : - $as_echo_n "(cached) " >&6 -else - -# UVCDAT patch (DL) This fails on some apple Travis CI builds, and isn't used by this package. -# Just disable the file test. 
-# This: - $as_echo_n "(skipped) " >&6 -# Replaces this: -#if test -f lex.yy.c; then -# ac_cv_prog_lex_root=lex.yy -#elif test -f lexyy.c; then -# ac_cv_prog_lex_root=lexyy -#else -# as_fn_error $? "cannot find output from $LEX; giving up" "$LINENO" 5 -#fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_lex_root" >&5 -$as_echo "$ac_cv_prog_lex_root" >&6; } -LEX_OUTPUT_ROOT=$ac_cv_prog_lex_root - -if test -z "${LEXLIB+set}"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking lex library" >&5 -$as_echo_n "checking lex library... " >&6; } -if ${ac_cv_lib_lex+:} false; then : - $as_echo_n "(cached) " >&6 -else - - ac_save_LIBS=$LIBS - ac_cv_lib_lex='none needed' - for ac_lib in '' -lfl -ll; do - LIBS="$ac_lib $ac_save_LIBS" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -`cat $LEX_OUTPUT_ROOT.c` -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_lex=$ac_lib -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - test "$ac_cv_lib_lex" != 'none needed' && break - done - LIBS=$ac_save_LIBS - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lex" >&5 -$as_echo "$ac_cv_lib_lex" >&6; } - test "$ac_cv_lib_lex" != 'none needed' && LEXLIB=$ac_cv_lib_lex -fi - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether yytext is a pointer" >&5 -$as_echo_n "checking whether yytext is a pointer... " >&6; } -if ${ac_cv_prog_lex_yytext_pointer+:} false; then : - $as_echo_n "(cached) " >&6 -else - # POSIX says lex can declare yytext either as a pointer or an array; the -# default is implementation-dependent. Figure out which it is, since -# not all implementations provide the %pointer and %array declarations. -ac_cv_prog_lex_yytext_pointer=no -ac_save_LIBS=$LIBS -LIBS="$LEXLIB $ac_save_LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - - #define YYTEXT_POINTER 1 -`cat $LEX_OUTPUT_ROOT.c` -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_prog_lex_yytext_pointer=yes -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_save_LIBS - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_lex_yytext_pointer" >&5 -$as_echo "$ac_cv_prog_lex_yytext_pointer" >&6; } -if test $ac_cv_prog_lex_yytext_pointer = yes; then - -$as_echo "#define YYTEXT_POINTER 1" >>confdefs.h - -fi -rm -f conftest.l $LEX_OUTPUT_ROOT.c - -fi -if test "$LEX" = :; then - LEX=${am_missing_run}flex -fi -ac_ext=${ac_fc_srcext-f} -ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' -ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_fc_compiler_gnu -if test -n "$ac_tool_prefix"; then - for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77 - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$FC"; then - ac_cv_prog_FC="$FC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_FC="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -FC=$ac_cv_prog_FC -if test -n "$FC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $FC" >&5 -$as_echo "$FC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$FC" && break - done -fi -if test -z "$FC"; then - ac_ct_FC=$FC - for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77 -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_FC"; then - ac_cv_prog_ac_ct_FC="$ac_ct_FC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_FC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_FC=$ac_cv_prog_ac_ct_FC -if test -n "$ac_ct_FC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_FC" >&5 -$as_echo "$ac_ct_FC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_FC" && break -done - - if test "x$ac_ct_FC" = x; then - FC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - FC=$ac_ct_FC - fi -fi - - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for Fortran compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... - 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done -rm -f a.out - -# If we don't use `.F' as extension, the preprocessor is not run on the -# input file. (Note that this only needs to work for GNU compilers.) 
-ac_save_ext=$ac_ext -ac_ext=F -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Fortran compiler" >&5 -$as_echo_n "checking whether we are using the GNU Fortran compiler... " >&6; } -if ${ac_cv_fc_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat > conftest.$ac_ext <<_ACEOF - program main -#ifndef __GNUC__ - choke me -#endif - - end -_ACEOF -if ac_fn_fc_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_fc_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_fc_compiler_gnu" >&5 -$as_echo "$ac_cv_fc_compiler_gnu" >&6; } -ac_ext=$ac_save_ext -ac_test_FCFLAGS=${FCFLAGS+set} -ac_save_FCFLAGS=$FCFLAGS -FCFLAGS= -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $FC accepts -g" >&5 -$as_echo_n "checking whether $FC accepts -g... " >&6; } -if ${ac_cv_prog_fc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - FCFLAGS=-g -cat > conftest.$ac_ext <<_ACEOF - program main - - end -_ACEOF -if ac_fn_fc_try_compile "$LINENO"; then : - ac_cv_prog_fc_g=yes -else - ac_cv_prog_fc_g=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_fc_g" >&5 -$as_echo "$ac_cv_prog_fc_g" >&6; } -if test "$ac_test_FCFLAGS" = set; then - FCFLAGS=$ac_save_FCFLAGS -elif test $ac_cv_prog_fc_g = yes; then - if test "x$ac_cv_fc_compiler_gnu" = xyes; then - FCFLAGS="-g -O2" - else - FCFLAGS="-g" - fi -else - if test "x$ac_cv_fc_compiler_gnu" = xyes; then - FCFLAGS="-O2" - else - FCFLAGS= - fi -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -# Checks for libraries. 
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing dirname" >&5 -$as_echo_n "checking for library containing dirname... " >&6; } -if ${ac_cv_search_dirname+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_func_search_save_LIBS=$LIBS -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dirname (); -int -main () -{ -return dirname (); - ; - return 0; -} -_ACEOF -for ac_lib in '' gen; do - if test -z "$ac_lib"; then - ac_res="none required" - else - ac_res=-l$ac_lib - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - fi - if ac_fn_c_try_link "$LINENO"; then : - ac_cv_search_dirname=$ac_res -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext - if ${ac_cv_search_dirname+:} false; then : - break -fi -done -if ${ac_cv_search_dirname+:} false; then : - -else - ac_cv_search_dirname=no -fi -rm conftest.$ac_ext -LIBS=$ac_func_search_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_dirname" >&5 -$as_echo "$ac_cv_search_dirname" >&6; } -ac_res=$ac_cv_search_dirname -if test "$ac_res" != no; then : - test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" - -else - as_fn_error $? "cannot find function dirname" "$LINENO" 5 -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing log10" >&5 -$as_echo_n "checking for library containing log10... " >&6; } -if ${ac_cv_search_log10+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_func_search_save_LIBS=$LIBS -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char log10 (); -int -main () -{ -return log10 (); - ; - return 0; -} -_ACEOF -for ac_lib in '' m; do - if test -z "$ac_lib"; then - ac_res="none required" - else - ac_res=-l$ac_lib - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - fi - if ac_fn_c_try_link "$LINENO"; then : - ac_cv_search_log10=$ac_res -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext - if ${ac_cv_search_log10+:} false; then : - break -fi -done -if ${ac_cv_search_log10+:} false; then : - -else - ac_cv_search_log10=no -fi -rm conftest.$ac_ext -LIBS=$ac_func_search_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_log10" >&5 -$as_echo "$ac_cv_search_log10" >&6; } -ac_res=$ac_cv_search_log10 -if test "$ac_res" != no; then : - test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" - -else - as_fn_error $? "cannot find function log10" "$LINENO" 5 -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing XML_StopParser" >&5 -$as_echo_n "checking for library containing XML_StopParser... " >&6; } -if ${ac_cv_search_XML_StopParser+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_func_search_save_LIBS=$LIBS -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char XML_StopParser (); -int -main () -{ -return XML_StopParser (); - ; - return 0; -} -_ACEOF -for ac_lib in '' expat; do - if test -z "$ac_lib"; then - ac_res="none required" - else - ac_res=-l$ac_lib - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - fi - if ac_fn_c_try_link "$LINENO"; then : - ac_cv_search_XML_StopParser=$ac_res -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext - if ${ac_cv_search_XML_StopParser+:} false; then : - break -fi -done -if ${ac_cv_search_XML_StopParser+:} false; then : - -else - ac_cv_search_XML_StopParser=no -fi -rm conftest.$ac_ext -LIBS=$ac_func_search_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_XML_StopParser" >&5 -$as_echo "$ac_cv_search_XML_StopParser" >&6; } -ac_res=$ac_cv_search_XML_StopParser -if test "$ac_res" != no; then : - test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" - -else - as_fn_error $? "cannot find EXPAT function XML_StopParser" "$LINENO" 5 -fi - - -# Checks for header files. - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 -$as_echo_n "checking for grep that handles long lines and -e... " >&6; } -if ${ac_cv_path_GREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$GREP"; then - ac_path_GREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in grep ggrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue -# Check for GNU ac_path_GREP and select it if it is found. 
- # Check for GNU $ac_path_GREP -case `"$ac_path_GREP" --version 2>&1` in -*GNU*) - ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'GREP' >> "conftest.nl" - "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_GREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_GREP="$ac_path_GREP" - ac_path_GREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_GREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_GREP"; then - as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_GREP=$GREP -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 -$as_echo "$ac_cv_path_GREP" >&6; } - GREP="$ac_cv_path_GREP" - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 -$as_echo_n "checking for egrep... " >&6; } -if ${ac_cv_path_EGREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 - then ac_cv_path_EGREP="$GREP -E" - else - if test -z "$EGREP"; then - ac_path_EGREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_prog in egrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue -# Check for GNU ac_path_EGREP and select it if it is found. - # Check for GNU $ac_path_EGREP -case `"$ac_path_EGREP" --version 2>&1` in -*GNU*) - ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'EGREP' >> "conftest.nl" - "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_EGREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_EGREP="$ac_path_EGREP" - ac_path_EGREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_EGREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_EGREP"; then - as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_EGREP=$EGREP -fi - - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 -$as_echo "$ac_cv_path_EGREP" >&6; } - EGREP="$ac_cv_path_EGREP" - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 -$as_echo_n "checking for ANSI C header files... " >&6; } -if ${ac_cv_header_stdc+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -#include -#include -#include -#include - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_header_stdc=yes -else - ac_cv_header_stdc=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -if test $ac_cv_header_stdc = yes; then - # SunOS 4.x string.h does not declare mem*, contrary to ANSI. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "memchr" >/dev/null 2>&1; then : - -else - ac_cv_header_stdc=no -fi -rm -f conftest* - -fi - -if test $ac_cv_header_stdc = yes; then - # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "free" >/dev/null 2>&1; then : - -else - ac_cv_header_stdc=no -fi -rm -f conftest* - -fi - -if test $ac_cv_header_stdc = yes; then - # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. - if test "$cross_compiling" = yes; then : - : -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#if ((' ' & 0x0FF) == 0x020) -# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') -# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) -#else -# define ISLOWER(c) \ - (('a' <= (c) && (c) <= 'i') \ - || ('j' <= (c) && (c) <= 'r') \ - || ('s' <= (c) && (c) <= 'z')) -# define TOUPPER(c) (ISLOWER(c) ? 
((c) | 0x40) : (c)) -#endif - -#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) -int -main () -{ - int i; - for (i = 0; i < 256; i++) - if (XOR (islower (i), ISLOWER (i)) - || toupper (i) != TOUPPER (i)) - return 2; - return 0; -} -_ACEOF -if ac_fn_c_try_run "$LINENO"; then : - -else - ac_cv_header_stdc=no -fi -rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ - conftest.$ac_objext conftest.beam conftest.$ac_ext -fi - -fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 -$as_echo "$ac_cv_header_stdc" >&6; } -if test $ac_cv_header_stdc = yes; then - -$as_echo "#define STDC_HEADERS 1" >>confdefs.h - -fi - -# On IRIX 5.3, sys/types and inttypes.h are conflicting. -for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ - inttypes.h stdint.h unistd.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default -" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - - -for ac_header in float.h inttypes.h stddef.h stdlib.h string.h strings.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - - -# Checks for the CUNIT unit-testing package -LD_CUNIT= -{ $as_echo "$as_me:${as_lineno-$LINENO}: Checking for the CUNIT unit-testing package." >&5 -$as_echo "$as_me: Checking for the CUNIT unit-testing package." 
>&6;} -ac_fn_c_check_header_mongrel "$LINENO" "CUnit/CUnit.h" "ac_cv_header_CUnit_CUnit_h" "$ac_includes_default" -if test "x$ac_cv_header_CUnit_CUnit_h" = xyes; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CU_initialize_registry in -lcunit" >&5 -$as_echo_n "checking for CU_initialize_registry in -lcunit... " >&6; } -if ${ac_cv_lib_cunit_CU_initialize_registry+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-lcunit $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char CU_initialize_registry (); -int -main () -{ -return CU_initialize_registry (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_cunit_CU_initialize_registry=yes -else - ac_cv_lib_cunit_CU_initialize_registry=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_cunit_CU_initialize_registry" >&5 -$as_echo "$ac_cv_lib_cunit_CU_initialize_registry" >&6; } -if test "x$ac_cv_lib_cunit_CU_initialize_registry" = xyes; then : - LD_CUNIT=-lcunit -fi - -fi - - - -if test "$LD_CUNIT"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: CUNIT found. Enabling unit-tests." >&5 -$as_echo "$as_me: CUNIT found. Enabling unit-tests." >&6;} -else - { $as_echo "$as_me:${as_lineno-$LINENO}: CUNIT not found. Disabling unit-tests." >&5 -$as_echo "$as_me: CUNIT not found. Disabling unit-tests." >&6;} -fi - if test "$LD_CUNIT"; then - HAVE_CUNIT_TRUE= - HAVE_CUNIT_FALSE='#' -else - HAVE_CUNIT_TRUE='#' - HAVE_CUNIT_FALSE= -fi - - -# Checks for typedefs, structures, and compiler characteristics. 
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 -$as_echo_n "checking for an ANSI C-conforming const... " >&6; } -if ${ac_cv_c_const+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -/* FIXME: Include the comments suggested by Paul. */ -#ifndef __cplusplus - /* Ultrix mips cc rejects this. */ - typedef int charset[2]; - const charset cs; - /* SunOS 4.1.1 cc rejects this. */ - char const *const *pcpcc; - char **ppc; - /* NEC SVR4.0.2 mips cc rejects this. */ - struct point {int x, y;}; - static struct point const zero = {0,0}; - /* AIX XL C 1.02.0.0 rejects this. - It does not let you subtract one const X* pointer from another in - an arm of an if-expression whose if-part is not a constant - expression */ - const char *g = "string"; - pcpcc = &g + (g ? g-g : 0); - /* HPUX 7.0 cc rejects these. */ - ++pcpcc; - ppc = (char**) pcpcc; - pcpcc = (char const *const *) ppc; - { /* SCO 3.2v4 cc rejects this. */ - char *t; - char const *s = 0 ? (char *) 0 : (char const *) 0; - - *t++ = 0; - if (s) return 0; - } - { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ - int x[] = {25, 17}; - const int *foo = &x[0]; - ++foo; - } - { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ - typedef const int *iptr; - iptr p = 0; - ++p; - } - { /* AIX XL C 1.02.0.0 rejects this saying - "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. 
*/ - struct s { int j; const int *ap[3]; }; - struct s *b; b->j = 5; - } - { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ - const int foo = 10; - if (!foo) return 0; - } - return !cs[0] && !zero.x; -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_c_const=yes -else - ac_cv_c_const=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 -$as_echo "$ac_cv_c_const" >&6; } -if test $ac_cv_c_const = no; then - -$as_echo "#define const /**/" >>confdefs.h - -fi - -ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" -if test "x$ac_cv_type_size_t" = xyes; then : - -else - -cat >>confdefs.h <<_ACEOF -#define size_t unsigned int -_ACEOF - -fi - - -# Checks for library functions. -for ac_func in floor memmove memset modf pow strcasecmp strdup strpbrk -do : - as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` -ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" -if eval test \"x\$"$as_ac_var"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 -_ACEOF - -fi -done - - -case `pwd` in - *\ * | *\ *) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 -$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; -esac - - - -macro_version='2.4.2' -macro_revision='1.3337' - - - - - - - - - - - - - -ltmain="$ac_aux_dir/ltmain.sh" - -# Make sure we can run config.sub. -$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || - as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 -$as_echo_n "checking build system type... 
" >&6; } -if ${ac_cv_build+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_build_alias=$build_alias -test "x$ac_build_alias" = x && - ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` -test "x$ac_build_alias" = x && - as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 -ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || - as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 -$as_echo "$ac_cv_build" >&6; } -case $ac_cv_build in -*-*-*) ;; -*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; -esac -build=$ac_cv_build -ac_save_IFS=$IFS; IFS='-' -set x $ac_cv_build -shift -build_cpu=$1 -build_vendor=$2 -shift; shift -# Remember, the first character of IFS is used to create $*, -# except with old shells: -build_os=$* -IFS=$ac_save_IFS -case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 -$as_echo_n "checking host system type... " >&6; } -if ${ac_cv_host+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test "x$host_alias" = x; then - ac_cv_host=$ac_cv_build -else - ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || - as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 -$as_echo "$ac_cv_host" >&6; } -case $ac_cv_host in -*-*-*) ;; -*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; -esac -host=$ac_cv_host -ac_save_IFS=$IFS; IFS='-' -set x $ac_cv_host -shift -host_cpu=$1 -host_vendor=$2 -shift; shift -# Remember, the first character of IFS is used to create $*, -# except with old shells: -host_os=$* -IFS=$ac_save_IFS -case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac - - -# Backslashify metacharacters that are still active within -# double-quoted strings. 
-sed_quote_subst='s/\(["`$\\]\)/\\\1/g' - -# Same as above, but do not quote variable references. -double_quote_subst='s/\(["`\\]\)/\\\1/g' - -# Sed substitution to delay expansion of an escaped shell variable in a -# double_quote_subst'ed string. -delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' - -# Sed substitution to delay expansion of an escaped single quote. -delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' - -# Sed substitution to avoid accidental globbing in evaled expressions -no_glob_subst='s/\*/\\\*/g' - -ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO -ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 -$as_echo_n "checking how to print strings... " >&6; } -# Test print first, because it will be a builtin if present. -if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' -elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='printf %s\n' -else - # Use this function as a fallback that always works. - func_fallback_echo () - { - eval 'cat <<_LTECHO_EOF -$1 -_LTECHO_EOF' - } - ECHO='func_fallback_echo' -fi - -# func_echo_all arg... -# Invoke $ECHO with all args, space-separated. -func_echo_all () -{ - $ECHO "" -} - -case "$ECHO" in - printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 -$as_echo "printf" >&6; } ;; - print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 -$as_echo "print -r" >&6; } ;; - *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5 -$as_echo "cat" >&6; } ;; -esac - - - - - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 -$as_echo_n "checking for a sed that does not truncate output... 
" >&6; } -if ${ac_cv_path_SED+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ - for ac_i in 1 2 3 4 5 6 7; do - ac_script="$ac_script$as_nl$ac_script" - done - echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed - { ac_script=; unset ac_script;} - if test -z "$SED"; then - ac_path_SED_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in sed gsed; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue -# Check for GNU ac_path_SED and select it if it is found. - # Check for GNU $ac_path_SED -case `"$ac_path_SED" --version 2>&1` in -*GNU*) - ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo '' >> "conftest.nl" - "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_SED_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_SED="$ac_path_SED" - ac_path_SED_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_SED_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_SED"; then - as_fn_error $? 
"no acceptable sed could be found in \$PATH" "$LINENO" 5 - fi -else - ac_cv_path_SED=$SED -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 -$as_echo "$ac_cv_path_SED" >&6; } - SED="$ac_cv_path_SED" - rm -f conftest.sed - -test -z "$SED" && SED=sed -Xsed="$SED -e 1s/^X//" - - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 -$as_echo_n "checking for fgrep... " >&6; } -if ${ac_cv_path_FGREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 - then ac_cv_path_FGREP="$GREP -F" - else - if test -z "$FGREP"; then - ac_path_FGREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in fgrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue -# Check for GNU ac_path_FGREP and select it if it is found. 
- # Check for GNU $ac_path_FGREP -case `"$ac_path_FGREP" --version 2>&1` in -*GNU*) - ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'FGREP' >> "conftest.nl" - "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_FGREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_FGREP="$ac_path_FGREP" - ac_path_FGREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_FGREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_FGREP"; then - as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_FGREP=$FGREP -fi - - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 -$as_echo "$ac_cv_path_FGREP" >&6; } - FGREP="$ac_cv_path_FGREP" - - -test -z "$GREP" && GREP=grep - - - - - - - - - - - - - - - - - - - -# Check whether --with-gnu-ld was given. -if test "${with_gnu_ld+set}" = set; then : - withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes -else - with_gnu_ld=no -fi - -ac_prog=ld -if test "$GCC" = yes; then - # Check if gcc -print-prog-name=ld gives a path. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 -$as_echo_n "checking for ld used by $CC... 
" >&6; } - case $host in - *-*-mingw*) - # gcc leaves a trailing carriage return which upsets mingw - ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; - *) - ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; - esac - case $ac_prog in - # Accept absolute paths. - [\\/]* | ?:[\\/]*) - re_direlt='/[^/][^/]*/\.\./' - # Canonicalize the pathname of ld - ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` - while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do - ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` - done - test -z "$LD" && LD="$ac_prog" - ;; - "") - # If it fails, then pretend we aren't using GCC. - ac_prog=ld - ;; - *) - # If it is relative, then search for the first ld in PATH. - with_gnu_ld=unknown - ;; - esac -elif test "$with_gnu_ld" = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 -$as_echo_n "checking for GNU ld... " >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 -$as_echo_n "checking for non-GNU ld... " >&6; } -fi -if ${lt_cv_path_LD+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$LD"; then - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then - lt_cv_path_LD="$ac_dir/$ac_prog" - # Check to see if the program is GNU ld. I'd rather use --version, - # but apparently some variants of GNU ld only accept -v. - # Break only if it was the GNU/non-GNU ld that we prefer. - case `"$lt_cv_path_LD" -v 2>&1 &5 -$as_echo "$LD" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi -test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 -$as_echo_n "checking if the linker ($LD) is GNU ld... 
" >&6; } -if ${lt_cv_prog_gnu_ld+:} false; then : - $as_echo_n "(cached) " >&6 -else - # I'd rather use --version here, but apparently some GNU lds only accept -v. -case `$LD -v 2>&1 &5 -$as_echo "$lt_cv_prog_gnu_ld" >&6; } -with_gnu_ld=$lt_cv_prog_gnu_ld - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 -$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; } -if ${lt_cv_path_NM+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$NM"; then - # Let the user override the test. - lt_cv_path_NM="$NM" -else - lt_nm_to_check="${ac_tool_prefix}nm" - if test -n "$ac_tool_prefix" && test "$build" = "$host"; then - lt_nm_to_check="$lt_nm_to_check nm" - fi - for lt_tmp_nm in $lt_nm_to_check; do - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - tmp_nm="$ac_dir/$lt_tmp_nm" - if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then - # Check to see if the nm accepts a BSD-compat flag. 
- # Adding the `sed 1q' prevents false positives on HP-UX, which says: - # nm: unknown option "B" ignored - # Tru64's nm complains that /dev/null is an invalid object file - case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in - */dev/null* | *'Invalid file or object type'*) - lt_cv_path_NM="$tmp_nm -B" - break - ;; - *) - case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in - */dev/null*) - lt_cv_path_NM="$tmp_nm -p" - break - ;; - *) - lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but - continue # so that we can try to find one that supports BSD flags - ;; - esac - ;; - esac - fi - done - IFS="$lt_save_ifs" - done - : ${lt_cv_path_NM=no} -fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 -$as_echo "$lt_cv_path_NM" >&6; } -if test "$lt_cv_path_NM" != "no"; then - NM="$lt_cv_path_NM" -else - # Didn't find any BSD compatible name lister, look for dumpbin. - if test -n "$DUMPBIN"; then : - # Let the user override the test. - else - if test -n "$ac_tool_prefix"; then - for ac_prog in dumpbin "link -dump" - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_DUMPBIN+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$DUMPBIN"; then - ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -DUMPBIN=$ac_cv_prog_DUMPBIN -if test -n "$DUMPBIN"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 -$as_echo "$DUMPBIN" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$DUMPBIN" && break - done -fi -if test -z "$DUMPBIN"; then - ac_ct_DUMPBIN=$DUMPBIN - for ac_prog in dumpbin "link -dump" -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_DUMPBIN"; then - ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN -if test -n "$ac_ct_DUMPBIN"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 -$as_echo "$ac_ct_DUMPBIN" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_DUMPBIN" && break -done - - if test "x$ac_ct_DUMPBIN" = x; then - DUMPBIN=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - DUMPBIN=$ac_ct_DUMPBIN - fi -fi - - case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in - *COFF*) - DUMPBIN="$DUMPBIN -symbols" - ;; - *) - DUMPBIN=: - ;; - esac - fi - - if test "$DUMPBIN" != ":"; then - NM="$DUMPBIN" - fi -fi -test -z "$NM" && NM=nm - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 -$as_echo_n "checking the name lister ($NM) interface... 
" >&6; } -if ${lt_cv_nm_interface+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_nm_interface="BSD nm" - echo "int some_variable = 0;" > conftest.$ac_ext - (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5) - (eval "$ac_compile" 2>conftest.err) - cat conftest.err >&5 - (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5) - (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) - cat conftest.err >&5 - (eval echo "\"\$as_me:$LINENO: output\"" >&5) - cat conftest.out >&5 - if $GREP 'External.*some_variable' conftest.out > /dev/null; then - lt_cv_nm_interface="MS dumpbin" - fi - rm -f conftest* -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 -$as_echo "$lt_cv_nm_interface" >&6; } - -# find the maximum length of command line arguments -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 -$as_echo_n "checking the maximum length of command line arguments... " >&6; } -if ${lt_cv_sys_max_cmd_len+:} false; then : - $as_echo_n "(cached) " >&6 -else - i=0 - teststring="ABCD" - - case $build_os in - msdosdjgpp*) - # On DJGPP, this test can blow up pretty badly due to problems in libc - # (any single argument exceeding 2000 bytes causes a buffer overrun - # during glob expansion). Even if it were fixed, the result of this - # check would be larger than it should be. - lt_cv_sys_max_cmd_len=12288; # 12K is about right - ;; - - gnu*) - # Under GNU Hurd, this test is not required because there is - # no limit to the length of command line arguments. - # Libtool will interpret -1 as no limit whatsoever - lt_cv_sys_max_cmd_len=-1; - ;; - - cygwin* | mingw* | cegcc*) - # On Win9x/ME, this test blows up -- it succeeds, but takes - # about 5 minutes as the teststring grows exponentially. 
- # Worse, since 9x/ME are not pre-emptively multitasking, - # you end up with a "frozen" computer, even though with patience - # the test eventually succeeds (with a max line length of 256k). - # Instead, let's just punt: use the minimum linelength reported by - # all of the supported platforms: 8192 (on NT/2K/XP). - lt_cv_sys_max_cmd_len=8192; - ;; - - mint*) - # On MiNT this can take a long time and run out of memory. - lt_cv_sys_max_cmd_len=8192; - ;; - - amigaos*) - # On AmigaOS with pdksh, this test takes hours, literally. - # So we just punt and use a minimum line length of 8192. - lt_cv_sys_max_cmd_len=8192; - ;; - - netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) - # This has been around since 386BSD, at least. Likely further. - if test -x /sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` - elif test -x /usr/sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` - else - lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs - fi - # And add a safety zone - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - ;; - - interix*) - # We know the value 262144 and hardcode it with a safety zone (like BSD) - lt_cv_sys_max_cmd_len=196608 - ;; - - os2*) - # The test takes a long time on OS/2. - lt_cv_sys_max_cmd_len=8192 - ;; - - osf*) - # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure - # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not - # nice to cause kernel panics so lets avoid the loop below. - # First set a reasonable default. 
- lt_cv_sys_max_cmd_len=16384 - # - if test -x /sbin/sysconfig; then - case `/sbin/sysconfig -q proc exec_disable_arg_limit` in - *1*) lt_cv_sys_max_cmd_len=-1 ;; - esac - fi - ;; - sco3.2v5*) - lt_cv_sys_max_cmd_len=102400 - ;; - sysv5* | sco5v6* | sysv4.2uw2*) - kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` - if test -n "$kargmax"; then - lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` - else - lt_cv_sys_max_cmd_len=32768 - fi - ;; - *) - lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` - if test -n "$lt_cv_sys_max_cmd_len"; then - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - else - # Make teststring a little bigger before we do anything with it. - # a 1K string should be a reasonable start. - for i in 1 2 3 4 5 6 7 8 ; do - teststring=$teststring$teststring - done - SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} - # If test is not a shell built-in, we'll probably end up computing a - # maximum length that is only half of the actual maximum length, but - # we can't tell. - while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ - = "X$teststring$teststring"; } >/dev/null 2>&1 && - test $i != 17 # 1/2 MB should be enough - do - i=`expr $i + 1` - teststring=$teststring$teststring - done - # Only check the string length outside the loop. - lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` - teststring= - # Add a significant safety factor because C++ compilers can tack on - # massive amounts of additional arguments before passing them to the - # linker. It appears as though 1/2 is a usable value. 
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` - fi - ;; - esac - -fi - -if test -n $lt_cv_sys_max_cmd_len ; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 -$as_echo "$lt_cv_sys_max_cmd_len" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 -$as_echo "none" >&6; } -fi -max_cmd_len=$lt_cv_sys_max_cmd_len - - - - - - -: ${CP="cp -f"} -: ${MV="mv -f"} -: ${RM="rm -f"} - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 -$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } -# Try some XSI features -xsi_shell=no -( _lt_dummy="a/b/c" - test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ - = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 -$as_echo "$xsi_shell" >&6; } - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 -$as_echo_n "checking whether the shell understands \"+=\"... 
" >&6; } -lt_shell_append=no -( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ - >/dev/null 2>&1 \ - && lt_shell_append=yes -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 -$as_echo "$lt_shell_append" >&6; } - - -if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - lt_unset=unset -else - lt_unset=false -fi - - - - - -# test EBCDIC or ASCII -case `echo X|tr X '\101'` in - A) # ASCII based system - # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr - lt_SP2NL='tr \040 \012' - lt_NL2SP='tr \015\012 \040\040' - ;; - *) # EBCDIC based system - lt_SP2NL='tr \100 \n' - lt_NL2SP='tr \r\n \100\100' - ;; -esac - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -if ${lt_cv_to_host_file_cmd+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $host in - *-*-mingw* ) - case $build in - *-*-mingw* ) # actually msys - lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 - ;; - *-*-cygwin* ) - lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 - ;; - * ) # otherwise, assume *nix - lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 - ;; - esac - ;; - *-*-cygwin* ) - case $build in - *-*-mingw* ) # actually msys - lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin - ;; - *-*-cygwin* ) - lt_cv_to_host_file_cmd=func_convert_file_noop - ;; - * ) # otherwise, assume *nix - lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin - ;; - esac - ;; - * ) # unhandled hosts (and "normal" native builds) - lt_cv_to_host_file_cmd=func_convert_file_noop - ;; -esac - -fi - -to_host_file_cmd=$lt_cv_to_host_file_cmd -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -$as_echo "$lt_cv_to_host_file_cmd" >&6; } - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -$as_echo_n "checking 
how to convert $build file names to toolchain format... " >&6; } -if ${lt_cv_to_tool_file_cmd+:} false; then : - $as_echo_n "(cached) " >&6 -else - #assume ordinary cross tools, or native build. -lt_cv_to_tool_file_cmd=func_convert_file_noop -case $host in - *-*-mingw* ) - case $build in - *-*-mingw* ) # actually msys - lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 - ;; - esac - ;; -esac - -fi - -to_tool_file_cmd=$lt_cv_to_tool_file_cmd -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -$as_echo "$lt_cv_to_tool_file_cmd" >&6; } - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 -$as_echo_n "checking for $LD option to reload object files... " >&6; } -if ${lt_cv_ld_reload_flag+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_ld_reload_flag='-r' -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 -$as_echo "$lt_cv_ld_reload_flag" >&6; } -reload_flag=$lt_cv_ld_reload_flag -case $reload_flag in -"" | " "*) ;; -*) reload_flag=" $reload_flag" ;; -esac -reload_cmds='$LD$reload_flag -o $output$reload_objs' -case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - if test "$GCC" != yes; then - reload_cmds=false - fi - ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' - else - reload_cmds='$LD$reload_flag -o $output$reload_objs' - fi - ;; -esac - - - - - - - - - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. -set dummy ${ac_tool_prefix}objdump; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_OBJDUMP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$OBJDUMP"; then - ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. 
-else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -OBJDUMP=$ac_cv_prog_OBJDUMP -if test -n "$OBJDUMP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 -$as_echo "$OBJDUMP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_OBJDUMP"; then - ac_ct_OBJDUMP=$OBJDUMP - # Extract the first word of "objdump", so it can be a program name with args. -set dummy objdump; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_OBJDUMP"; then - ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_OBJDUMP="objdump" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP -if test -n "$ac_ct_OBJDUMP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 -$as_echo "$ac_ct_OBJDUMP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_OBJDUMP" = x; then - OBJDUMP="false" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - OBJDUMP=$ac_ct_OBJDUMP - fi -else - OBJDUMP="$ac_cv_prog_OBJDUMP" -fi - -test -z "$OBJDUMP" && OBJDUMP=objdump - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 -$as_echo_n "checking how to recognize dependent libraries... " >&6; } -if ${lt_cv_deplibs_check_method+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_file_magic_cmd='$MAGIC_CMD' -lt_cv_file_magic_test_file= -lt_cv_deplibs_check_method='unknown' -# Need to set the preceding variable on all platforms that support -# interlibrary dependencies. -# 'none' -- dependencies not supported. -# `unknown' -- same as none, but documents that we really don't know. -# 'pass_all' -- all dependencies passed with no checks. -# 'test_compile' -- check by making test program. -# 'file_magic [[regex]]' -- check by looking for files in library path -# which responds to the $file_magic_cmd with a given extended regex. 
-# If you have `file' or equivalent on your system and you're not sure -# whether `pass_all' will *always* work, you probably want this one. - -case $host_os in -aix[4-9]*) - lt_cv_deplibs_check_method=pass_all - ;; - -beos*) - lt_cv_deplibs_check_method=pass_all - ;; - -bsdi[45]*) - lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' - lt_cv_file_magic_cmd='/usr/bin/file -L' - lt_cv_file_magic_test_file=/shlib/libc.so - ;; - -cygwin*) - # func_win32_libid is a shell function defined in ltmain.sh - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - ;; - -mingw* | pw32*) - # Base MSYS/MinGW do not provide the 'file' command needed by - # func_win32_libid shell function, so use a weaker test based on 'objdump', - # unless we find 'file', for example because we are cross-compiling. - # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. - if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else - # Keep this pattern in sync with the one in func_win32_libid. - lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; - -cegcc*) - # use the weaker test based on 'objdump'. See mingw*. - lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' - lt_cv_file_magic_cmd='$OBJDUMP -f' - ;; - -darwin* | rhapsody*) - lt_cv_deplibs_check_method=pass_all - ;; - -freebsd* | dragonfly*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then - case $host_cpu in - i*86 ) - # Not sure whether the presence of OpenBSD here was a mistake. - # Let's accept both of them until this is cleared up. 
- lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' - lt_cv_file_magic_cmd=/usr/bin/file - lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` - ;; - esac - else - lt_cv_deplibs_check_method=pass_all - fi - ;; - -gnu*) - lt_cv_deplibs_check_method=pass_all - ;; - -haiku*) - lt_cv_deplibs_check_method=pass_all - ;; - -hpux10.20* | hpux11*) - lt_cv_file_magic_cmd=/usr/bin/file - case $host_cpu in - ia64*) - lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' - lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so - ;; - hppa*64*) - lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]' - lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl - ;; - *) - lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library' - lt_cv_file_magic_test_file=/usr/lib/libc.sl - ;; - esac - ;; - -interix[3-9]*) - # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' - ;; - -irix5* | irix6* | nonstopux*) - case $LD in - *-32|*"-32 ") libmagic=32-bit;; - *-n32|*"-n32 ") libmagic=N32;; - *-64|*"-64 ") libmagic=64-bit;; - *) libmagic=never-match;; - esac - lt_cv_deplibs_check_method=pass_all - ;; - -# This must be glibc/ELF. 
-linux* | k*bsd*-gnu | kopensolaris*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - -netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' - fi - ;; - -newos6*) - lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' - lt_cv_file_magic_cmd=/usr/bin/file - lt_cv_file_magic_test_file=/usr/lib/libnls.so - ;; - -*nto* | *qnx*) - lt_cv_deplibs_check_method=pass_all - ;; - -openbsd*) - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' - fi - ;; - -osf3* | osf4* | osf5*) - lt_cv_deplibs_check_method=pass_all - ;; - -rdos*) - lt_cv_deplibs_check_method=pass_all - ;; - -solaris*) - lt_cv_deplibs_check_method=pass_all - ;; - -sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) - lt_cv_deplibs_check_method=pass_all - ;; - -sysv4 | sysv4.3*) - case $host_vendor in - motorola) - lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' - lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` - ;; - ncr) - lt_cv_deplibs_check_method=pass_all - ;; - sequent) - lt_cv_file_magic_cmd='/bin/file' - lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' - ;; - sni) - lt_cv_file_magic_cmd='/bin/file' - lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" - lt_cv_file_magic_test_file=/lib/libc.so - ;; - siemens) - lt_cv_deplibs_check_method=pass_all - ;; - pc) - lt_cv_deplibs_check_method=pass_all - ;; - esac - ;; - -tpf*) - lt_cv_deplibs_check_method=pass_all - ;; -esac - -fi -{ 
$as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 -$as_echo "$lt_cv_deplibs_check_method" >&6; } - -file_magic_glob= -want_nocaseglob=no -if test "$build" = "$host"; then - case $host_os in - mingw* | pw32*) - if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then - want_nocaseglob=yes - else - file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` - fi - ;; - esac -fi - -file_magic_cmd=$lt_cv_file_magic_cmd -deplibs_check_method=$lt_cv_deplibs_check_method -test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - - - - - - - - - - - - - - - - - - - - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_DLLTOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$DLLTOOL"; then - ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -DLLTOOL=$ac_cv_prog_DLLTOOL -if test -n "$DLLTOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -$as_echo "$DLLTOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_DLLTOOL"; then - ac_ct_DLLTOOL=$DLLTOOL - # Extract the first word of "dlltool", so it can be a program name with args. 
-set dummy dlltool; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_DLLTOOL"; then - ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_DLLTOOL="dlltool" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -if test -n "$ac_ct_DLLTOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -$as_echo "$ac_ct_DLLTOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_DLLTOOL" = x; then - DLLTOOL="false" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - DLLTOOL=$ac_ct_DLLTOOL - fi -else - DLLTOOL="$ac_cv_prog_DLLTOOL" -fi - -test -z "$DLLTOOL" && DLLTOOL=dlltool - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -$as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } -if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_sharedlib_from_linklib_cmd='unknown' - -case $host_os in -cygwin* | mingw* | pw32* | cegcc*) - # two different shell functions defined in ltmain.sh - # decide which to use based on capabilities of $DLLTOOL - case `$DLLTOOL --help 2>&1` in - *--identify-strict*) - lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib - ;; - *) - lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback - ;; - esac - ;; -*) - # fallback: assume linklib IS sharedlib - lt_cv_sharedlib_from_linklib_cmd="$ECHO" - ;; -esac - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO - - - - - - - -if test -n "$ac_tool_prefix"; then - for ac_prog in ar - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_AR+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$AR"; then - ac_cv_prog_AR="$AR" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -AR=$ac_cv_prog_AR -if test -n "$AR"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 -$as_echo "$AR" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$AR" && break - done -fi -if test -z "$AR"; then - ac_ct_AR=$AR - for ac_prog in ar -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_AR+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_AR"; then - ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_AR=$ac_cv_prog_ac_ct_AR -if test -n "$ac_ct_AR"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 -$as_echo "$ac_ct_AR" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_AR" && break -done - - if test "x$ac_ct_AR" = x; then - AR="false" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - AR=$ac_ct_AR - fi -fi - -: ${AR=ar} -: ${AR_FLAGS=cru} - - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -$as_echo_n "checking for archiver @FILE support... " >&6; } -if ${lt_cv_ar_at_file+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_ar_at_file=no - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - echo conftest.$ac_objext > conftest.lst - lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' - { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 - (eval $lt_ar_try) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if test "$ac_status" -eq 0; then - # Ensure the archiver fails upon bogus file names. - rm -f conftest.$ac_objext libconftest.a - { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 - (eval $lt_ar_try) 2>&5 - ac_status=$? 
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if test "$ac_status" -ne 0; then - lt_cv_ar_at_file=@ - fi - fi - rm -f conftest.* libconftest.a - -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -$as_echo "$lt_cv_ar_at_file" >&6; } - -if test "x$lt_cv_ar_at_file" = xno; then - archiver_list_spec= -else - archiver_list_spec=$lt_cv_ar_at_file -fi - - - - - - - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. -set dummy ${ac_tool_prefix}strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$STRIP"; then - ac_cv_prog_STRIP="$STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_STRIP="${ac_tool_prefix}strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -STRIP=$ac_cv_prog_STRIP -if test -n "$STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 -$as_echo "$STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_STRIP"; then - ac_ct_STRIP=$STRIP - # Extract the first word of "strip", so it can be a program name with args. -set dummy strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_ac_ct_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_STRIP"; then - ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_STRIP="strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP -if test -n "$ac_ct_STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 -$as_echo "$ac_ct_STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_STRIP" = x; then - STRIP=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - STRIP=$ac_ct_STRIP - fi -else - STRIP="$ac_cv_prog_STRIP" -fi - -test -z "$STRIP" && STRIP=: - - - - - - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. -set dummy ${ac_tool_prefix}ranlib; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_RANLIB+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$RANLIB"; then - ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -RANLIB=$ac_cv_prog_RANLIB -if test -n "$RANLIB"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 -$as_echo "$RANLIB" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_RANLIB"; then - ac_ct_RANLIB=$RANLIB - # Extract the first word of "ranlib", so it can be a program name with args. -set dummy ranlib; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_RANLIB+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_RANLIB"; then - ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_RANLIB="ranlib" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB -if test -n "$ac_ct_RANLIB"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 -$as_echo "$ac_ct_RANLIB" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_RANLIB" = x; then - RANLIB=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - RANLIB=$ac_ct_RANLIB - fi -else - RANLIB="$ac_cv_prog_RANLIB" -fi - -test -z "$RANLIB" && RANLIB=: - - - - - - -# Determine commands to create old-style static archives. -old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' -old_postinstall_cmds='chmod 644 $oldlib' -old_postuninstall_cmds= - -if test -n "$RANLIB"; then - case $host_os in - openbsd*) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" - ;; - *) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" - ;; - esac - old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" -fi - -case $host_os in - darwin*) - lock_old_archive_extraction=yes ;; - *) - lock_old_archive_extraction=no ;; -esac - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# If no C compiler was specified, use CC. -LTCC=${LTCC-"$CC"} - -# If no C compiler flags were specified, use CFLAGS. -LTCFLAGS=${LTCFLAGS-"$CFLAGS"} - -# Allow CC to be a program name with arguments. -compiler=$CC - - -# Check for command to grab the raw symbol name followed by C symbol from nm. 
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 -$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; } -if ${lt_cv_sys_global_symbol_pipe+:} false; then : - $as_echo_n "(cached) " >&6 -else - -# These are sane defaults that work on at least a few old systems. -# [They come from Ultrix. What could be older than Ultrix?!! ;)] - -# Character class describing NM global symbol codes. -symcode='[BCDEGRST]' - -# Regexp to match symbols that can be accessed directly from C. -sympat='\([_A-Za-z][_A-Za-z0-9]*\)' - -# Define system-specific variables. -case $host_os in -aix*) - symcode='[BCDT]' - ;; -cygwin* | mingw* | pw32* | cegcc*) - symcode='[ABCDGISTW]' - ;; -hpux*) - if test "$host_cpu" = ia64; then - symcode='[ABCDEGRST]' - fi - ;; -irix* | nonstopux*) - symcode='[BCDEGRST]' - ;; -osf*) - symcode='[BCDEGQRST]' - ;; -solaris*) - symcode='[BDRT]' - ;; -sco3.2v5*) - symcode='[DT]' - ;; -sysv4.2uw2*) - symcode='[DT]' - ;; -sysv5* | sco5v6* | unixware* | OpenUNIX*) - symcode='[ABDT]' - ;; -sysv4) - symcode='[DFNSTU]' - ;; -esac - -# If we're using GNU nm, then use its standard symbol codes. -case `$NM -V 2>&1` in -*GNU* | *'with BFD'*) - symcode='[ABCDGIRSTW]' ;; -esac - -# Transform an extracted symbol line into a proper C declaration. -# Some systems (esp. on ia64) link data and code symbols differently, -# so use this general approach. 
-lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - -# Transform an extracted symbol line into symbol name and symbol address -lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - -# Handle CRLF in mingw tool chain -opt_cr= -case $build_os in -mingw*) - opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp - ;; -esac - -# Try without a prefix underscore, then with it. -for ac_symprfx in "" "_"; do - - # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. - symxfrm="\\1 $ac_symprfx\\2 \\2" - - # Write the raw and C identifiers. - if test "$lt_cv_nm_interface" = "MS dumpbin"; then - # Fake it for dumpbin and say T for any non-static function - # and D for any global variable. - # Also find C++ and __fastcall symbols from MSVC++, - # which start with @ or ?. - lt_cv_sys_global_symbol_pipe="$AWK '"\ -" {last_section=section; section=\$ 3};"\ -" /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ -" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ -" \$ 0!~/External *\|/{next};"\ -" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ -" {if(hide[section]) next};"\ -" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? 
\"T \" : \"D \"};"\ -" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ -" s[1]~/^[@?]/{print s[1], s[1]; next};"\ -" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ -" ' prfx=^$ac_symprfx" - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi - lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no - - rm -f conftest* - cat > conftest.$ac_ext <<_LT_EOF -#ifdef __cplusplus -extern "C" { -#endif -char nm_test_var; -void nm_test_func(void); -void nm_test_func(void){} -#ifdef __cplusplus -} -#endif -int main(){nm_test_var='a';nm_test_func();return(0);} -_LT_EOF - - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - # Now try to grab the symbols. - nlist=conftest.nm - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5 - (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && test -s "$nlist"; then - # Try sorting and uniquifying the output. - if sort "$nlist" | uniq > "$nlist"T; then - mv -f "$nlist"T "$nlist" - else - rm -f "$nlist"T - fi - - # Make sure that we snagged all the symbols we need. - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ -#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -/* DATA imports from DLLs on WIN32 con't be const, because runtime - relocations are performed -- see ld's documentation on pseudo-relocs. */ -# define LT_DLSYM_CONST -#elif defined(__osf__) -/* This system does not cope well with relocations in const data. */ -# define LT_DLSYM_CONST -#else -# define LT_DLSYM_CONST const -#endif - -#ifdef __cplusplus -extern "C" { -#endif - -_LT_EOF - # Now generate the symbol file. - eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' - - cat <<_LT_EOF >> conftest.$ac_ext - -/* The mapping between symbol names and symbols. */ -LT_DLSYM_CONST struct { - const char *name; - void *address; -} -lt__PROGRAM__LTX_preloaded_symbols[] = -{ - { "@PROGRAM@", (void *) 0 }, -_LT_EOF - $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext - cat <<\_LT_EOF >> conftest.$ac_ext - {0, (void *) 0} -}; - -/* This works around a problem in FreeBSD linker */ -#ifdef FREEBSD_WORKAROUND -static const void *lt_preloaded_setup() { - return lt__PROGRAM__LTX_preloaded_symbols; -} -#endif - -#ifdef __cplusplus -} -#endif -_LT_EOF - # Now try linking the two files. - mv conftest.$ac_objext conftstm.$ac_objext - lt_globsym_save_LIBS=$LIBS - lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi - LIBS=$lt_globsym_save_LIBS - CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi - else - echo "cannot find nm_test_var in $nlist" >&5 - fi - else - echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 - fi - else - echo "$progname: failed program was:" >&5 - cat conftest.$ac_ext >&5 - fi - rm -rf conftest* conftst* - - # Do not use the global_symbol_pipe unless it works. - if test "$pipe_works" = yes; then - break - else - lt_cv_sys_global_symbol_pipe= - fi -done - -fi - -if test -z "$lt_cv_sys_global_symbol_pipe"; then - lt_cv_sys_global_symbol_to_cdecl= -fi -if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 -$as_echo "failed" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 -$as_echo "ok" >&6; } -fi - -# Response file support. -if test "$lt_cv_nm_interface" = "MS dumpbin"; then - nm_file_list_spec='@' -elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then - nm_file_list_spec='@' -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -$as_echo_n "checking for sysroot... " >&6; } - -# Check whether --with-sysroot was given. -if test "${with_sysroot+set}" = set; then : - withval=$with_sysroot; -else - with_sysroot=no -fi - - -lt_sysroot= -case ${with_sysroot} in #( - yes) - if test "$GCC" = yes; then - lt_sysroot=`$CC --print-sysroot 2>/dev/null` - fi - ;; #( - /*) - lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` - ;; #( - no|'') - ;; #( - *) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5 -$as_echo "${with_sysroot}" >&6; } - as_fn_error $? "The sysroot must be an absolute path." 
"$LINENO" 5 - ;; -esac - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -$as_echo "${lt_sysroot:-no}" >&6; } - - - - - -# Check whether --enable-libtool-lock was given. -if test "${enable_libtool_lock+set}" = set; then : - enableval=$enable_libtool_lock; -fi - -test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes - -# Some flags need to be propagated to the compiler or linker for good -# libtool support. -case $host in -ia64-*-hpux*) - # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - case `/usr/bin/file conftest.$ac_objext` in - *ELF-32*) - HPUX_IA64_MODE="32" - ;; - *ELF-64*) - HPUX_IA64_MODE="64" - ;; - esac - fi - rm -rf conftest* - ;; -*-*-irix6*) - # Find out which ABI we are using. - echo '#line '$LINENO' "configure"' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - if test "$lt_cv_prog_gnu_ld" = yes; then - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -melf32bsmip" - ;; - *N32*) - LD="${LD-ld} -melf32bmipn32" - ;; - *64-bit*) - LD="${LD-ld} -melf64bmip" - ;; - esac - else - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -32" - ;; - *N32*) - LD="${LD-ld} -n32" - ;; - *64-bit*) - LD="${LD-ld} -64" - ;; - esac - fi - fi - rm -rf conftest* - ;; - -x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ -s390*-*linux*|s390*-*tpf*|sparc*-*linux*) - # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? 
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - case `/usr/bin/file conftest.o` in - *32-bit*) - case $host in - x86_64-*kfreebsd*-gnu) - LD="${LD-ld} -m elf_i386_fbsd" - ;; - x86_64-*linux*) - LD="${LD-ld} -m elf_i386" - ;; - ppc64-*linux*|powerpc64-*linux*) - LD="${LD-ld} -m elf32ppclinux" - ;; - s390x-*linux*) - LD="${LD-ld} -m elf_s390" - ;; - sparc64-*linux*) - LD="${LD-ld} -m elf32_sparc" - ;; - esac - ;; - *64-bit*) - case $host in - x86_64-*kfreebsd*-gnu) - LD="${LD-ld} -m elf_x86_64_fbsd" - ;; - x86_64-*linux*) - LD="${LD-ld} -m elf_x86_64" - ;; - ppc*-*linux*|powerpc*-*linux*) - LD="${LD-ld} -m elf64ppc" - ;; - s390*-*linux*|s390*-*tpf*) - LD="${LD-ld} -m elf64_s390" - ;; - sparc*-*linux*) - LD="${LD-ld} -m elf64_sparc" - ;; - esac - ;; - esac - fi - rm -rf conftest* - ;; - -*-*-sco3.2v5*) - # On SCO OpenServer 5, we need -belf to get full-featured binaries. - SAVE_CFLAGS="$CFLAGS" - CFLAGS="$CFLAGS -belf" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 -$as_echo_n "checking whether the C compiler needs -belf... " >&6; } -if ${lt_cv_cc_needs_belf+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - lt_cv_cc_needs_belf=yes -else - lt_cv_cc_needs_belf=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 -$as_echo "$lt_cv_cc_needs_belf" >&6; } - if test x"$lt_cv_cc_needs_belf" != x"yes"; then - # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf - CFLAGS="$SAVE_CFLAGS" - fi - ;; -*-*solaris*) - # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - case `/usr/bin/file conftest.o` in - *64-bit*) - case $lt_cv_prog_gnu_ld in - yes*) - case $host in - i?86-*-solaris*) - LD="${LD-ld} -m elf_x86_64" - ;; - sparc*-*-solaris*) - LD="${LD-ld} -m elf64_sparc" - ;; - esac - # GNU ld 2.21 introduced _sol2 emulations. Use them if available. - if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then - LD="${LD-ld}_sol2" - fi - ;; - *) - if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then - LD="${LD-ld} -64" - fi - ;; - esac - ;; - esac - fi - rm -rf conftest* - ;; -esac - -need_locks="$enable_libtool_lock" - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -set dummy ${ac_tool_prefix}mt; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_MANIFEST_TOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$MANIFEST_TOOL"; then - ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -if test -n "$MANIFEST_TOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -$as_echo "$MANIFEST_TOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_MANIFEST_TOOL"; then - ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL - # Extract the first word of "mt", so it can be a program name with args. -set dummy mt; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_MANIFEST_TOOL"; then - ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -if test -n "$ac_ct_MANIFEST_TOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_MANIFEST_TOOL" = x; then - MANIFEST_TOOL=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL - fi -else - MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -fi - -test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -if ${lt_cv_path_mainfest_tool+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_path_mainfest_tool=no - echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 - $MANIFEST_TOOL '-?' 
2>conftest.err > conftest.out - cat conftest.err >&5 - if $GREP 'Manifest Tool' conftest.out > /dev/null; then - lt_cv_path_mainfest_tool=yes - fi - rm -f conftest* -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -$as_echo "$lt_cv_path_mainfest_tool" >&6; } -if test "x$lt_cv_path_mainfest_tool" != xyes; then - MANIFEST_TOOL=: -fi - - - - - - - case $host_os in - rhapsody* | darwin*) - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. -set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_DSYMUTIL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$DSYMUTIL"; then - ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -DSYMUTIL=$ac_cv_prog_DSYMUTIL -if test -n "$DSYMUTIL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 -$as_echo "$DSYMUTIL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_DSYMUTIL"; then - ac_ct_DSYMUTIL=$DSYMUTIL - # Extract the first word of "dsymutil", so it can be a program name with args. -set dummy dsymutil; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_DSYMUTIL"; then - ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL -if test -n "$ac_ct_DSYMUTIL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 -$as_echo "$ac_ct_DSYMUTIL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_DSYMUTIL" = x; then - DSYMUTIL=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - DSYMUTIL=$ac_ct_DSYMUTIL - fi -else - DSYMUTIL="$ac_cv_prog_DSYMUTIL" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. -set dummy ${ac_tool_prefix}nmedit; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_NMEDIT+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$NMEDIT"; then - ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -NMEDIT=$ac_cv_prog_NMEDIT -if test -n "$NMEDIT"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 -$as_echo "$NMEDIT" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_NMEDIT"; then - ac_ct_NMEDIT=$NMEDIT - # Extract the first word of "nmedit", so it can be a program name with args. -set dummy nmedit; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_NMEDIT"; then - ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_NMEDIT="nmedit" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT -if test -n "$ac_ct_NMEDIT"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 -$as_echo "$ac_ct_NMEDIT" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_NMEDIT" = x; then - NMEDIT=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - NMEDIT=$ac_ct_NMEDIT - fi -else - NMEDIT="$ac_cv_prog_NMEDIT" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. -set dummy ${ac_tool_prefix}lipo; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_LIPO+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$LIPO"; then - ac_cv_prog_LIPO="$LIPO" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_LIPO="${ac_tool_prefix}lipo" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -LIPO=$ac_cv_prog_LIPO -if test -n "$LIPO"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 -$as_echo "$LIPO" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_LIPO"; then - ac_ct_LIPO=$LIPO - # Extract the first word of "lipo", so it can be a program name with args. -set dummy lipo; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_LIPO+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_LIPO"; then - ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_LIPO="lipo" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO -if test -n "$ac_ct_LIPO"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 -$as_echo "$ac_ct_LIPO" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_LIPO" = x; then - LIPO=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - LIPO=$ac_ct_LIPO - fi -else - LIPO="$ac_cv_prog_LIPO" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. -set dummy ${ac_tool_prefix}otool; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_OTOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$OTOOL"; then - ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_OTOOL="${ac_tool_prefix}otool" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -OTOOL=$ac_cv_prog_OTOOL -if test -n "$OTOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 -$as_echo "$OTOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_OTOOL"; then - ac_ct_OTOOL=$OTOOL - # Extract the first word of "otool", so it can be a program name with args. -set dummy otool; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_OTOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_OTOOL"; then - ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_OTOOL="otool" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL -if test -n "$ac_ct_OTOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 -$as_echo "$ac_ct_OTOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_OTOOL" = x; then - OTOOL=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - OTOOL=$ac_ct_OTOOL - fi -else - OTOOL="$ac_cv_prog_OTOOL" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. -set dummy ${ac_tool_prefix}otool64; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_OTOOL64+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$OTOOL64"; then - ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -OTOOL64=$ac_cv_prog_OTOOL64 -if test -n "$OTOOL64"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 -$as_echo "$OTOOL64" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_OTOOL64"; then - ac_ct_OTOOL64=$OTOOL64 - # Extract the first word of "otool64", so it can be a program name with args. -set dummy otool64; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_OTOOL64"; then - ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_OTOOL64="otool64" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 -if test -n "$ac_ct_OTOOL64"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 -$as_echo "$ac_ct_OTOOL64" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_OTOOL64" = x; then - OTOOL64=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - OTOOL64=$ac_ct_OTOOL64 - fi -else - OTOOL64="$ac_cv_prog_OTOOL64" -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 -$as_echo_n "checking for -single_module linker flag... " >&6; } -if ${lt_cv_apple_cc_single_mod+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_apple_cc_single_mod=no - if test -z "${LT_MULTI_MODULE}"; then - # By default we will add the -single_module flag. You can override - # by either setting the environment variable LT_MULTI_MODULE - # non-empty at configure time, or by adding -multi_module to the - # link flags. - rm -rf libconftest.dylib* - echo "int foo(void){return 1;}" > conftest.c - echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ --dynamiclib -Wl,-single_module conftest.c" >&5 - $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ - -dynamiclib -Wl,-single_module conftest.c 2>conftest.err - _lt_result=$? 
- # If there is a non-empty error log, and "single_module" - # appears in it, assume the flag caused a linker warning - if test -s conftest.err && $GREP single_module conftest.err; then - cat conftest.err >&5 - # Otherwise, if the output was created with a 0 exit code from - # the compiler, it worked. - elif test -f libconftest.dylib && test $_lt_result -eq 0; then - lt_cv_apple_cc_single_mod=yes - else - cat conftest.err >&5 - fi - rm -rf libconftest.dylib* - rm -f conftest.* - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 -$as_echo "$lt_cv_apple_cc_single_mod" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 -$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } -if ${lt_cv_ld_exported_symbols_list+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_ld_exported_symbols_list=no - save_LDFLAGS=$LDFLAGS - echo "_main" > conftest.sym - LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - lt_cv_ld_exported_symbols_list=yes -else - lt_cv_ld_exported_symbols_list=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS="$save_LDFLAGS" - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 -$as_echo "$lt_cv_ld_exported_symbols_list" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5 -$as_echo_n "checking for -force_load linker flag... 
" >&6; } -if ${lt_cv_ld_force_load+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_ld_force_load=no - cat > conftest.c << _LT_EOF -int forced_loaded() { return 2;} -_LT_EOF - echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 - echo "$RANLIB libconftest.a" >&5 - $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF -int main() { return 0;} -_LT_EOF - echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 - $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err - _lt_result=$? - if test -s conftest.err && $GREP force_load conftest.err; then - cat conftest.err >&5 - elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then - lt_cv_ld_force_load=yes - else - cat conftest.err >&5 - fi - rm -f conftest.err libconftest.a conftest conftest.c - rm -rf conftest.dSYM - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5 -$as_echo "$lt_cv_ld_force_load" >&6; } - case $host_os in - rhapsody* | darwin1.[012]) - _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; - darwin1.*) - _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - darwin*) # darwin 5.x on - # if running on 10.5 or later, the deployment target defaults - # to the OS version, if on x86, and 10.4, the deployment - # target defaults to 10.4. Don't you love it? 
- case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in - 10.0,*86*-darwin8*|10.0,*-darwin[91]*) - _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - 10.[012]*) - _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - 10.*) - _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - esac - ;; - esac - if test "$lt_cv_apple_cc_single_mod" = "yes"; then - _lt_dar_single_mod='$single_module' - fi - if test "$lt_cv_ld_exported_symbols_list" = "yes"; then - _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' - else - _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' - fi - if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then - _lt_dsymutil='~$DSYMUTIL $lib || :' - else - _lt_dsymutil= - fi - ;; - esac - -for ac_header in dlfcn.h -do : - ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default -" -if test "x$ac_cv_header_dlfcn_h" = xyes; then : - cat >>confdefs.h <<_ACEOF -#define HAVE_DLFCN_H 1 -_ACEOF - -fi - -done - - - -func_stripname_cnf () -{ - case ${2} in - .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; - *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; - esac -} # func_stripname_cnf - - - - - - -# Set options - - - - enable_dlopen=no - - - enable_win32_dll=no - - - # Check whether --enable-shared was given. -if test "${enable_shared+set}" = set; then : - enableval=$enable_shared; p=${PACKAGE-default} - case $enableval in - yes) enable_shared=yes ;; - no) enable_shared=no ;; - *) - enable_shared=no - # Look at the argument we got. We use all the common list separators. 
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_shared=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac -else - enable_shared=yes -fi - - - - - - - - - - # Check whether --enable-static was given. -if test "${enable_static+set}" = set; then : - enableval=$enable_static; p=${PACKAGE-default} - case $enableval in - yes) enable_static=yes ;; - no) enable_static=no ;; - *) - enable_static=no - # Look at the argument we got. We use all the common list separators. - lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_static=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac -else - enable_static=yes -fi - - - - - - - - - - -# Check whether --with-pic was given. -if test "${with_pic+set}" = set; then : - withval=$with_pic; lt_p=${PACKAGE-default} - case $withval in - yes|no) pic_mode=$withval ;; - *) - pic_mode=default - # Look at the argument we got. We use all the common list separators. - lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for lt_pkg in $withval; do - IFS="$lt_save_ifs" - if test "X$lt_pkg" = "X$lt_p"; then - pic_mode=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac -else - pic_mode=default -fi - - -test -z "$pic_mode" && pic_mode=default - - - - - - - - # Check whether --enable-fast-install was given. -if test "${enable_fast_install+set}" = set; then : - enableval=$enable_fast_install; p=${PACKAGE-default} - case $enableval in - yes) enable_fast_install=yes ;; - no) enable_fast_install=no ;; - *) - enable_fast_install=no - # Look at the argument we got. We use all the common list separators. 
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_fast_install=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac -else - enable_fast_install=yes -fi - - - - - - - - - - - -# This can be used to rebuild libtool when needed -LIBTOOL_DEPS="$ltmain" - -# Always use our own libtool. -LIBTOOL='$(SHELL) $(top_builddir)/libtool' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -test -z "$LN_S" && LN_S="ln -s" - - - - - - - - - - - - - - -if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 -$as_echo_n "checking for objdir... " >&6; } -if ${lt_cv_objdir+:} false; then : - $as_echo_n "(cached) " >&6 -else - rm -f .libs 2>/dev/null -mkdir .libs 2>/dev/null -if test -d .libs; then - lt_cv_objdir=.libs -else - # MS-DOS does not allow filenames that begin with a dot. - lt_cv_objdir=_libs -fi -rmdir .libs 2>/dev/null -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 -$as_echo "$lt_cv_objdir" >&6; } -objdir=$lt_cv_objdir - - - - - -cat >>confdefs.h <<_ACEOF -#define LT_OBJDIR "$lt_cv_objdir/" -_ACEOF - - - - -case $host_os in -aix3*) - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. - if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi - ;; -esac - -# Global variables: -ofile=libtool -can_build_shared=yes - -# All known linkers require a `.a' archive for static linking (except MSVC, -# which needs '.lib'). 
-libext=a - -with_gnu_ld="$lt_cv_prog_gnu_ld" - -old_CC="$CC" -old_CFLAGS="$CFLAGS" - -# Set sane defaults for various variables -test -z "$CC" && CC=cc -test -z "$LTCC" && LTCC=$CC -test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS -test -z "$LD" && LD=ld -test -z "$ac_objext" && ac_objext=o - -for cc_temp in $compiler""; do - case $cc_temp in - compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; - distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; - \-*) ;; - *) break;; - esac -done -cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - - -# Only perform the check for file, if the check method requires it -test -z "$MAGIC_CMD" && MAGIC_CMD=file -case $deplibs_check_method in -file_magic*) - if test "$file_magic_cmd" = '$MAGIC_CMD'; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 -$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; } -if ${lt_cv_path_MAGIC_CMD+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $MAGIC_CMD in -[\\/*] | ?:[\\/]*) - lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; -*) - lt_save_MAGIC_CMD="$MAGIC_CMD" - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" - for ac_dir in $ac_dummy; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f $ac_dir/${ac_tool_prefix}file; then - lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` - MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : - else - cat <<_LT_EOF 1>&2 - -*** Warning: the command libtool uses to detect shared libraries, -*** $file_magic_cmd, produces output that libtool cannot recognize. -*** The result is that libtool may fail to recognize shared libraries -*** as such. 
This will affect the creation of libtool libraries that -*** depend on shared libraries, but programs linked with such libtool -*** libraries will work regardless of this problem. Nevertheless, you -*** may want to report the problem to your system manager and/or to -*** bug-libtool@gnu.org - -_LT_EOF - fi ;; - esac - fi - break - fi - done - IFS="$lt_save_ifs" - MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; -esac -fi - -MAGIC_CMD="$lt_cv_path_MAGIC_CMD" -if test -n "$MAGIC_CMD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 -$as_echo "$MAGIC_CMD" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - - - -if test -z "$lt_cv_path_MAGIC_CMD"; then - if test -n "$ac_tool_prefix"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 -$as_echo_n "checking for file... " >&6; } -if ${lt_cv_path_MAGIC_CMD+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $MAGIC_CMD in -[\\/*] | ?:[\\/]*) - lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; -*) - lt_save_MAGIC_CMD="$MAGIC_CMD" - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" - for ac_dir in $ac_dummy; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f $ac_dir/file; then - lt_cv_path_MAGIC_CMD="$ac_dir/file" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` - MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : - else - cat <<_LT_EOF 1>&2 - -*** Warning: the command libtool uses to detect shared libraries, -*** $file_magic_cmd, produces output that libtool cannot recognize. -*** The result is that libtool may fail to recognize shared libraries -*** as such. 
This will affect the creation of libtool libraries that -*** depend on shared libraries, but programs linked with such libtool -*** libraries will work regardless of this problem. Nevertheless, you -*** may want to report the problem to your system manager and/or to -*** bug-libtool@gnu.org - -_LT_EOF - fi ;; - esac - fi - break - fi - done - IFS="$lt_save_ifs" - MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; -esac -fi - -MAGIC_CMD="$lt_cv_path_MAGIC_CMD" -if test -n "$MAGIC_CMD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 -$as_echo "$MAGIC_CMD" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - else - MAGIC_CMD=: - fi -fi - - fi - ;; -esac - -# Use C for the default configuration in the libtool script - -lt_save_CC="$CC" -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -# Source file extension for C test sources. -ac_ext=c - -# Object file extension for compiled C test sources. -objext=o -objext=$objext - -# Code to be used in simple compile tests -lt_simple_compile_test_code="int some_variable = 0;" - -# Code to be used in simple link tests -lt_simple_link_test_code='int main(){return(0);}' - - - - - - - -# If no C compiler was specified, use CC. -LTCC=${LTCC-"$CC"} - -# If no C compiler flags were specified, use CFLAGS. -LTCFLAGS=${LTCFLAGS-"$CFLAGS"} - -# Allow CC to be a program name with arguments. -compiler=$CC - -# Save the default compiler, since it gets overwritten when the other -# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. 
-compiler_DEFAULT=$CC - -# save warnings/boilerplate of simple test code -ac_outfile=conftest.$ac_objext -echo "$lt_simple_compile_test_code" >conftest.$ac_ext -eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_compiler_boilerplate=`cat conftest.err` -$RM conftest* - -ac_outfile=conftest.$ac_objext -echo "$lt_simple_link_test_code" >conftest.$ac_ext -eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_linker_boilerplate=`cat conftest.err` -$RM -r conftest* - - -## CAVEAT EMPTOR: -## There is no encapsulation within the following macros, do not change -## the running order or otherwise move them around unless you know exactly -## what you are doing... -if test -n "$compiler"; then - -lt_prog_compiler_no_builtin_flag= - -if test "$GCC" = yes; then - case $cc_basename in - nvcc*) - lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; - *) - lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;; - esac - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 -$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } -if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_rtti_exceptions=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - lt_compiler_flag="-fno-rtti -fno-exceptions" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - # The option is referenced via a variable to avoid confusing sed. 
- lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>conftest.err) - ac_status=$? - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - if (exit $ac_status) && test -s "$ac_outfile"; then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings other than the usual output. - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_rtti_exceptions=yes - fi - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 -$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } - -if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then - lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" -else - : -fi - -fi - - - - - - - lt_prog_compiler_wl= -lt_prog_compiler_pic= -lt_prog_compiler_static= - - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_static='-static' - - case $host_os in - aix*) - # All AIX code is PIC. - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static='-Bstatic' - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - lt_prog_compiler_pic='-fPIC' - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but - # adding the `-m68020' flag to GCC prevents building anything better, - # like `-m68040'. - lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' - ;; - esac - ;; - - beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) - # PIC is the default for these OSes. 
- ;; - - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - lt_prog_compiler_pic='-DDLL_EXPORT' - ;; - - darwin* | rhapsody*) - # PIC is the default on this platform - # Common symbols not allowed in MH_DYLIB files - lt_prog_compiler_pic='-fno-common' - ;; - - haiku*) - # PIC is the default for Haiku. - # The "-static" flag exists, but is broken. - lt_prog_compiler_static= - ;; - - hpux*) - # PIC is the default for 64-bit PA HP-UX, but not for 32-bit - # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag - # sets the default TLS model and affects inlining. - case $host_cpu in - hppa*64*) - # +Z the default - ;; - *) - lt_prog_compiler_pic='-fPIC' - ;; - esac - ;; - - interix[3-9]*) - # Interix 3.x gcc -fpic/-fPIC options generate broken code. - # Instead, we relocate shared libraries at runtime. - ;; - - msdosdjgpp*) - # Just because we use GCC doesn't mean we suddenly get shared libraries - # on systems that don't support them. - lt_prog_compiler_can_build_shared=no - enable_shared=no - ;; - - *nto* | *qnx*) - # QNX uses GNU C++, but need to define -shared option too, otherwise - # it will coredump. - lt_prog_compiler_pic='-fPIC -shared' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - lt_prog_compiler_pic=-Kconform_pic - fi - ;; - - *) - lt_prog_compiler_pic='-fPIC' - ;; - esac - - case $cc_basename in - nvcc*) # Cuda Compiler Driver 2.2 - lt_prog_compiler_wl='-Xlinker ' - if test -n "$lt_prog_compiler_pic"; then - lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic" - fi - ;; - esac - else - # PORTME Check for flag to pass linker flags through the system compiler. 
- case $host_os in - aix*) - lt_prog_compiler_wl='-Wl,' - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static='-Bstatic' - else - lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' - fi - ;; - - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - lt_prog_compiler_pic='-DDLL_EXPORT' - ;; - - hpux9* | hpux10* | hpux11*) - lt_prog_compiler_wl='-Wl,' - # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but - # not for PA HP-UX. - case $host_cpu in - hppa*64*|ia64*) - # +Z the default - ;; - *) - lt_prog_compiler_pic='+Z' - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? - lt_prog_compiler_static='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) - lt_prog_compiler_wl='-Wl,' - # PIC (with -KPIC) is the default. - lt_prog_compiler_static='-non_shared' - ;; - - linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in - # old Intel for x86_64 which still supported -KPIC. - ecc*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-static' - ;; - # icc used to be incompatible with GCC. - # ICC 10 doesn't accept -KPIC any more. - icc* | ifort*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-fPIC' - lt_prog_compiler_static='-static' - ;; - # Lahey Fortran 8.1. 
- lf95*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; - nagfor*) - # NAG Fortran compiler - lt_prog_compiler_wl='-Wl,-Wl,,' - lt_prog_compiler_pic='-PIC' - lt_prog_compiler_static='-Bstatic' - ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-fpic' - lt_prog_compiler_static='-Bstatic' - ;; - ccc*) - lt_prog_compiler_wl='-Wl,' - # All Alpha code is PIC. - lt_prog_compiler_static='-non_shared' - ;; - xl* | bgxl* | bgf* | mpixl*) - # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-qpic' - lt_prog_compiler_static='-qstaticlink' - ;; - *) - case `$CC -V 2>&1 | sed 5q` in - *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*) - # Sun Fortran 8.3 passes all unrecognized flags to the linker - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - lt_prog_compiler_wl='' - ;; - *Sun\ F* | *Sun*Fortran*) - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - lt_prog_compiler_wl='-Qoption ld ' - ;; - *Sun\ C*) - # Sun C 5.9 - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - lt_prog_compiler_wl='-Wl,' - ;; - *Intel*\ [CF]*Compiler*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-fPIC' - lt_prog_compiler_static='-static' - ;; - *Portland\ Group*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-fpic' - lt_prog_compiler_static='-Bstatic' - ;; - esac - ;; - esac - ;; - - newsos6) - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - ;; - - *nto* | *qnx*) - # QNX uses GNU C++, but need to define -shared option too, otherwise - # it will coredump. - lt_prog_compiler_pic='-fPIC -shared' - ;; - - osf3* | osf4* | osf5*) - lt_prog_compiler_wl='-Wl,' - # All OSF/1 code is PIC. 
- lt_prog_compiler_static='-non_shared' - ;; - - rdos*) - lt_prog_compiler_static='-non_shared' - ;; - - solaris*) - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in - f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; - esac - ;; - - sunos4*) - lt_prog_compiler_wl='-Qoption ld ' - lt_prog_compiler_pic='-PIC' - lt_prog_compiler_static='-Bstatic' - ;; - - sysv4 | sysv4.2uw2* | sysv4.3*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - ;; - - sysv4*MP*) - if test -d /usr/nec ;then - lt_prog_compiler_pic='-Kconform_pic' - lt_prog_compiler_static='-Bstatic' - fi - ;; - - sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - ;; - - unicos*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_can_build_shared=no - ;; - - uts4*) - lt_prog_compiler_pic='-pic' - lt_prog_compiler_static='-Bstatic' - ;; - - *) - lt_prog_compiler_can_build_shared=no - ;; - esac - fi - -case $host_os in - # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - lt_prog_compiler_pic= - ;; - *) - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; -esac - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -if ${lt_cv_prog_compiler_pic+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -$as_echo "$lt_cv_prog_compiler_pic" >&6; } -lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - -# -# Check to make sure the PIC flag actually works. 
-# -if test -n "$lt_prog_compiler_pic"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 -$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; } -if ${lt_cv_prog_compiler_pic_works+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_pic_works=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - lt_compiler_flag="$lt_prog_compiler_pic -DPIC" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - # The option is referenced via a variable to avoid confusing sed. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>conftest.err) - ac_status=$? - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - if (exit $ac_status) && test -s "$ac_outfile"; then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings other than the usual output. - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_pic_works=yes - fi - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 -$as_echo "$lt_cv_prog_compiler_pic_works" >&6; } - -if test x"$lt_cv_prog_compiler_pic_works" = xyes; then - case $lt_prog_compiler_pic in - "" | " "*) ;; - *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; - esac -else - lt_prog_compiler_pic= - lt_prog_compiler_can_build_shared=no -fi - -fi - - - - - - - - - - - -# -# Check to make sure the static flag actually works. -# -wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 -$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } -if ${lt_cv_prog_compiler_static_works+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_static_works=no - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $lt_tmp_static_flag" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then - # The linker can only warn and ignore the option if not recognized - # So say no if there are warnings - if test -s conftest.err; then - # Append any errors to the config.log. 
- cat conftest.err 1>&5 - $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_static_works=yes - fi - else - lt_cv_prog_compiler_static_works=yes - fi - fi - $RM -r conftest* - LDFLAGS="$save_LDFLAGS" - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 -$as_echo "$lt_cv_prog_compiler_static_works" >&6; } - -if test x"$lt_cv_prog_compiler_static_works" = xyes; then - : -else - lt_prog_compiler_static= -fi - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 -$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } -if ${lt_cv_prog_compiler_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_c_o=no - $RM -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - lt_cv_prog_compiler_c_o=yes - fi - fi - chmod u+w . 2>&5 - $RM conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files - $RM out/* && rmdir out - cd .. - $RM -r conftest - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 -$as_echo "$lt_cv_prog_compiler_c_o" >&6; } - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 -$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } -if ${lt_cv_prog_compiler_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_c_o=no - $RM -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - lt_cv_prog_compiler_c_o=yes - fi - fi - chmod u+w . 2>&5 - $RM conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files - $RM out/* && rmdir out - cd .. - $RM -r conftest - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 -$as_echo "$lt_cv_prog_compiler_c_o" >&6; } - - - - -hard_links="nottested" -if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 -$as_echo_n "checking if we can lock with hard links... " >&6; } - hard_links=yes - $RM conftest* - ln conftest.a conftest.b 2>/dev/null && hard_links=no - touch conftest.a - ln conftest.a conftest.b 2>&5 || hard_links=no - ln conftest.a conftest.b 2>/dev/null && hard_links=no - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 -$as_echo "$hard_links" >&6; } - if test "$hard_links" = no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} - need_locks=warn - fi -else - need_locks=no -fi - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 -$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } - - runpath_var= - allow_undefined_flag= - always_export_symbols=no - archive_cmds= - archive_expsym_cmds= - compiler_needs_object=no - enable_shared_with_static_runtimes=no - export_dynamic_flag_spec= - export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - hardcode_automatic=no - hardcode_direct=no - hardcode_direct_absolute=no - hardcode_libdir_flag_spec= - hardcode_libdir_separator= - hardcode_minus_L=no - hardcode_shlibpath_var=unsupported - inherit_rpath=no - link_all_deplibs=unknown - module_cmds= - module_expsym_cmds= - old_archive_from_new_cmds= - old_archive_from_expsyms_cmds= - thread_safe_flag_spec= - whole_archive_flag_spec= - # include_expsyms should be a list of space-separated symbols to be *always* - # included in the symbol list - include_expsyms= - # exclude_expsyms can be an extended regexp of symbols to exclude - # it will be wrapped by ` (' and `)$', so one must not match beginning or - # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', - # as well as any symbol that contains `d'. - exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if - # the symbol is explicitly referenced. Since portable code cannot - # rely on this symbol name, it's probably fine to never include it in - # preloaded symbol tables. - # Exclude shared library initialization/finalization symbols. - extract_expsyms_cmds= - - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. 
- if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; - interix*) - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; - openbsd*) - with_gnu_ld=no - ;; - esac - - ld_shlibs=yes - - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. - lt_use_gnu_ld_interface=no - if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility - # with the native linker. However, as the warning in the GNU ld - # block says, versions before 2.19.5* couldn't really create working - # shared libraries, regardless of the interface used. - case `$LD -v 2>&1` in - *\ \(GNU\ Binutils\)\ 2.19.5*) ;; - *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; - *\ \(GNU\ Binutils\)\ [3-9]*) ;; - *) - lt_use_gnu_ld_interface=yes - ;; - esac - ;; - *) - lt_use_gnu_ld_interface=yes - ;; - esac - fi - - if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty - wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - export_dynamic_flag_spec='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then - whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - whole_archive_flag_spec= - fi - supports_anon_versioning=no - case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... - *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... 
- *\ 2.11.*) ;; # other 2.11 versions - *) supports_anon_versioning=yes ;; - esac - - # See if GNU ld supports shared libraries. - case $host_os in - aix[3-9]*) - # On AIX/PPC, the GNU linker is very broken - if test "$host_cpu" != ia64; then - ld_shlibs=no - cat <<_LT_EOF 1>&2 - -*** Warning: the GNU linker, at least up to release 2.19, is reported -*** to be unable to reliably create shared libraries on AIX. -*** Therefore, libtool is disabling shared libraries support. If you -*** really care for shared libraries, you may want to install binutils -*** 2.20 or above, or modify your PATH so that a non-GNU linker is found. -*** You will then need to restart the configuration process. - -_LT_EOF - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='' - ;; - m68k) - archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - hardcode_libdir_flag_spec='-L$libdir' - hardcode_minus_L=yes - ;; - esac - ;; - - beos*) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - allow_undefined_flag=unsupported - # Joseph Beckenbach says some releases of gcc - # support --undefined. This deserves some investigation. FIXME - archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - ld_shlibs=no - fi - ;; - - cygwin* | mingw* | pw32* | cegcc*) - # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, - # as there is no search path for DLLs. 
- hardcode_libdir_flag_spec='-L$libdir' - export_dynamic_flag_spec='${wl}--export-all-symbols' - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes - export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' - exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - # If the export-symbols file already is a .def file (1st line - # is EXPORTS), use it as is; otherwise, prepend... - archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - cp $export_symbols $output_objdir/$soname.def; - else - echo EXPORTS > $output_objdir/$soname.def; - cat $export_symbols >> $output_objdir/$soname.def; - fi~ - $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs=no - fi - ;; - - haiku*) - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - link_all_deplibs=yes - ;; - - interix[3-9]*) - hardcode_direct=no - hardcode_shlibpath_var=no - hardcode_libdir_flag_spec='${wl}-rpath,$libdir' - export_dynamic_flag_spec='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. 
Moving up from 0x10000000 also allows more sbrk(2) space. - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no - if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ - && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler - whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers - whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; - efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 - tmp_addflag=' -i_dynamic -nofor_main' ;; - ifc* | ifort*) # Intel Fortran compiler - tmp_addflag=' -nofor_main' ;; - lf95*) # Lahey Fortran 8.1 - whole_archive_flag_spec= - tmp_sharedflag='--shared' ;; - xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) - 
tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 - whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 - whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac - archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ - $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ - $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - 
;; - esac - else - ld_shlibs=no - fi - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - - solaris*) - if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then - ld_shlibs=no - cat <<_LT_EOF 1>&2 - -*** Warning: The releases 2.8.* of the GNU linker cannot reliably -*** create shared libraries on Solaris systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.9.1 or newer. Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. - -_LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) - case `$LD -v 2>&1` in - *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) - ld_shlibs=no - cat <<_LT_EOF 1>&2 - -*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not -*** reliably create shared libraries on SCO systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.16.91.0.3 or newer. Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. 
- -_LT_EOF - ;; - *) - # For security reasons, it is highly recommended that you always - # use absolute paths for naming shared libraries, and exclude the - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi - ;; - esac - ;; - - sunos4*) - archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' - wlarc= - hardcode_direct=yes - hardcode_shlibpath_var=no - ;; - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi - ;; - esac - - if test "$ld_shlibs" = no; then - runpath_var= - hardcode_libdir_flag_spec= - export_dynamic_flag_spec= - whole_archive_flag_spec= - fi - else - # PORTME fill in a description of your system's linker (not GNU ld) - case $host_os in - aix3*) - allow_undefined_flag=unsupported - always_export_symbols=yes - archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. 
- hardcode_minus_L=yes - if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - hardcode_direct=unsupported - fi - ;; - - aix[4-9]*) - if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' - no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. - # -C means demangle to AIX nm, but means don't demangle with GNU nm - # Also, AIX nm treats weak defined symbols like other global - # defined symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then - export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else - export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we - # need to do runtime linking. - case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) - for ld_flag in $LDFLAGS; do - if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done - ;; - esac - - exp_sym_flag='-bexport' - no_entry_flag='-bnoentry' - fi - - # When large executables or shared objects are built, AIX ld can - # have problems creating the table of contents. If linking a library - # or program results in "error TOC overflow" add -mminimal-toc to - # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not - # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
- - archive_cmds='' - hardcode_direct=yes - hardcode_direct_absolute=yes - hardcode_libdir_separator=':' - link_all_deplibs=yes - file_list_spec='${wl}-f,' - - if test "$GCC" = yes; then - case $host_os in aix4.[012]|aix4.[012].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ - collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then - # We have reworked collect2 - : - else - # We have old collect2 - hardcode_direct=unsupported - # It fails to find uninstalled libraries when the uninstalled - # path is not listed in the libpath. Setting hardcode_minus_L - # to unsupported forces relinking - hardcode_minus_L=yes - hardcode_libdir_flag_spec='-L$libdir' - hardcode_libdir_separator= - fi - ;; - esac - shared_flag='-shared' - if test "$aix_use_runtimelinking" = yes; then - shared_flag="$shared_flag "'${wl}-G' - fi - else - # not using gcc - if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else - if test "$aix_use_runtimelinking" = yes; then - shared_flag='${wl}-G' - else - shared_flag='${wl}-bM:SRE' - fi - fi - fi - - export_dynamic_flag_spec='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - always_export_symbols=yes - if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
- if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath -else - if ${lt_cv_aix_libpath_+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - - lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\([^ ]*\) *$/\1/ - p - } - }' - lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - # Check for a 64-bit object if we didn't find anything. - if test -z "$lt_cv_aix_libpath_"; then - lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath_"; then - lt_cv_aix_libpath_="/usr/lib:/lib" - fi - -fi - - aix_libpath=$lt_cv_aix_libpath_ -fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else - if test "$host_cpu" = ia64; then - hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' - allow_undefined_flag="-z nodefs" - archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. - if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath -else - if ${lt_cv_aix_libpath_+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - - lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\([^ ]*\) *$/\1/ - p - } - }' - lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - # Check for a 64-bit object if we didn't find anything. - if test -z "$lt_cv_aix_libpath_"; then - lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath_"; then - lt_cv_aix_libpath_="/usr/lib:/lib" - fi - -fi - - aix_libpath=$lt_cv_aix_libpath_ -fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. - no_undefined_flag=' ${wl}-bernotok' - allow_undefined_flag=' ${wl}-berok' - if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. - whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - whole_archive_flag_spec='$convenience' - fi - archive_cmds_need_lc=yes - # This is similar to how AIX traditionally builds its shared libraries. 
- archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='' - ;; - m68k) - archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - hardcode_libdir_flag_spec='-L$libdir' - hardcode_minus_L=yes - ;; - esac - ;; - - bsdi[45]*) - export_dynamic_flag_spec=-rdynamic - ;; - - cygwin* | mingw* | pw32* | cegcc*) - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. - case $cc_basename in - cl*) - # Native MSVC - hardcode_libdir_flag_spec=' ' - allow_undefined_flag=unsupported - always_export_symbols=yes - file_list_spec='@' - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' - archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; - else - sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; - fi~ - $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ - linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, )='true' - enable_shared_with_static_runtimes=yes - exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' - export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' - # Don't use ranlib - old_postinstall_cmds='chmod 644 $oldlib' - postlink_cmds='lt_outputfile="@OUTPUT@"~ - lt_tool_outputfile="@TOOL_OUTPUT@"~ - case $lt_outputfile in - *.exe|*.EXE) ;; - *) - lt_outputfile="$lt_outputfile.exe" - lt_tool_outputfile="$lt_tool_outputfile.exe" - ;; - esac~ - if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then - $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; - $RM "$lt_outputfile.manifest"; - fi' - ;; - *) - # Assume MSVC wrapper - hardcode_libdir_flag_spec=' ' - allow_undefined_flag=unsupported - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. - old_archive_from_new_cmds='true' - # FIXME: Should let the user specify the lib program. - old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' - enable_shared_with_static_runtimes=yes - ;; - esac - ;; - - darwin* | rhapsody*) - - - archive_cmds_need_lc=no - hardcode_direct=no - hardcode_automatic=yes - hardcode_shlibpath_var=unsupported - if test "$lt_cv_ld_force_load" = "yes"; then - whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' - - else - whole_archive_flag_spec='' - fi - link_all_deplibs=yes - allow_undefined_flag="$_lt_dar_allow_undefined" - case $cc_basename in - ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac - if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all - archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" - module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" - archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" - module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - - else - ld_shlibs=no - fi - - ;; - - dgux*) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs 
$linker_flags' - hardcode_libdir_flag_spec='-L$libdir' - hardcode_shlibpath_var=no - ;; - - # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor - # support. Future versions do this automatically, but an explicit c++rt0.o - # does not break anything, and helps significantly (at the cost of a little - # extra space). - freebsd2.2*) - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no - ;; - - # Unfortunately, older versions of FreeBSD 2 do not have this feature. - freebsd2.*) - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=yes - hardcode_minus_L=yes - hardcode_shlibpath_var=no - ;; - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. - freebsd* | dragonfly*) - archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no - ;; - - hpux9*) - if test "$GCC" = yes; then - archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi - hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_direct=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- hardcode_minus_L=yes - export_dynamic_flag_spec='${wl}-E' - ;; - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then - archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi - if test "$with_gnu_ld" = no; then - hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_direct=yes - hardcode_direct_absolute=yes - export_dynamic_flag_spec='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L=yes - fi - ;; - - hpux11*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) - archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - - # Older versions of the 11.00 compiler do not understand -b yet - # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5 -$as_echo_n "checking if $CC understands -b... 
" >&6; } -if ${lt_cv_prog_compiler__b+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler__b=no - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -b" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then - # The linker can only warn and ignore the option if not recognized - # So say no if there are warnings - if test -s conftest.err; then - # Append any errors to the config.log. - cat conftest.err 1>&5 - $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler__b=yes - fi - else - lt_cv_prog_compiler__b=yes - fi - fi - $RM -r conftest* - LDFLAGS="$save_LDFLAGS" - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 -$as_echo "$lt_cv_prog_compiler__b" >&6; } - -if test x"$lt_cv_prog_compiler__b" = xyes; then - archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' -fi - - ;; - esac - fi - if test "$with_gnu_ld" = no; then - hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - - case $host_cpu in - hppa*64*|ia64*) - hardcode_direct=no - hardcode_shlibpath_var=no - ;; - *) - hardcode_direct=yes - hardcode_direct_absolute=yes - export_dynamic_flag_spec='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- hardcode_minus_L=yes - ;; - esac - fi - ;; - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. - # This should be the same for all languages, so no per-tag cache variable. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } -if ${lt_cv_irix_exported_symbol+:} false; then : - $as_echo_n "(cached) " >&6 -else - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -int foo (void) { return 0; } -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - lt_cv_irix_exported_symbol=yes -else - lt_cv_irix_exported_symbol=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS="$save_LDFLAGS" -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -$as_echo "$lt_cv_irix_exported_symbol" >&6; } - if test "$lt_cv_irix_exported_symbol" = yes; then - archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - archive_cmds_need_lc='no' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - inherit_rpath=yes - link_all_deplibs=yes - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out - else - archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF - fi - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no - ;; - - newsos6) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=yes - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_shlibpath_var=no - ;; - - *nto* | *qnx*) - ;; - - openbsd*) - if test -f /usr/libexec/ld.so; then - 
hardcode_direct=yes - hardcode_shlibpath_var=no - hardcode_direct_absolute=yes - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' - hardcode_libdir_flag_spec='${wl}-rpath,$libdir' - export_dynamic_flag_spec='${wl}-E' - else - case $host_os in - openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec='-R$libdir' - ;; - *) - archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='${wl}-rpath,$libdir' - ;; - esac - fi - else - ld_shlibs=no - fi - ;; - - os2*) - hardcode_libdir_flag_spec='-L$libdir' - hardcode_minus_L=yes - allow_undefined_flag=unsupported - archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' - old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - allow_undefined_flag=' -expect_unresolved \*' - archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags 
-soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - archive_cmds_need_lc='no' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' - archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ - $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - hardcode_libdir_flag_spec='-rpath $libdir' - fi - archive_cmds_need_lc='no' - hardcode_libdir_separator=: - ;; - - solaris*) - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' - archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M 
${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' - archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) - wlarc='${wl}' - archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi - hardcode_libdir_flag_spec='-R$libdir' - hardcode_shlibpath_var=no - case $host_os in - solaris2.[0-5] | solaris2.[0-5].*) ;; - *) - # The compiler driver will combine and reorder linker options, - # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) - if test "$GCC" = yes; then - whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - whole_archive_flag_spec='-z allextract$convenience -z defaultextract' - fi - ;; - esac - link_all_deplibs=yes - ;; - - sunos4*) - if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. 
- archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi - hardcode_libdir_flag_spec='-L$libdir' - hardcode_direct=yes - hardcode_minus_L=yes - hardcode_shlibpath_var=no - ;; - - sysv4) - case $host_vendor in - sni) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=yes # is this really true??? - ;; - siemens) - ## LD is ld it makes a PLAMLIB - ## CC just makes a GrossModule. - archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' - reload_cmds='$CC -r -o $output$reload_objs' - hardcode_direct=no - ;; - motorola) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=no #Motorola manual says yes, but my tests say they lie - ;; - esac - runpath_var='LD_RUN_PATH' - hardcode_shlibpath_var=no - ;; - - sysv4.3*) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_shlibpath_var=no - export_dynamic_flag_spec='-Bexport' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_shlibpath_var=no - runpath_var=LD_RUN_PATH - hardcode_runpath_var=yes - ld_shlibs=yes - fi - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) - no_undefined_flag='${wl}-z,text' - archive_cmds_need_lc=no - hardcode_shlibpath_var=no - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | 
sco5v6*) - # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. - no_undefined_flag='${wl}-z,text' - allow_undefined_flag='${wl}-z,nodefs' - archive_cmds_need_lc=no - hardcode_shlibpath_var=no - hardcode_libdir_flag_spec='${wl}-R,$libdir' - hardcode_libdir_separator=':' - link_all_deplibs=yes - export_dynamic_flag_spec='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - uts4*) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec='-L$libdir' - hardcode_shlibpath_var=no - ;; - - *) - ld_shlibs=no - ;; - esac - - if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) - export_dynamic_flag_spec='${wl}-Blargedynsym' - ;; - esac - fi - fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 -$as_echo "$ld_shlibs" >&6; } -test "$ld_shlibs" = no && can_build_shared=no - -with_gnu_ld=$with_gnu_ld - - - - - - - - - - - - - - - -# -# Do we need to explicitly link libc? -# -case "x$archive_cmds_need_lc" in -x|xyes) - # Assume -lc should be added - archive_cmds_need_lc=yes - - if test "$enable_shared" = yes && test "$GCC" = yes; then - case $archive_cmds in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. 
- ;; - '$CC '*) - # Test whether the compiler implicitly links with -lc since on some - # systems, -lgcc has to come before -lc. If gcc already passes -lc - # to ld, don't add -lc before -lgcc. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 -$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } -if ${lt_cv_archive_cmds_need_lc+:} false; then : - $as_echo_n "(cached) " >&6 -else - $RM conftest* - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } 2>conftest.err; then - soname=conftest - lib=conftest - libobjs=conftest.$ac_objext - deplibs= - wl=$lt_prog_compiler_wl - pic_flag=$lt_prog_compiler_pic - compiler_flags=-v - linker_flags=-v - verstring= - output_objdir=. - libname=conftest - lt_save_allow_undefined_flag=$allow_undefined_flag - allow_undefined_flag= - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 - (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } - then - lt_cv_archive_cmds_need_lc=no - else - lt_cv_archive_cmds_need_lc=yes - fi - allow_undefined_flag=$lt_save_allow_undefined_flag - else - cat conftest.err 1>&5 - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5 -$as_echo "$lt_cv_archive_cmds_need_lc" >&6; } - archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc - ;; - esac - fi - ;; -esac - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 -$as_echo_n "checking dynamic linker characteristics... " >&6; } - -if test "$GCC" = yes; then - case $host_os in - darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; - *) lt_awk_arg="/^libraries:/" ;; - esac - case $host_os in - mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;; - *) lt_sed_strip_eq="s,=/,/,g" ;; - esac - lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` - case $lt_search_path_spec in - *\;*) - # if the path contains ";" then we assume it to be the separator - # otherwise default to the standard path separator (i.e. ":") - it is - # assumed that no part of a normal pathname contains ";" but that should - # okay in the real world where ";" in dirpaths is itself problematic. - lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` - ;; - *) - lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` - ;; - esac - # Ok, now we have the path, separated by spaces, we can step through it - # and add multilib dir if necessary. 
- lt_tmp_lt_search_path_spec= - lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` - for lt_sys_path in $lt_search_path_spec; do - if test -d "$lt_sys_path/$lt_multi_os_dir"; then - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" - else - test -d "$lt_sys_path" && \ - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" - fi - done - lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' -BEGIN {RS=" "; FS="/|\n";} { - lt_foo=""; - lt_count=0; - for (lt_i = NF; lt_i > 0; lt_i--) { - if ($lt_i != "" && $lt_i != ".") { - if ($lt_i == "..") { - lt_count++; - } else { - if (lt_count == 0) { - lt_foo="/" $lt_i lt_foo; - } else { - lt_count--; - } - } - } - } - if (lt_foo != "") { lt_freq[lt_foo]++; } - if (lt_freq[lt_foo] == 1) { print lt_foo; } -}'` - # AWK program above erroneously prepends '/' to C:/dos/paths - # for these hosts. - case $host_os in - mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ - $SED 's,/\([A-Za-z]:\),\1,g'` ;; - esac - sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` -else - sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -fi -library_names_spec= -libname_spec='lib$name' -soname_spec= -shrext_cmds=".so" -postinstall_cmds= -postuninstall_cmds= -finish_cmds= -finish_eval= -shlibpath_var= -shlibpath_overrides_runpath=unknown -version_type=none -dynamic_linker="$host_os ld.so" -sys_lib_dlsearch_path_spec="/lib /usr/lib" -need_lib_prefix=unknown -hardcode_into_libs=no - -# when you set need_version to no, make sure it does not cause -set_version -# flags to be left without arguments -need_version=unknown - -case $host_os in -aix3*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. 
- soname_spec='${libname}${release}${shared_ext}$major' - ;; - -aix[4-9]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes - if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 - library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with - # the line `#! .'. This would cause the generated library to - # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. - case $host_os in - aix4 | aix4.[01] | aix4.[01].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' - echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac - # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. - if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib.so - # instead of lib.a to let people know that these are not - # typical AIX shared libraries. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. - library_names_spec='${libname}${release}.a $libname.a' - soname_spec='${libname}${release}${shared_ext}$major' - fi - shlibpath_var=LIBPATH - fi - ;; - -amigaos*) - case $host_cpu in - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. 
- # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. - finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - -beos*) - library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; - -bsdi[45]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" - sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" - # the default ld.so.conf also contains /usr/contrib/lib and - # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow - # libtool to hard-code these into programs - ;; - -cygwin* | mingw* | pw32* | cegcc*) - version_type=windows - shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - - case $GCC,$cc_basename in - yes,*) - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ - chmod a+x \$dldir/$dlname~ - if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then - eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; - fi' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $RM \$dlpath' - shlibpath_overrides_runpath=yes - - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' - soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix - soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' - ;; - - *,cl*) - # Native MSVC - libname_spec='$name' - soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) - sys_lib_search_path_spec= - lt_save_ifs=$IFS - IFS=';' - for lt_path in $LIB - do - IFS=$lt_save_ifs - # Let DOS variable expansion print the short 8.3 style file name. - lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` - sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" - done - IFS=$lt_save_ifs - # Convert to MSYS style. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` - ;; - cygwin*) - # Convert to unix form, then to dos form, then back to unix form - # but this time dos style (no spaces!) 
so that the unix form looks - # like /cygdrive/c/PROGRA~1:/cygdr... - sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` - sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) - sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` - else - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - fi - # FIXME: find the short name or the path components, as spaces are - # common. (e.g. "Program Files" -> "PROGRA~1") - ;; - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $RM \$dlpath' - shlibpath_overrides_runpath=yes - dynamic_linker='Win32 link.exe' - ;; - - *) - # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; - -darwin* | rhapsody*) - dynamic_linker="$host_os dyld" - version_type=darwin - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' - soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' - - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" - sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' - ;; - -dgux*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -freebsd* | dragonfly*) - # DragonFly does not have aout. When/if they implement a new - # versioning mechanism, adjust this. 
- if test -x /usr/bin/objformat; then - objformat=`/usr/bin/objformat` - else - case $host_os in - freebsd[23].*) objformat=aout ;; - *) objformat=elf ;; - esac - fi - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac - shlibpath_var=LD_LIBRARY_PATH - case $host_os in - freebsd2.*) - shlibpath_overrides_runpath=yes - ;; - freebsd3.[01]* | freebsdelf3.[01]*) - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ - freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - *) # from 4.6 on, and DragonFly - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - esac - ;; - -gnu*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -hpux9* | 
hpux10* | hpux11*) - # Give a soname corresponding to the major version so that dld.sl refuses to - # link against other versions. - version_type=sunos - need_lib_prefix=no - need_version=no - case $host_cpu in - ia64*) - shrext_cmds='.so' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" - fi - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - *) - shrext_cmds='.sl' - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
- postinstall_cmds='chmod 555 $lib' - # or fails outright, so override atomically: - install_override_mode=555 - ;; - -interix[3-9]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -irix5* | irix6* | nonstopux*) - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) - if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix - fi ;; - esac - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= - ;; - *) - case $LD in # libtool.m4 will add one of these switches to LD - *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") - libsuff= shlibsuff= libmagic=32-bit;; - *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") - libsuff=32 shlibsuff=N32 libmagic=N32;; - *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") - libsuff=64 shlibsuff=64 libmagic=64-bit;; - *) libsuff= shlibsuff= libmagic=never-match;; - esac - ;; - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no - sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" - sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -# No shared lib support for Linux oldld, aout, or coff. -linux*oldld* | linux*aout* | linux*coff*) - dynamic_linker=no - ;; - -# This must be glibc/ELF. 
-linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - - # Some binutils ld are patched to set DT_RUNPATH - if ${lt_cv_shlibpath_overrides_runpath+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_shlibpath_overrides_runpath=no - save_LDFLAGS=$LDFLAGS - save_libdir=$libdir - eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ - LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : - lt_cv_shlibpath_overrides_runpath=yes -fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS=$save_LDFLAGS - libdir=$save_libdir - -fi - - shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath - - # This implies no fast_install, which is unacceptable. - # Some rework will be needed to allow for fast_install - # before this can be enabled. 
- hardcode_into_libs=yes - - # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` - sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on - # powerpc, because MkLinux only supported shared libraries with the - # GNU dynamic linker. Since this was broken with cross compilers, - # most powerpc-linux boxes support dynamic linking these days and - # people can always --disable-shared, the test was removed, and we - # assume the GNU/Linux dynamic linker is in use. - dynamic_linker='GNU/Linux ld.so' - ;; - -netbsd*) - version_type=sunos - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - -newsos6) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; - -*nto* | *qnx*) - version_type=qnx - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major 
$libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - -openbsd*) - version_type=sunos - sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no - # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. - case $host_os in - openbsd3.3 | openbsd3.3.*) need_version=yes ;; - *) need_version=no ;; - esac - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - case $host_os in - openbsd2.[89] | openbsd2.[89].*) - shlibpath_overrides_runpath=no - ;; - *) - shlibpath_overrides_runpath=yes - ;; - esac - else - shlibpath_overrides_runpath=yes - fi - ;; - -os2*) - libname_spec='$name' - shrext_cmds=".dll" - need_lib_prefix=no - library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' - shlibpath_var=LIBPATH - ;; - -osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" - sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - -rdos*) - dynamic_linker=no - ;; - -solaris*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - 
shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - # ldd complains unless libraries are executable - postinstall_cmds='chmod +x $lib' - ;; - -sunos4*) - version_type=sunos - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes - ;; - -sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) - shlibpath_overrides_runpath=no - need_lib_prefix=no - runpath_var=LD_RUN_PATH - ;; - siemens) - need_lib_prefix=no - ;; - motorola) - need_lib_prefix=no - need_version=no - shlibpath_overrides_runpath=no - sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' - ;; - esac - ;; - -sysv4*MP*) - if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' - soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - -sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) - version_type=freebsd-elf - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - 
sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' - case $host_os in - sco3.2v5*) - sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" - ;; - esac - fi - sys_lib_dlsearch_path_spec='/usr/lib' - ;; - -tpf*) - # TPF is a cross-target only. Preferred cross-host = GNU/Linux. - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -uts4*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -*) - dynamic_linker=no - ;; -esac -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 -$as_echo "$dynamic_linker" >&6; } -test "$dynamic_linker" = no && can_build_shared=no - -variables_saved_for_relink="PATH $shlibpath_var $runpath_var" -if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" -fi - -if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then - sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" -fi -if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then - sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 -$as_echo_n "checking how to hardcode library paths into programs... 
" >&6; } -hardcode_action= -if test -n "$hardcode_libdir_flag_spec" || - test -n "$runpath_var" || - test "X$hardcode_automatic" = "Xyes" ; then - - # We can hardcode non-existent directories. - if test "$hardcode_direct" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one - ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && - test "$hardcode_minus_L" != no; then - # Linking always hardcodes the temporary library directory. - hardcode_action=relink - else - # We can link without hardcoding, and we can hardcode nonexisting dirs. - hardcode_action=immediate - fi -else - # We cannot hardcode anything, or else we can only hardcode existing - # directories. - hardcode_action=unsupported -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 -$as_echo "$hardcode_action" >&6; } - -if test "$hardcode_action" = relink || - test "$inherit_rpath" = yes; then - # Fast installation is not supported - enable_fast_install=no -elif test "$shlibpath_overrides_runpath" = yes || - test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless -fi - - - - - - - if test "x$enable_dlopen" != xyes; then - enable_dlopen=unknown - enable_dlopen_self=unknown - enable_dlopen_self_static=unknown -else - lt_cv_dlopen=no - lt_cv_dlopen_libs= - - case $host_os in - beos*) - lt_cv_dlopen="load_add_on" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ;; - - mingw* | pw32* | cegcc*) - lt_cv_dlopen="LoadLibrary" - lt_cv_dlopen_libs= - ;; - - cygwin*) - lt_cv_dlopen="dlopen" - lt_cv_dlopen_libs= - ;; - - darwin*) - # if libdl is installed we need to link against it - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 -$as_echo_n "checking for dlopen in -ldl... 
" >&6; } -if ${ac_cv_lib_dl_dlopen+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-ldl $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dlopen (); -int -main () -{ -return dlopen (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_dl_dlopen=yes -else - ac_cv_lib_dl_dlopen=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 -$as_echo "$ac_cv_lib_dl_dlopen" >&6; } -if test "x$ac_cv_lib_dl_dlopen" = xyes; then : - lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" -else - - lt_cv_dlopen="dyld" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - -fi - - ;; - - *) - ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" -if test "x$ac_cv_func_shl_load" = xyes; then : - lt_cv_dlopen="shl_load" -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 -$as_echo_n "checking for shl_load in -ldld... " >&6; } -if ${ac_cv_lib_dld_shl_load+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-ldld $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char shl_load (); -int -main () -{ -return shl_load (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_dld_shl_load=yes -else - ac_cv_lib_dld_shl_load=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 -$as_echo "$ac_cv_lib_dld_shl_load" >&6; } -if test "x$ac_cv_lib_dld_shl_load" = xyes; then : - lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" -else - ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" -if test "x$ac_cv_func_dlopen" = xyes; then : - lt_cv_dlopen="dlopen" -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 -$as_echo_n "checking for dlopen in -ldl... " >&6; } -if ${ac_cv_lib_dl_dlopen+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-ldl $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dlopen (); -int -main () -{ -return dlopen (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_dl_dlopen=yes -else - ac_cv_lib_dl_dlopen=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 -$as_echo "$ac_cv_lib_dl_dlopen" >&6; } -if test "x$ac_cv_lib_dl_dlopen" = xyes; then : - lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 -$as_echo_n "checking for dlopen in -lsvld... 
" >&6; } -if ${ac_cv_lib_svld_dlopen+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-lsvld $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dlopen (); -int -main () -{ -return dlopen (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_svld_dlopen=yes -else - ac_cv_lib_svld_dlopen=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 -$as_echo "$ac_cv_lib_svld_dlopen" >&6; } -if test "x$ac_cv_lib_svld_dlopen" = xyes; then : - lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 -$as_echo_n "checking for dld_link in -ldld... " >&6; } -if ${ac_cv_lib_dld_dld_link+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-ldld $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char dld_link (); -int -main () -{ -return dld_link (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_dld_dld_link=yes -else - ac_cv_lib_dld_dld_link=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 -$as_echo "$ac_cv_lib_dld_dld_link" >&6; } -if test "x$ac_cv_lib_dld_dld_link" = xyes; then : - lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" -fi - - -fi - - -fi - - -fi - - -fi - - -fi - - ;; - esac - - if test "x$lt_cv_dlopen" != xno; then - enable_dlopen=yes - else - enable_dlopen=no - fi - - case $lt_cv_dlopen in - dlopen) - save_CPPFLAGS="$CPPFLAGS" - test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" - - save_LDFLAGS="$LDFLAGS" - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" - - save_LIBS="$LIBS" - LIBS="$lt_cv_dlopen_libs $LIBS" - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 -$as_echo_n "checking whether a program can dlopen itself... " >&6; } -if ${lt_cv_dlopen_self+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test "$cross_compiling" = yes; then : - lt_cv_dlopen_self=cross -else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 - lt_status=$lt_dlunknown - cat > conftest.$ac_ext <<_LT_EOF -#line $LINENO "configure" -#include "confdefs.h" - -#if HAVE_DLFCN_H -#include -#endif - -#include - -#ifdef RTLD_GLOBAL -# define LT_DLGLOBAL RTLD_GLOBAL -#else -# ifdef DL_GLOBAL -# define LT_DLGLOBAL DL_GLOBAL -# else -# define LT_DLGLOBAL 0 -# endif -#endif - -/* We may have to define LT_DLLAZY_OR_NOW in the command line if we - find out it does not work in some platform. 
*/ -#ifndef LT_DLLAZY_OR_NOW -# ifdef RTLD_LAZY -# define LT_DLLAZY_OR_NOW RTLD_LAZY -# else -# ifdef DL_LAZY -# define LT_DLLAZY_OR_NOW DL_LAZY -# else -# ifdef RTLD_NOW -# define LT_DLLAZY_OR_NOW RTLD_NOW -# else -# ifdef DL_NOW -# define LT_DLLAZY_OR_NOW DL_NOW -# else -# define LT_DLLAZY_OR_NOW 0 -# endif -# endif -# endif -# endif -#endif - -/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ -#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -int fnord () __attribute__((visibility("default"))); -#endif - -int fnord () { return 42; } -int main () -{ - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); - int status = $lt_dlunknown; - - if (self) - { - if (dlsym (self,"fnord")) status = $lt_dlno_uscore; - else - { - if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; - else puts (dlerror ()); - } - /* dlclose (self); */ - } - else - puts (dlerror ()); - - return status; -} -_LT_EOF - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&5 2>/dev/null - lt_status=$? - case x$lt_status in - x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; - x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; - x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; - esac - else : - # compilation failed - lt_cv_dlopen_self=no - fi -fi -rm -fr conftest* - - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 -$as_echo "$lt_cv_dlopen_self" >&6; } - - if test "x$lt_cv_dlopen_self" = xyes; then - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 -$as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } -if ${lt_cv_dlopen_self_static+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test "$cross_compiling" = yes; then : - lt_cv_dlopen_self_static=cross -else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 - lt_status=$lt_dlunknown - cat > conftest.$ac_ext <<_LT_EOF -#line $LINENO "configure" -#include "confdefs.h" - -#if HAVE_DLFCN_H -#include -#endif - -#include - -#ifdef RTLD_GLOBAL -# define LT_DLGLOBAL RTLD_GLOBAL -#else -# ifdef DL_GLOBAL -# define LT_DLGLOBAL DL_GLOBAL -# else -# define LT_DLGLOBAL 0 -# endif -#endif - -/* We may have to define LT_DLLAZY_OR_NOW in the command line if we - find out it does not work in some platform. */ -#ifndef LT_DLLAZY_OR_NOW -# ifdef RTLD_LAZY -# define LT_DLLAZY_OR_NOW RTLD_LAZY -# else -# ifdef DL_LAZY -# define LT_DLLAZY_OR_NOW DL_LAZY -# else -# ifdef RTLD_NOW -# define LT_DLLAZY_OR_NOW RTLD_NOW -# else -# ifdef DL_NOW -# define LT_DLLAZY_OR_NOW DL_NOW -# else -# define LT_DLLAZY_OR_NOW 0 -# endif -# endif -# endif -# endif -#endif - -/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ -#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -int fnord () __attribute__((visibility("default"))); -#endif - -int fnord () { return 42; } -int main () -{ - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); - int status = $lt_dlunknown; - - if (self) - { - if (dlsym (self,"fnord")) status = $lt_dlno_uscore; - else - { - if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; - else puts (dlerror ()); - } - /* dlclose (self); */ - } - else - puts (dlerror ()); - - return status; -} -_LT_EOF - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&5 2>/dev/null - lt_status=$? 
- case x$lt_status in - x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; - x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; - x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; - esac - else : - # compilation failed - lt_cv_dlopen_self_static=no - fi -fi -rm -fr conftest* - - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 -$as_echo "$lt_cv_dlopen_self_static" >&6; } - fi - - CPPFLAGS="$save_CPPFLAGS" - LDFLAGS="$save_LDFLAGS" - LIBS="$save_LIBS" - ;; - esac - - case $lt_cv_dlopen_self in - yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; - *) enable_dlopen_self=unknown ;; - esac - - case $lt_cv_dlopen_self_static in - yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; - *) enable_dlopen_self_static=unknown ;; - esac -fi - - - - - - - - - - - - - - - - - -striplib= -old_striplib= -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 -$as_echo_n "checking whether stripping libraries is possible... " >&6; } -if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then - test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" - test -z "$striplib" && striplib="$STRIP --strip-unneeded" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else -# FIXME - insert some real tests, host_os isn't really good enough - case $host_os in - darwin*) - if test -n "$STRIP" ; then - striplib="$STRIP -x" - old_striplib="$STRIP -S" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - fi - ;; - *) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - ;; - esac -fi - - - - - - - - - - - - - # Report which library types will actually be built - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 -$as_echo_n "checking if libtool supports shared libraries... 
" >&6; } - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 -$as_echo "$can_build_shared" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 -$as_echo_n "checking whether to build shared libraries... " >&6; } - test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) - test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - - aix[4-9]*) - if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then - test "$enable_shared" = yes && enable_static=no - fi - ;; - esac - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 -$as_echo "$enable_shared" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 -$as_echo_n "checking whether to build static libraries... " >&6; } - # Make sure either enable_shared or enable_static is yes. 
- test "$enable_shared" = yes || enable_static=yes - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 -$as_echo "$enable_static" >&6; } - - - - -fi -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - -CC="$lt_save_CC" - - - - - - ac_ext=${ac_fc_srcext-f} -ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' -ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_fc_compiler_gnu - - -if test -z "$FC" || test "X$FC" = "Xno"; then - _lt_disable_FC=yes -fi - -archive_cmds_need_lc_FC=no -allow_undefined_flag_FC= -always_export_symbols_FC=no -archive_expsym_cmds_FC= -export_dynamic_flag_spec_FC= -hardcode_direct_FC=no -hardcode_direct_absolute_FC=no -hardcode_libdir_flag_spec_FC= -hardcode_libdir_separator_FC= -hardcode_minus_L_FC=no -hardcode_automatic_FC=no -inherit_rpath_FC=no -module_cmds_FC= -module_expsym_cmds_FC= -link_all_deplibs_FC=unknown -old_archive_cmds_FC=$old_archive_cmds -reload_flag_FC=$reload_flag -reload_cmds_FC=$reload_cmds -no_undefined_flag_FC= -whole_archive_flag_spec_FC= -enable_shared_with_static_runtimes_FC=no - -# Source file extension for fc test sources. -ac_ext=${ac_fc_srcext-f} - -# Object file extension for compiled fc test sources. -objext=o -objext_FC=$objext - -# No sense in running all these tests if we already determined that -# the FC compiler isn't working. Some variables (like enable_shared) -# are currently assumed to apply to all compilers on this platform, -# and will be corrupted by setting them based on a non-working compiler. 
-if test "$_lt_disable_FC" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="\ - subroutine t - return - end -" - - # Code to be used in simple link tests - lt_simple_link_test_code="\ - program t - end -" - - # ltmain only uses $CC for tagged configurations so make sure $CC is set. - - - - - - -# If no C compiler was specified, use CC. -LTCC=${LTCC-"$CC"} - -# If no C compiler flags were specified, use CFLAGS. -LTCFLAGS=${LTCFLAGS-"$CFLAGS"} - -# Allow CC to be a program name with arguments. -compiler=$CC - - - # save warnings/boilerplate of simple test code - ac_outfile=conftest.$ac_objext -echo "$lt_simple_compile_test_code" >conftest.$ac_ext -eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_compiler_boilerplate=`cat conftest.err` -$RM conftest* - - ac_outfile=conftest.$ac_objext -echo "$lt_simple_link_test_code" >conftest.$ac_ext -eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_linker_boilerplate=`cat conftest.err` -$RM -r conftest* - - - # Allow CC to be a program name with arguments. - lt_save_CC="$CC" - lt_save_GCC=$GCC - lt_save_CFLAGS=$CFLAGS - CC=${FC-"f95"} - CFLAGS=$FCFLAGS - compiler=$CC - GCC=$ac_cv_fc_compiler_gnu - - compiler_FC=$CC - for cc_temp in $compiler""; do - case $cc_temp in - compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; - distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; - \-*) ;; - *) break;; - esac -done -cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - - - if test -n "$compiler"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 -$as_echo_n "checking if libtool supports shared libraries... " >&6; } - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 -$as_echo "$can_build_shared" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 -$as_echo_n "checking whether to build shared libraries... 
" >&6; } - test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) - test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - aix[4-9]*) - if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then - test "$enable_shared" = yes && enable_static=no - fi - ;; - esac - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 -$as_echo "$enable_shared" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 -$as_echo_n "checking whether to build static libraries... " >&6; } - # Make sure either enable_shared or enable_static is yes. - test "$enable_shared" = yes || enable_static=yes - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 -$as_echo "$enable_static" >&6; } - - GCC_FC="$ac_cv_fc_compiler_gnu" - LD_FC="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change - ## the running order or otherwise move them around unless you know exactly - ## what you are doing... - # Dependencies to place before and after the object being linked: -predep_objects_FC= -postdep_objects_FC= -predeps_FC= -postdeps_FC= -compiler_lib_search_path_FC= - -cat > conftest.$ac_ext <<_LT_EOF - subroutine foo - implicit none - integer a - a=0 - return - end -_LT_EOF - - -_lt_libdeps_save_CFLAGS=$CFLAGS -case "$CC $CFLAGS " in #( -*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; -*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; -*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;; -esac - -if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; }; then - # Parse the compiler output and extract the necessary - # objects, libraries and library flags. - - # Sentinel used to keep track of whether or not we are before - # the conftest object file. - pre_test_object_deps_done=no - - for p in `eval "$output_verbose_link_cmd"`; do - case ${prev}${p} in - - -L* | -R* | -l*) - # Some compilers place space between "-{L,R}" and the path. - # Remove the space. - if test $p = "-L" || - test $p = "-R"; then - prev=$p - continue - fi - - # Expand the sysroot to ease extracting the directories later. - if test -z "$prev"; then - case $p in - -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; - -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; - -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; - esac - fi - case $p in - =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; - esac - if test "$pre_test_object_deps_done" = no; then - case ${prev} in - -L | -R) - # Internal compiler library paths should come after those - # provided the user. The postdeps already come after the - # user supplied libs so there is no need to process them. - if test -z "$compiler_lib_search_path_FC"; then - compiler_lib_search_path_FC="${prev}${p}" - else - compiler_lib_search_path_FC="${compiler_lib_search_path_FC} ${prev}${p}" - fi - ;; - # The "-l" case would never come before the object being - # linked, so don't bother handling this case. - esac - else - if test -z "$postdeps_FC"; then - postdeps_FC="${prev}${p}" - else - postdeps_FC="${postdeps_FC} ${prev}${p}" - fi - fi - prev= - ;; - - *.lto.$objext) ;; # Ignore GCC LTO objects - *.$objext) - # This assumes that the test object file only shows up - # once in the compiler output. 
- if test "$p" = "conftest.$objext"; then - pre_test_object_deps_done=yes - continue - fi - - if test "$pre_test_object_deps_done" = no; then - if test -z "$predep_objects_FC"; then - predep_objects_FC="$p" - else - predep_objects_FC="$predep_objects_FC $p" - fi - else - if test -z "$postdep_objects_FC"; then - postdep_objects_FC="$p" - else - postdep_objects_FC="$postdep_objects_FC $p" - fi - fi - ;; - - *) ;; # Ignore the rest. - - esac - done - - # Clean up. - rm -f a.out a.exe -else - echo "libtool.m4: error: problem compiling FC test program" -fi - -$RM -f confest.$objext -CFLAGS=$_lt_libdeps_save_CFLAGS - -# PORTME: override above test on systems where it is broken - - -case " $postdeps_FC " in -*" -lc "*) archive_cmds_need_lc_FC=no ;; -esac - compiler_lib_search_dirs_FC= -if test -n "${compiler_lib_search_path_FC}"; then - compiler_lib_search_dirs_FC=`echo " ${compiler_lib_search_path_FC}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - lt_prog_compiler_wl_FC= -lt_prog_compiler_pic_FC= -lt_prog_compiler_static_FC= - - - if test "$GCC" = yes; then - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_static_FC='-static' - - case $host_os in - aix*) - # All AIX code is PIC. - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static_FC='-Bstatic' - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - lt_prog_compiler_pic_FC='-fPIC' - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but - # adding the `-m68020' flag to GCC prevents building anything better, - # like `-m68040'. - lt_prog_compiler_pic_FC='-m68020 -resident32 -malways-restore-a4' - ;; - esac - ;; - - beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) - # PIC is the default for these OSes. 
- ;; - - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - lt_prog_compiler_pic_FC='-DDLL_EXPORT' - ;; - - darwin* | rhapsody*) - # PIC is the default on this platform - # Common symbols not allowed in MH_DYLIB files - lt_prog_compiler_pic_FC='-fno-common' - ;; - - haiku*) - # PIC is the default for Haiku. - # The "-static" flag exists, but is broken. - lt_prog_compiler_static_FC= - ;; - - hpux*) - # PIC is the default for 64-bit PA HP-UX, but not for 32-bit - # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag - # sets the default TLS model and affects inlining. - case $host_cpu in - hppa*64*) - # +Z the default - ;; - *) - lt_prog_compiler_pic_FC='-fPIC' - ;; - esac - ;; - - interix[3-9]*) - # Interix 3.x gcc -fpic/-fPIC options generate broken code. - # Instead, we relocate shared libraries at runtime. - ;; - - msdosdjgpp*) - # Just because we use GCC doesn't mean we suddenly get shared libraries - # on systems that don't support them. - lt_prog_compiler_can_build_shared_FC=no - enable_shared=no - ;; - - *nto* | *qnx*) - # QNX uses GNU C++, but need to define -shared option too, otherwise - # it will coredump. - lt_prog_compiler_pic_FC='-fPIC -shared' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - lt_prog_compiler_pic_FC=-Kconform_pic - fi - ;; - - *) - lt_prog_compiler_pic_FC='-fPIC' - ;; - esac - - case $cc_basename in - nvcc*) # Cuda Compiler Driver 2.2 - lt_prog_compiler_wl_FC='-Xlinker ' - if test -n "$lt_prog_compiler_pic_FC"; then - lt_prog_compiler_pic_FC="-Xcompiler $lt_prog_compiler_pic_FC" - fi - ;; - esac - else - # PORTME Check for flag to pass linker flags through the system compiler. 
- case $host_os in - aix*) - lt_prog_compiler_wl_FC='-Wl,' - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static_FC='-Bstatic' - else - lt_prog_compiler_static_FC='-bnso -bI:/lib/syscalls.exp' - fi - ;; - - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - lt_prog_compiler_pic_FC='-DDLL_EXPORT' - ;; - - hpux9* | hpux10* | hpux11*) - lt_prog_compiler_wl_FC='-Wl,' - # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but - # not for PA HP-UX. - case $host_cpu in - hppa*64*|ia64*) - # +Z the default - ;; - *) - lt_prog_compiler_pic_FC='+Z' - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? - lt_prog_compiler_static_FC='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) - lt_prog_compiler_wl_FC='-Wl,' - # PIC (with -KPIC) is the default. - lt_prog_compiler_static_FC='-non_shared' - ;; - - linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in - # old Intel for x86_64 which still supported -KPIC. - ecc*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-static' - ;; - # icc used to be incompatible with GCC. - # ICC 10 doesn't accept -KPIC any more. - icc* | ifort*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-fPIC' - lt_prog_compiler_static_FC='-static' - ;; - # Lahey Fortran 8.1. 
- lf95*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='--shared' - lt_prog_compiler_static_FC='--static' - ;; - nagfor*) - # NAG Fortran compiler - lt_prog_compiler_wl_FC='-Wl,-Wl,,' - lt_prog_compiler_pic_FC='-PIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-fpic' - lt_prog_compiler_static_FC='-Bstatic' - ;; - ccc*) - lt_prog_compiler_wl_FC='-Wl,' - # All Alpha code is PIC. - lt_prog_compiler_static_FC='-non_shared' - ;; - xl* | bgxl* | bgf* | mpixl*) - # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-qpic' - lt_prog_compiler_static_FC='-qstaticlink' - ;; - *) - case `$CC -V 2>&1 | sed 5q` in - *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*) - # Sun Fortran 8.3 passes all unrecognized flags to the linker - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - lt_prog_compiler_wl_FC='' - ;; - *Sun\ F* | *Sun*Fortran*) - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - lt_prog_compiler_wl_FC='-Qoption ld ' - ;; - *Sun\ C*) - # Sun C 5.9 - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - lt_prog_compiler_wl_FC='-Wl,' - ;; - *Intel*\ [CF]*Compiler*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-fPIC' - lt_prog_compiler_static_FC='-static' - ;; - *Portland\ Group*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-fpic' - lt_prog_compiler_static_FC='-Bstatic' - ;; - esac - ;; - esac - ;; - - newsos6) - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - *nto* | *qnx*) - # QNX uses GNU C++, but need to define -shared option too, otherwise - # it will coredump. 
- lt_prog_compiler_pic_FC='-fPIC -shared' - ;; - - osf3* | osf4* | osf5*) - lt_prog_compiler_wl_FC='-Wl,' - # All OSF/1 code is PIC. - lt_prog_compiler_static_FC='-non_shared' - ;; - - rdos*) - lt_prog_compiler_static_FC='-non_shared' - ;; - - solaris*) - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - case $cc_basename in - f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl_FC='-Qoption ld ';; - *) - lt_prog_compiler_wl_FC='-Wl,';; - esac - ;; - - sunos4*) - lt_prog_compiler_wl_FC='-Qoption ld ' - lt_prog_compiler_pic_FC='-PIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - sysv4 | sysv4.2uw2* | sysv4.3*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - sysv4*MP*) - if test -d /usr/nec ;then - lt_prog_compiler_pic_FC='-Kconform_pic' - lt_prog_compiler_static_FC='-Bstatic' - fi - ;; - - sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - unicos*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_can_build_shared_FC=no - ;; - - uts4*) - lt_prog_compiler_pic_FC='-pic' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - *) - lt_prog_compiler_can_build_shared_FC=no - ;; - esac - fi - -case $host_os in - # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - lt_prog_compiler_pic_FC= - ;; - *) - lt_prog_compiler_pic_FC="$lt_prog_compiler_pic_FC" - ;; -esac - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -$as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } -if ${lt_cv_prog_compiler_pic_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_pic_FC=$lt_prog_compiler_pic_FC -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_FC" >&5 -$as_echo "$lt_cv_prog_compiler_pic_FC" >&6; } -lt_prog_compiler_pic_FC=$lt_cv_prog_compiler_pic_FC - -# -# Check to make sure the PIC flag actually works. -# -if test -n "$lt_prog_compiler_pic_FC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_FC works" >&5 -$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_FC works... " >&6; } -if ${lt_cv_prog_compiler_pic_works_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_pic_works_FC=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - lt_compiler_flag="$lt_prog_compiler_pic_FC" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - # The option is referenced via a variable to avoid confusing sed. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>conftest.err) - ac_status=$? - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - if (exit $ac_status) && test -s "$ac_outfile"; then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings other than the usual output. - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_pic_works_FC=yes - fi - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_FC" >&5 -$as_echo "$lt_cv_prog_compiler_pic_works_FC" >&6; } - -if test x"$lt_cv_prog_compiler_pic_works_FC" = xyes; then - case $lt_prog_compiler_pic_FC in - "" | " "*) ;; - *) lt_prog_compiler_pic_FC=" $lt_prog_compiler_pic_FC" ;; - esac -else - lt_prog_compiler_pic_FC= - lt_prog_compiler_can_build_shared_FC=no -fi - -fi - - - - - -# -# Check to make sure the static flag actually works. -# -wl=$lt_prog_compiler_wl_FC eval lt_tmp_static_flag=\"$lt_prog_compiler_static_FC\" -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 -$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } -if ${lt_cv_prog_compiler_static_works_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_static_works_FC=no - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $lt_tmp_static_flag" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then - # The linker can only warn and ignore the option if not recognized - # So say no if there are warnings - if test -s conftest.err; then - # Append any errors to the config.log. 
- cat conftest.err 1>&5 - $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_static_works_FC=yes - fi - else - lt_cv_prog_compiler_static_works_FC=yes - fi - fi - $RM -r conftest* - LDFLAGS="$save_LDFLAGS" - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_FC" >&5 -$as_echo "$lt_cv_prog_compiler_static_works_FC" >&6; } - -if test x"$lt_cv_prog_compiler_static_works_FC" = xyes; then - : -else - lt_prog_compiler_static_FC= -fi - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 -$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } -if ${lt_cv_prog_compiler_c_o_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_c_o_FC=no - $RM -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - lt_cv_prog_compiler_c_o_FC=yes - fi - fi - chmod u+w . 2>&5 - $RM conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files - $RM out/* && rmdir out - cd .. - $RM -r conftest - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_FC" >&5 -$as_echo "$lt_cv_prog_compiler_c_o_FC" >&6; } - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 -$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } -if ${lt_cv_prog_compiler_c_o_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_c_o_FC=no - $RM -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - lt_cv_prog_compiler_c_o_FC=yes - fi - fi - chmod u+w . 2>&5 - $RM conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files - $RM out/* && rmdir out - cd .. - $RM -r conftest - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_FC" >&5 -$as_echo "$lt_cv_prog_compiler_c_o_FC" >&6; } - - - - -hard_links="nottested" -if test "$lt_cv_prog_compiler_c_o_FC" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 -$as_echo_n "checking if we can lock with hard links... " >&6; } - hard_links=yes - $RM conftest* - ln conftest.a conftest.b 2>/dev/null && hard_links=no - touch conftest.a - ln conftest.a conftest.b 2>&5 || hard_links=no - ln conftest.a conftest.b 2>/dev/null && hard_links=no - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 -$as_echo "$hard_links" >&6; } - if test "$hard_links" = no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} - need_locks=warn - fi -else - need_locks=no -fi - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 -$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } - - runpath_var= - allow_undefined_flag_FC= - always_export_symbols_FC=no - archive_cmds_FC= - archive_expsym_cmds_FC= - compiler_needs_object_FC=no - enable_shared_with_static_runtimes_FC=no - export_dynamic_flag_spec_FC= - export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - hardcode_automatic_FC=no - hardcode_direct_FC=no - hardcode_direct_absolute_FC=no - hardcode_libdir_flag_spec_FC= - hardcode_libdir_separator_FC= - hardcode_minus_L_FC=no - hardcode_shlibpath_var_FC=unsupported - inherit_rpath_FC=no - link_all_deplibs_FC=unknown - module_cmds_FC= - module_expsym_cmds_FC= - old_archive_from_new_cmds_FC= - old_archive_from_expsyms_cmds_FC= - thread_safe_flag_spec_FC= - whole_archive_flag_spec_FC= - # include_expsyms should be a list of space-separated symbols to be *always* - # included in the symbol list - include_expsyms_FC= - # exclude_expsyms can be an extended regexp of symbols to exclude - # it will be wrapped by ` (' and `)$', so one must not match beginning or - # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', - # as well as any symbol that contains `d'. - exclude_expsyms_FC='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if - # the symbol is explicitly referenced. Since portable code cannot - # rely on this symbol name, it's probably fine to never include it in - # preloaded symbol tables. - # Exclude shared library initialization/finalization symbols. - extract_expsyms_cmds= - - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. 
- if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; - interix*) - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; - openbsd*) - with_gnu_ld=no - ;; - esac - - ld_shlibs_FC=yes - - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. - lt_use_gnu_ld_interface=no - if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility - # with the native linker. However, as the warning in the GNU ld - # block says, versions before 2.19.5* couldn't really create working - # shared libraries, regardless of the interface used. - case `$LD -v 2>&1` in - *\ \(GNU\ Binutils\)\ 2.19.5*) ;; - *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; - *\ \(GNU\ Binutils\)\ [3-9]*) ;; - *) - lt_use_gnu_ld_interface=yes - ;; - esac - ;; - *) - lt_use_gnu_ld_interface=yes - ;; - esac - fi - - if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty - wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - export_dynamic_flag_spec_FC='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then - whole_archive_flag_spec_FC="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - whole_archive_flag_spec_FC= - fi - supports_anon_versioning=no - case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... - *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... 
- *\ 2.11.*) ;; # other 2.11 versions - *) supports_anon_versioning=yes ;; - esac - - # See if GNU ld supports shared libraries. - case $host_os in - aix[3-9]*) - # On AIX/PPC, the GNU linker is very broken - if test "$host_cpu" != ia64; then - ld_shlibs_FC=no - cat <<_LT_EOF 1>&2 - -*** Warning: the GNU linker, at least up to release 2.19, is reported -*** to be unable to reliably create shared libraries on AIX. -*** Therefore, libtool is disabling shared libraries support. If you -*** really care for shared libraries, you may want to install binutils -*** 2.20 or above, or modify your PATH so that a non-GNU linker is found. -*** You will then need to restart the configuration process. - -_LT_EOF - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='' - ;; - m68k) - archive_cmds_FC='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_minus_L_FC=yes - ;; - esac - ;; - - beos*) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - allow_undefined_flag_FC=unsupported - # Joseph Beckenbach says some releases of gcc - # support --undefined. This deserves some investigation. FIXME - archive_cmds_FC='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - ld_shlibs_FC=no - fi - ;; - - cygwin* | mingw* | pw32* | cegcc*) - # _LT_TAGVAR(hardcode_libdir_flag_spec, FC) is actually meaningless, - # as there is no search path for DLLs. 
- hardcode_libdir_flag_spec_FC='-L$libdir' - export_dynamic_flag_spec_FC='${wl}--export-all-symbols' - allow_undefined_flag_FC=unsupported - always_export_symbols_FC=no - enable_shared_with_static_runtimes_FC=yes - export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' - exclude_expsyms_FC='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - # If the export-symbols file already is a .def file (1st line - # is EXPORTS), use it as is; otherwise, prepend... - archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - cp $export_symbols $output_objdir/$soname.def; - else - echo EXPORTS > $output_objdir/$soname.def; - cat $export_symbols >> $output_objdir/$soname.def; - fi~ - $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs_FC=no - fi - ;; - - haiku*) - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - link_all_deplibs_FC=yes - ;; - - interix[3-9]*) - hardcode_direct_FC=no - hardcode_shlibpath_var_FC=no - hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' - export_dynamic_flag_spec_FC='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. 
To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - archive_expsym_cmds_FC='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no - if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ - && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler - whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers - whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; - efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 - tmp_addflag=' -i_dynamic -nofor_main' ;; - ifc* | ifort*) # Intel Fortran compiler - tmp_addflag=' -nofor_main' ;; - lf95*) # Lahey Fortran 8.1 - 
whole_archive_flag_spec_FC= - tmp_sharedflag='--shared' ;; - xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) - tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 - whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object_FC=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 - whole_archive_flag_spec_FC='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object_FC=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac - archive_cmds_FC='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ - $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - whole_archive_flag_spec_FC='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - archive_cmds_FC='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - 
echo "local: *; };" >> $output_objdir/$libname.ver~ - $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac - else - ld_shlibs_FC=no - fi - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds_FC='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - - solaris*) - if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then - ld_shlibs_FC=no - cat <<_LT_EOF 1>&2 - -*** Warning: The releases 2.8.* of the GNU linker cannot reliably -*** create shared libraries on Solaris systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.9.1 or newer. Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. - -_LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) - case `$LD -v 2>&1` in - *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) - ld_shlibs_FC=no - cat <<_LT_EOF 1>&2 - -*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not -*** reliably create shared libraries on SCO systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.16.91.0.3 or newer. 
Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. - -_LT_EOF - ;; - *) - # For security reasons, it is highly recommended that you always - # use absolute paths for naming shared libraries, and exclude the - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi - ;; - esac - ;; - - sunos4*) - archive_cmds_FC='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' - wlarc= - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi - ;; - esac - - if test "$ld_shlibs_FC" = no; then - runpath_var= - hardcode_libdir_flag_spec_FC= - export_dynamic_flag_spec_FC= - whole_archive_flag_spec_FC= - fi - else - # PORTME fill in a description of your system's linker (not GNU ld) - case $host_os in - aix3*) - allow_undefined_flag_FC=unsupported - always_export_symbols_FC=yes - archive_expsym_cmds_FC='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories 
specified by -L. - hardcode_minus_L_FC=yes - if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - hardcode_direct_FC=unsupported - fi - ;; - - aix[4-9]*) - if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' - no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. - # -C means demangle to AIX nm, but means don't demangle with GNU nm - # Also, AIX nm treats weak defined symbols like other global - # defined symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then - export_symbols_cmds_FC='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else - export_symbols_cmds_FC='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we - # need to do runtime linking. - case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) - for ld_flag in $LDFLAGS; do - if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done - ;; - esac - - exp_sym_flag='-bexport' - no_entry_flag='-bnoentry' - fi - - # When large executables or shared objects are built, AIX ld can - # have problems creating the table of contents. If linking a library - # or program results in "error TOC overflow" add -mminimal-toc to - # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not - # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
- - archive_cmds_FC='' - hardcode_direct_FC=yes - hardcode_direct_absolute_FC=yes - hardcode_libdir_separator_FC=':' - link_all_deplibs_FC=yes - file_list_spec_FC='${wl}-f,' - - if test "$GCC" = yes; then - case $host_os in aix4.[012]|aix4.[012].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ - collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then - # We have reworked collect2 - : - else - # We have old collect2 - hardcode_direct_FC=unsupported - # It fails to find uninstalled libraries when the uninstalled - # path is not listed in the libpath. Setting hardcode_minus_L - # to unsupported forces relinking - hardcode_minus_L_FC=yes - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_libdir_separator_FC= - fi - ;; - esac - shared_flag='-shared' - if test "$aix_use_runtimelinking" = yes; then - shared_flag="$shared_flag "'${wl}-G' - fi - else - # not using gcc - if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else - if test "$aix_use_runtimelinking" = yes; then - shared_flag='${wl}-G' - else - shared_flag='${wl}-bM:SRE' - fi - fi - fi - - export_dynamic_flag_spec_FC='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - always_export_symbols_FC=yes - if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - allow_undefined_flag_FC='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
- if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath -else - if ${lt_cv_aix_libpath__FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat > conftest.$ac_ext <<_ACEOF - program main - - end -_ACEOF -if ac_fn_fc_try_link "$LINENO"; then : - - lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\([^ ]*\) *$/\1/ - p - } - }' - lt_cv_aix_libpath__FC=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - # Check for a 64-bit object if we didn't find anything. - if test -z "$lt_cv_aix_libpath__FC"; then - lt_cv_aix_libpath__FC=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath__FC"; then - lt_cv_aix_libpath__FC="/usr/lib:/lib" - fi - -fi - - aix_libpath=$lt_cv_aix_libpath__FC -fi - - hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds_FC='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else - if test "$host_cpu" = ia64; then - hardcode_libdir_flag_spec_FC='${wl}-R $libdir:/usr/lib:/lib' - allow_undefined_flag_FC="-z nodefs" - archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. 
- if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath -else - if ${lt_cv_aix_libpath__FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat > conftest.$ac_ext <<_ACEOF - program main - - end -_ACEOF -if ac_fn_fc_try_link "$LINENO"; then : - - lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\([^ ]*\) *$/\1/ - p - } - }' - lt_cv_aix_libpath__FC=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - # Check for a 64-bit object if we didn't find anything. - if test -z "$lt_cv_aix_libpath__FC"; then - lt_cv_aix_libpath__FC=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath__FC"; then - lt_cv_aix_libpath__FC="/usr/lib:/lib" - fi - -fi - - aix_libpath=$lt_cv_aix_libpath__FC -fi - - hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. - no_undefined_flag_FC=' ${wl}-bernotok' - allow_undefined_flag_FC=' ${wl}-berok' - if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. - whole_archive_flag_spec_FC='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - whole_archive_flag_spec_FC='$convenience' - fi - archive_cmds_need_lc_FC=yes - # This is similar to how AIX traditionally builds its shared libraries. 
- archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='' - ;; - m68k) - archive_cmds_FC='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_minus_L_FC=yes - ;; - esac - ;; - - bsdi[45]*) - export_dynamic_flag_spec_FC=-rdynamic - ;; - - cygwin* | mingw* | pw32* | cegcc*) - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. - case $cc_basename in - cl*) - # Native MSVC - hardcode_libdir_flag_spec_FC=' ' - allow_undefined_flag_FC=unsupported - always_export_symbols_FC=yes - file_list_spec_FC='@' - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds_FC='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' - archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; - else - sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; - fi~ - $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ - linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, FC)='true' - enable_shared_with_static_runtimes_FC=yes - exclude_expsyms_FC='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' - export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' - # Don't use ranlib - old_postinstall_cmds_FC='chmod 644 $oldlib' - postlink_cmds_FC='lt_outputfile="@OUTPUT@"~ - lt_tool_outputfile="@TOOL_OUTPUT@"~ - case $lt_outputfile in - *.exe|*.EXE) ;; - *) - lt_outputfile="$lt_outputfile.exe" - lt_tool_outputfile="$lt_tool_outputfile.exe" - ;; - esac~ - if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then - $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; - $RM "$lt_outputfile.manifest"; - fi' - ;; - *) - # Assume MSVC wrapper - hardcode_libdir_flag_spec_FC=' ' - allow_undefined_flag_FC=unsupported - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds_FC='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. - old_archive_from_new_cmds_FC='true' - # FIXME: Should let the user specify the lib program. - old_archive_cmds_FC='lib -OUT:$oldlib$oldobjs$old_deplibs' - enable_shared_with_static_runtimes_FC=yes - ;; - esac - ;; - - darwin* | rhapsody*) - - - archive_cmds_need_lc_FC=no - hardcode_direct_FC=no - hardcode_automatic_FC=yes - hardcode_shlibpath_var_FC=unsupported - if test "$lt_cv_ld_force_load" = "yes"; then - whole_archive_flag_spec_FC='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' - compiler_needs_object_FC=yes - else - whole_archive_flag_spec_FC='' - fi - link_all_deplibs_FC=yes - allow_undefined_flag_FC="$_lt_dar_allow_undefined" - case $cc_basename in - ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac - if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all - archive_cmds_FC="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" - module_cmds_FC="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" - archive_expsym_cmds_FC="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" - module_expsym_cmds_FC="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - - else - ld_shlibs_FC=no - fi - - ;; - 
- dgux*) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_shlibpath_var_FC=no - ;; - - # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor - # support. Future versions do this automatically, but an explicit c++rt0.o - # does not break anything, and helps significantly (at the cost of a little - # extra space). - freebsd2.2*) - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' - hardcode_libdir_flag_spec_FC='-R$libdir' - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - # Unfortunately, older versions of FreeBSD 2 do not have this feature. - freebsd2.*) - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=yes - hardcode_minus_L_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. - freebsd* | dragonfly*) - archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec_FC='-R$libdir' - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - hpux9*) - if test "$GCC" = yes; then - archive_cmds_FC='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds_FC='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi - hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_direct_FC=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- hardcode_minus_L_FC=yes - export_dynamic_flag_spec_FC='${wl}-E' - ;; - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then - archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi - if test "$with_gnu_ld" = no; then - hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_direct_FC=yes - hardcode_direct_absolute_FC=yes - export_dynamic_flag_spec_FC='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L_FC=yes - fi - ;; - - hpux11*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) - archive_cmds_FC='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) - archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - fi - if test "$with_gnu_ld" = no; then - hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - - case $host_cpu in - hppa*64*|ia64*) - hardcode_direct_FC=no - hardcode_shlibpath_var_FC=no - ;; - *) - hardcode_direct_FC=yes - hardcode_direct_absolute_FC=yes - export_dynamic_flag_spec_FC='${wl}-E' - - # hardcode_minus_L: Not really 
in the search PATH, - # but as the default location of the library. - hardcode_minus_L_FC=yes - ;; - esac - fi - ;; - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. - # This should be the same for all languages, so no per-tag cache variable. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } -if ${lt_cv_irix_exported_symbol+:} false; then : - $as_echo_n "(cached) " >&6 -else - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat > conftest.$ac_ext <<_ACEOF - - subroutine foo - end -_ACEOF -if ac_fn_fc_try_link "$LINENO"; then : - lt_cv_irix_exported_symbol=yes -else - lt_cv_irix_exported_symbol=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS="$save_LDFLAGS" -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -$as_echo "$lt_cv_irix_exported_symbol" >&6; } - if test "$lt_cv_irix_exported_symbol" = yes; then - archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` 
-update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - archive_cmds_need_lc_FC='no' - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - inherit_rpath_FC=yes - link_all_deplibs_FC=yes - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out - else - archive_cmds_FC='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF - fi - hardcode_libdir_flag_spec_FC='-R$libdir' - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - newsos6) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=yes - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_shlibpath_var_FC=no - ;; - - *nto* | *qnx*) - ;; - - openbsd*) - if test -f /usr/libexec/ld.so; then - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - hardcode_direct_absolute_FC=yes - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' - hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' - export_dynamic_flag_spec_FC='${wl}-E' - else - case $host_os in - openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec_FC='-R$libdir' - ;; - *) - archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - 
hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' - ;; - esac - fi - else - ld_shlibs_FC=no - fi - ;; - - os2*) - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_minus_L_FC=yes - allow_undefined_flag_FC=unsupported - archive_cmds_FC='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' - old_archive_from_new_cmds_FC='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) - if test "$GCC" = yes; then - allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - allow_undefined_flag_FC=' -expect_unresolved \*' - archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - archive_cmds_need_lc_FC='no' - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds_FC='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec_FC='${wl}-rpath 
${wl}$libdir' - else - allow_undefined_flag_FC=' -expect_unresolved \*' - archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds_FC='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ - $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - hardcode_libdir_flag_spec_FC='-rpath $libdir' - fi - archive_cmds_need_lc_FC='no' - hardcode_libdir_separator_FC=: - ;; - - solaris*) - no_undefined_flag_FC=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' - archive_cmds_FC='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' - archive_cmds_FC='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) - wlarc='${wl}' - archive_cmds_FC='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat 
$export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi - hardcode_libdir_flag_spec_FC='-R$libdir' - hardcode_shlibpath_var_FC=no - case $host_os in - solaris2.[0-5] | solaris2.[0-5].*) ;; - *) - # The compiler driver will combine and reorder linker options, - # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) - if test "$GCC" = yes; then - whole_archive_flag_spec_FC='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - whole_archive_flag_spec_FC='-z allextract$convenience -z defaultextract' - fi - ;; - esac - link_all_deplibs_FC=yes - ;; - - sunos4*) - if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. - archive_cmds_FC='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_direct_FC=yes - hardcode_minus_L_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - sysv4) - case $host_vendor in - sni) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=yes # is this really true??? - ;; - siemens) - ## LD is ld it makes a PLAMLIB - ## CC just makes a GrossModule. 
- archive_cmds_FC='$LD -G -o $lib $libobjs $deplibs $linker_flags' - reload_cmds_FC='$CC -r -o $output$reload_objs' - hardcode_direct_FC=no - ;; - motorola) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=no #Motorola manual says yes, but my tests say they lie - ;; - esac - runpath_var='LD_RUN_PATH' - hardcode_shlibpath_var_FC=no - ;; - - sysv4.3*) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_shlibpath_var_FC=no - export_dynamic_flag_spec_FC='-Bexport' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_shlibpath_var_FC=no - runpath_var=LD_RUN_PATH - hardcode_runpath_var=yes - ld_shlibs_FC=yes - fi - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) - no_undefined_flag_FC='${wl}-z,text' - archive_cmds_need_lc_FC=no - hardcode_shlibpath_var_FC=no - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) - # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
- no_undefined_flag_FC='${wl}-z,text' - allow_undefined_flag_FC='${wl}-z,nodefs' - archive_cmds_need_lc_FC=no - hardcode_shlibpath_var_FC=no - hardcode_libdir_flag_spec_FC='${wl}-R,$libdir' - hardcode_libdir_separator_FC=':' - link_all_deplibs_FC=yes - export_dynamic_flag_spec_FC='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - uts4*) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_shlibpath_var_FC=no - ;; - - *) - ld_shlibs_FC=no - ;; - esac - - if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) - export_dynamic_flag_spec_FC='${wl}-Blargedynsym' - ;; - esac - fi - fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_FC" >&5 -$as_echo "$ld_shlibs_FC" >&6; } -test "$ld_shlibs_FC" = no && can_build_shared=no - -with_gnu_ld_FC=$with_gnu_ld - - - - - - -# -# Do we need to explicitly link libc? -# -case "x$archive_cmds_need_lc_FC" in -x|xyes) - # Assume -lc should be added - archive_cmds_need_lc_FC=yes - - if test "$enable_shared" = yes && test "$GCC" = yes; then - case $archive_cmds_FC in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. - ;; - '$CC '*) - # Test whether the compiler implicitly links with -lc since on some - # systems, -lgcc has to come before -lc. If gcc already passes -lc - # to ld, don't add -lc before -lgcc. 
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 -$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } -if ${lt_cv_archive_cmds_need_lc_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - $RM conftest* - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } 2>conftest.err; then - soname=conftest - lib=conftest - libobjs=conftest.$ac_objext - deplibs= - wl=$lt_prog_compiler_wl_FC - pic_flag=$lt_prog_compiler_pic_FC - compiler_flags=-v - linker_flags=-v - verstring= - output_objdir=. - libname=conftest - lt_save_allow_undefined_flag=$allow_undefined_flag_FC - allow_undefined_flag_FC= - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_FC 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 - (eval $archive_cmds_FC 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - then - lt_cv_archive_cmds_need_lc_FC=no - else - lt_cv_archive_cmds_need_lc_FC=yes - fi - allow_undefined_flag_FC=$lt_save_allow_undefined_flag - else - cat conftest.err 1>&5 - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc_FC" >&5 -$as_echo "$lt_cv_archive_cmds_need_lc_FC" >&6; } - archive_cmds_need_lc_FC=$lt_cv_archive_cmds_need_lc_FC - ;; - esac - fi - ;; -esac - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 -$as_echo_n "checking dynamic linker characteristics... 
" >&6; } - -library_names_spec= -libname_spec='lib$name' -soname_spec= -shrext_cmds=".so" -postinstall_cmds= -postuninstall_cmds= -finish_cmds= -finish_eval= -shlibpath_var= -shlibpath_overrides_runpath=unknown -version_type=none -dynamic_linker="$host_os ld.so" -sys_lib_dlsearch_path_spec="/lib /usr/lib" -need_lib_prefix=unknown -hardcode_into_libs=no - -# when you set need_version to no, make sure it does not cause -set_version -# flags to be left without arguments -need_version=unknown - -case $host_os in -aix3*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. - soname_spec='${libname}${release}${shared_ext}$major' - ;; - -aix[4-9]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes - if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 - library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with - # the line `#! .'. This would cause the generated library to - # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. - case $host_os in - aix4 | aix4.[01] | aix4.[01].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' - echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac - # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. 
- if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib.so - # instead of lib.a to let people know that these are not - # typical AIX shared libraries. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. - library_names_spec='${libname}${release}.a $libname.a' - soname_spec='${libname}${release}${shared_ext}$major' - fi - shlibpath_var=LIBPATH - fi - ;; - -amigaos*) - case $host_cpu in - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. - # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. 
- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - -beos*) - library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; - -bsdi[45]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" - sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" - # the default ld.so.conf also contains /usr/contrib/lib and - # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow - # libtool to hard-code these into programs - ;; - -cygwin* | mingw* | pw32* | cegcc*) - version_type=windows - shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - - case $GCC,$cc_basename in - yes,*) - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ - chmod a+x \$dldir/$dlname~ - if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then - eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; - fi' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $RM \$dlpath' - shlibpath_overrides_runpath=yes - - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' - soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix - soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' - ;; - - *,cl*) - # Native MSVC - libname_spec='$name' - soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) - sys_lib_search_path_spec= - lt_save_ifs=$IFS - IFS=';' - for lt_path in $LIB - do - IFS=$lt_save_ifs - # Let DOS variable expansion print the short 8.3 style file name. - lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` - sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" - done - IFS=$lt_save_ifs - # Convert to MSYS style. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` - ;; - cygwin*) - # Convert to unix form, then to dos form, then back to unix form - # but this time dos style (no spaces!) so that the unix form looks - # like /cygdrive/c/PROGRA~1:/cygdr... 
- sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` - sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) - sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` - else - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - fi - # FIXME: find the short name or the path components, as spaces are - # common. (e.g. "Program Files" -> "PROGRA~1") - ;; - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $RM \$dlpath' - shlibpath_overrides_runpath=yes - dynamic_linker='Win32 link.exe' - ;; - - *) - # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; - -darwin* | rhapsody*) - dynamic_linker="$host_os dyld" - version_type=darwin - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' - soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' - - sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' - ;; - -dgux*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -freebsd* | dragonfly*) - # DragonFly does not have aout. When/if they implement a new - # versioning mechanism, adjust this. - if test -x /usr/bin/objformat; then - objformat=`/usr/bin/objformat` - else - case $host_os in - freebsd[23].*) objformat=aout ;; - *) objformat=elf ;; - esac - fi - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac - shlibpath_var=LD_LIBRARY_PATH - case $host_os in - freebsd2.*) - shlibpath_overrides_runpath=yes - ;; - freebsd3.[01]* | freebsdelf3.[01]*) - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ - freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - *) # from 4.6 on, and DragonFly - shlibpath_overrides_runpath=yes - 
hardcode_into_libs=yes - ;; - esac - ;; - -gnu*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -hpux9* | hpux10* | hpux11*) - # Give a soname corresponding to the major version so that dld.sl refuses to - # link against other versions. - version_type=sunos - need_lib_prefix=no - need_version=no - case $host_cpu in - ia64*) - shrext_cmds='.so' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" - fi - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - *) - shrext_cmds='.sl' - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
- postinstall_cmds='chmod 555 $lib' - # or fails outright, so override atomically: - install_override_mode=555 - ;; - -interix[3-9]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -irix5* | irix6* | nonstopux*) - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) - if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix - fi ;; - esac - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= - ;; - *) - case $LD in # libtool.m4 will add one of these switches to LD - *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") - libsuff= shlibsuff= libmagic=32-bit;; - *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") - libsuff=32 shlibsuff=N32 libmagic=N32;; - *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") - libsuff=64 shlibsuff=64 libmagic=64-bit;; - *) libsuff= shlibsuff= libmagic=never-match;; - esac - ;; - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no - sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" - sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -# No shared lib support for Linux oldld, aout, or coff. -linux*oldld* | linux*aout* | linux*coff*) - dynamic_linker=no - ;; - -# This must be glibc/ELF. 
-linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - - # Some binutils ld are patched to set DT_RUNPATH - if ${lt_cv_shlibpath_overrides_runpath+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_shlibpath_overrides_runpath=no - save_LDFLAGS=$LDFLAGS - save_libdir=$libdir - eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_FC\"; \ - LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_FC\"" - cat > conftest.$ac_ext <<_ACEOF - program main - - end -_ACEOF -if ac_fn_fc_try_link "$LINENO"; then : - if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : - lt_cv_shlibpath_overrides_runpath=yes -fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS=$save_LDFLAGS - libdir=$save_libdir - -fi - - shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath - - # This implies no fast_install, which is unacceptable. - # Some rework will be needed to allow for fast_install - # before this can be enabled. - hardcode_into_libs=yes - - # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` - sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on - # powerpc, because MkLinux only supported shared libraries with the - # GNU dynamic linker. 
Since this was broken with cross compilers, - # most powerpc-linux boxes support dynamic linking these days and - # people can always --disable-shared, the test was removed, and we - # assume the GNU/Linux dynamic linker is in use. - dynamic_linker='GNU/Linux ld.so' - ;; - -netbsd*) - version_type=sunos - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - -newsos6) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; - -*nto* | *qnx*) - version_type=qnx - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - -openbsd*) - version_type=sunos - sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no - # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
- case $host_os in - openbsd3.3 | openbsd3.3.*) need_version=yes ;; - *) need_version=no ;; - esac - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - case $host_os in - openbsd2.[89] | openbsd2.[89].*) - shlibpath_overrides_runpath=no - ;; - *) - shlibpath_overrides_runpath=yes - ;; - esac - else - shlibpath_overrides_runpath=yes - fi - ;; - -os2*) - libname_spec='$name' - shrext_cmds=".dll" - need_lib_prefix=no - library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' - shlibpath_var=LIBPATH - ;; - -osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" - sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - -rdos*) - dynamic_linker=no - ;; - -solaris*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - # ldd complains unless libraries are executable - postinstall_cmds='chmod +x $lib' - ;; - -sunos4*) - version_type=sunos - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - 
shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes - ;; - -sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) - shlibpath_overrides_runpath=no - need_lib_prefix=no - runpath_var=LD_RUN_PATH - ;; - siemens) - need_lib_prefix=no - ;; - motorola) - need_lib_prefix=no - need_version=no - shlibpath_overrides_runpath=no - sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' - ;; - esac - ;; - -sysv4*MP*) - if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' - soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - -sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) - version_type=freebsd-elf - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' - case $host_os in - sco3.2v5*) - sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" - ;; - esac - fi - sys_lib_dlsearch_path_spec='/usr/lib' - ;; - -tpf*) - # TPF is a cross-target only. Preferred cross-host = GNU/Linux. 
- version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -uts4*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -*) - dynamic_linker=no - ;; -esac -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 -$as_echo "$dynamic_linker" >&6; } -test "$dynamic_linker" = no && can_build_shared=no - -variables_saved_for_relink="PATH $shlibpath_var $runpath_var" -if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" -fi - -if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then - sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" -fi -if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then - sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 -$as_echo_n "checking how to hardcode library paths into programs... " >&6; } -hardcode_action_FC= -if test -n "$hardcode_libdir_flag_spec_FC" || - test -n "$runpath_var_FC" || - test "X$hardcode_automatic_FC" = "Xyes" ; then - - # We can hardcode non-existent directories. 
- if test "$hardcode_direct_FC" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one - ## test "$_LT_TAGVAR(hardcode_shlibpath_var, FC)" != no && - test "$hardcode_minus_L_FC" != no; then - # Linking always hardcodes the temporary library directory. - hardcode_action_FC=relink - else - # We can link without hardcoding, and we can hardcode nonexisting dirs. - hardcode_action_FC=immediate - fi -else - # We cannot hardcode anything, or else we can only hardcode existing - # directories. - hardcode_action_FC=unsupported -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_FC" >&5 -$as_echo "$hardcode_action_FC" >&6; } - -if test "$hardcode_action_FC" = relink || - test "$inherit_rpath_FC" = yes; then - # Fast installation is not supported - enable_fast_install=no -elif test "$shlibpath_overrides_runpath" = yes || - test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless -fi - - - - - - - - fi # test -n "$compiler" - - GCC=$lt_save_GCC - CC=$lt_save_CC - CFLAGS=$lt_save_CFLAGS -fi # test "$_lt_disable_FC" != yes - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - - - - - - - - - - ac_config_commands="$ac_config_commands libtool" - - - - -# Only expand once: - - - -ac_config_files="$ac_config_files Makefile lib/Makefile lib/xmlFailures/Makefile lib/xmlSuccesses/Makefile prog/Makefile" - -cat >confcache <<\_ACEOF -# This file is a shell script that caches the results of configure -# tests run on this system so they can be shared between configure -# scripts and configure runs, see configure's option --config-cache. -# It is not useful on other systems. 
If it contains results you don't -# want to keep, you may remove or edit it. -# -# config.status only pays attention to the cache file if you give it -# the --recheck option to rerun configure. -# -# `ac_cv_env_foo' variables (set or unset) will be overridden when -# loading this file, other *unset* `ac_cv_foo' will be assigned the -# following values. - -_ACEOF - -# The following way of writing the cache mishandles newlines in values, -# but we know of no workaround that is simple, portable, and efficient. -# So, we kill variables containing newlines. -# Ultrix sh set writes to stderr and can't be redirected directly, -# and sets the high bit in the cache file unless we assign to the vars. -( - for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - - (set) 2>&1 | - case $as_nl`(ac_space=' '; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - # `set' does not quote correctly, so add quotes: double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \. - sed -n \ - "s/'/'\\\\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" - ;; #( - *) - # `set' quotes correctly as required by POSIX, so do not add quotes. 
- sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) | - sed ' - /^ac_cv_env_/b end - t clear - :clear - s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ - t end - s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ - :end' >>confcache -if diff "$cache_file" confcache >/dev/null 2>&1; then :; else - if test -w "$cache_file"; then - if test "x$cache_file" != "x/dev/null"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 -$as_echo "$as_me: updating cache $cache_file" >&6;} - if test ! -f "$cache_file" || test -h "$cache_file"; then - cat confcache >"$cache_file" - else - case $cache_file in #( - */* | ?:*) - mv -f confcache "$cache_file"$$ && - mv -f "$cache_file"$$ "$cache_file" ;; #( - *) - mv -f confcache "$cache_file" ;; - esac - fi - fi - else - { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 -$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} - fi -fi -rm -f confcache - -test "x$prefix" = xNONE && prefix=$ac_default_prefix -# Let make expand exec_prefix. -test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' - -DEFS=-DHAVE_CONFIG_H - -ac_libobjs= -ac_ltlibobjs= -U= -for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue - # 1. Remove the extension, and $U if already installed. - ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' - ac_i=`$as_echo "$ac_i" | sed "$ac_script"` - # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR - # will be set to the directory where LIBOBJS objects are built. - as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" - as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' -done -LIBOBJS=$ac_libobjs - -LTLIBOBJS=$ac_ltlibobjs - - - if test -n "$EXEEXT"; then - am__EXEEXT_TRUE= - am__EXEEXT_FALSE='#' -else - am__EXEEXT_TRUE='#' - am__EXEEXT_FALSE= -fi - -if test -z "${DEBUG_TRUE}" && test -z "${DEBUG_FALSE}"; then - as_fn_error $? "conditional \"DEBUG\" was never defined. 
-Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${ENABLE_UDUNITS_1_TRUE}" && test -z "${ENABLE_UDUNITS_1_FALSE}"; then - as_fn_error $? "conditional \"ENABLE_UDUNITS_1\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${ENABLE_UDUNITS_1_TRUE}" && test -z "${ENABLE_UDUNITS_1_FALSE}"; then - as_fn_error $? "conditional \"ENABLE_UDUNITS_1\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then - as_fn_error $? "conditional \"AMDEP\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then - as_fn_error $? "conditional \"am__fastdepCC\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${HAVE_CUNIT_TRUE}" && test -z "${HAVE_CUNIT_FALSE}"; then - as_fn_error $? "conditional \"HAVE_CUNIT\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi - -: "${CONFIG_STATUS=./config.status}" -ac_write_fail=0 -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files $CONFIG_STATUS" -{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 -$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} -as_write_fail=0 -cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 -#! $SHELL -# Generated by $as_me. -# Run this file to recreate the current configuration. -# Compiler output produced by configure, useful for debugging -# configure, is in config.log if it exists. - -debug=false -ac_cs_recheck=false -ac_cs_silent=false - -SHELL=\${CONFIG_SHELL-$SHELL} -export SHELL -_ASEOF -cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 -## -------------------- ## -## M4sh Initialization. 
## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. -if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. 
-if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! -f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. 
Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? 
-eq 1` - } -fi # as_fn_arith - - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! 
-f conf$$.exe || - as_ln_s='cp -p' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -p' - fi -else - as_ln_s='cp -p' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. 
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -exec 6>&1 -## ----------------------------------- ## -## Main body of $CONFIG_STATUS script. ## -## ----------------------------------- ## -_ASEOF -test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# Save the log message, to keep $0 and so on meaningful, and to -# report actual input values of CONFIG_FILES etc. instead of their -# values after options handling. -ac_log=" -This file was extended by UDUNITS $as_me 2.2.17, which was -generated by GNU Autoconf 2.68. Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS - CONFIG_LINKS = $CONFIG_LINKS - CONFIG_COMMANDS = $CONFIG_COMMANDS - $ $0 $@ - -on `(hostname || uname -n) 2>/dev/null | sed 1q` -" - -_ACEOF - -case $ac_config_files in *" -"*) set x $ac_config_files; shift; ac_config_files=$*;; -esac - -case $ac_config_headers in *" -"*) set x $ac_config_headers; shift; ac_config_headers=$*;; -esac - - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# Files that config.status was made for. -config_files="$ac_config_files" -config_headers="$ac_config_headers" -config_commands="$ac_config_commands" - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -ac_cs_usage="\ -\`$as_me' instantiates files and other configuration actions -from templates according to the current configuration. Unless the files -and actions are specified as TAGs, all are instantiated by default. - -Usage: $0 [OPTION]... [TAG]... 
- - -h, --help print this help, then exit - -V, --version print version number and configuration settings, then exit - --config print configuration, then exit - -q, --quiet, --silent - do not print progress messages - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - --header=FILE[:TEMPLATE] - instantiate the configuration header FILE - -Configuration files: -$config_files - -Configuration headers: -$config_headers - -Configuration commands: -$config_commands - -Report bugs to ." - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" -ac_cs_version="\\ -UDUNITS config.status 2.2.17 -configured by $0, generated by GNU Autoconf 2.68, - with options \\"\$ac_cs_config\\" - -Copyright (C) 2010 Free Software Foundation, Inc. -This config.status script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it." - -ac_pwd='$ac_pwd' -srcdir='$srcdir' -INSTALL='$INSTALL' -MKDIR_P='$MKDIR_P' -AWK='$AWK' -test -n "\$AWK" || AWK=awk -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# The default lists apply if the user does not specify any file. -ac_need_defaults=: -while test $# != 0 -do - case $1 in - --*=?*) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` - ac_shift=: - ;; - --*=) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg= - ac_shift=: - ;; - *) - ac_option=$1 - ac_optarg=$2 - ac_shift=shift - ;; - esac - - case $ac_option in - # Handling of the options. 
- -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) - ac_cs_recheck=: ;; - --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - $as_echo "$ac_cs_version"; exit ;; - --config | --confi | --conf | --con | --co | --c ) - $as_echo "$ac_cs_config"; exit ;; - --debug | --debu | --deb | --de | --d | -d ) - debug=: ;; - --file | --fil | --fi | --f ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - '') as_fn_error $? "missing file argument" ;; - esac - as_fn_append CONFIG_FILES " '$ac_optarg'" - ac_need_defaults=false;; - --header | --heade | --head | --hea ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - as_fn_append CONFIG_HEADERS " '$ac_optarg'" - ac_need_defaults=false;; - --he | --h) - # Conflict between --help and --header - as_fn_error $? "ambiguous option: \`$1' -Try \`$0 --help' for more information.";; - --help | --hel | -h ) - $as_echo "$ac_cs_usage"; exit ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil | --si | --s) - ac_cs_silent=: ;; - - # This is an error. - -*) as_fn_error $? "unrecognized option: \`$1' -Try \`$0 --help' for more information." ;; - - *) as_fn_append ac_config_targets " $1" - ac_need_defaults=false ;; - - esac - shift -done - -ac_configure_extra_args= - -if $ac_cs_silent; then - exec 6>/dev/null - ac_configure_extra_args="$ac_configure_extra_args --silent" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -if \$ac_cs_recheck; then - set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion - shift - \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 - CONFIG_SHELL='$SHELL' - export CONFIG_SHELL - exec "\$@" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -exec 5>>config.log -{ - echo - sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX -## Running $as_me. 
## -_ASBOX - $as_echo "$ac_log" -} >&5 - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# -# INIT-COMMANDS -# -AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" - - -# The HP-UX ksh and POSIX shell print the target directory to stdout -# if CDPATH is set. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -sed_quote_subst='$sed_quote_subst' -double_quote_subst='$double_quote_subst' -delay_variable_subst='$delay_variable_subst' -macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`' -macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`' -enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`' -enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' -pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' -enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' -SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' -ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' -PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`' -host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`' -host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`' -host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`' -build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`' -build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`' -build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`' -SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`' -Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`' -GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`' -EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`' -FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`' -LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`' -NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`' -LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`' -max_cmd_len='`$ECHO 
"$max_cmd_len" | $SED "$delay_single_quote_subst"`' -ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`' -exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' -lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' -lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' -lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' -reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' -reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' -OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' -deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' -file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' -AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' -AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' -STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' -RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' -old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`' -old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`' -lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`' 
-CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`' -CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`' -compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`' -GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`' -lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`' -lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' -lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' -lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' -nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' -objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' -MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' -lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' -need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' -DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' -NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' -LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`' -OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`' 
-libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`' -shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`' -extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`' -archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`' -enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`' -export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`' -whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`' -compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`' -old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`' -old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`' -archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`' -archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`' -module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`' -module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`' -with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`' -allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`' -no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`' -hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`' -hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`' -hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`' -hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`' -hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`' 
-hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`' -hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' -inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' -link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' -always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' -export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' -exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' -include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' -prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' -file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' -variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' -need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`' -version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`' -runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`' -shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`' -shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`' -libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`' -library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`' -soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`' -install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`' -postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`' -postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED 
"$delay_single_quote_subst"`' -finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`' -finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' -hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`' -sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' -sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`' -hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' -enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' -enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' -enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`' -old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`' -striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`' -compiler_lib_search_dirs='`$ECHO "$compiler_lib_search_dirs" | $SED "$delay_single_quote_subst"`' -predep_objects='`$ECHO "$predep_objects" | $SED "$delay_single_quote_subst"`' -postdep_objects='`$ECHO "$postdep_objects" | $SED "$delay_single_quote_subst"`' -predeps='`$ECHO "$predeps" | $SED "$delay_single_quote_subst"`' -postdeps='`$ECHO "$postdeps" | $SED "$delay_single_quote_subst"`' -compiler_lib_search_path='`$ECHO "$compiler_lib_search_path" | $SED "$delay_single_quote_subst"`' -LD_FC='`$ECHO "$LD_FC" | $SED "$delay_single_quote_subst"`' -reload_flag_FC='`$ECHO "$reload_flag_FC" | $SED "$delay_single_quote_subst"`' -reload_cmds_FC='`$ECHO "$reload_cmds_FC" | $SED "$delay_single_quote_subst"`' -old_archive_cmds_FC='`$ECHO "$old_archive_cmds_FC" | $SED "$delay_single_quote_subst"`' -compiler_FC='`$ECHO "$compiler_FC" | $SED "$delay_single_quote_subst"`' -GCC_FC='`$ECHO "$GCC_FC" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_no_builtin_flag_FC='`$ECHO "$lt_prog_compiler_no_builtin_flag_FC" | $SED 
"$delay_single_quote_subst"`' -lt_prog_compiler_pic_FC='`$ECHO "$lt_prog_compiler_pic_FC" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_wl_FC='`$ECHO "$lt_prog_compiler_wl_FC" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_static_FC='`$ECHO "$lt_prog_compiler_static_FC" | $SED "$delay_single_quote_subst"`' -lt_cv_prog_compiler_c_o_FC='`$ECHO "$lt_cv_prog_compiler_c_o_FC" | $SED "$delay_single_quote_subst"`' -archive_cmds_need_lc_FC='`$ECHO "$archive_cmds_need_lc_FC" | $SED "$delay_single_quote_subst"`' -enable_shared_with_static_runtimes_FC='`$ECHO "$enable_shared_with_static_runtimes_FC" | $SED "$delay_single_quote_subst"`' -export_dynamic_flag_spec_FC='`$ECHO "$export_dynamic_flag_spec_FC" | $SED "$delay_single_quote_subst"`' -whole_archive_flag_spec_FC='`$ECHO "$whole_archive_flag_spec_FC" | $SED "$delay_single_quote_subst"`' -compiler_needs_object_FC='`$ECHO "$compiler_needs_object_FC" | $SED "$delay_single_quote_subst"`' -old_archive_from_new_cmds_FC='`$ECHO "$old_archive_from_new_cmds_FC" | $SED "$delay_single_quote_subst"`' -old_archive_from_expsyms_cmds_FC='`$ECHO "$old_archive_from_expsyms_cmds_FC" | $SED "$delay_single_quote_subst"`' -archive_cmds_FC='`$ECHO "$archive_cmds_FC" | $SED "$delay_single_quote_subst"`' -archive_expsym_cmds_FC='`$ECHO "$archive_expsym_cmds_FC" | $SED "$delay_single_quote_subst"`' -module_cmds_FC='`$ECHO "$module_cmds_FC" | $SED "$delay_single_quote_subst"`' -module_expsym_cmds_FC='`$ECHO "$module_expsym_cmds_FC" | $SED "$delay_single_quote_subst"`' -with_gnu_ld_FC='`$ECHO "$with_gnu_ld_FC" | $SED "$delay_single_quote_subst"`' -allow_undefined_flag_FC='`$ECHO "$allow_undefined_flag_FC" | $SED "$delay_single_quote_subst"`' -no_undefined_flag_FC='`$ECHO "$no_undefined_flag_FC" | $SED "$delay_single_quote_subst"`' -hardcode_libdir_flag_spec_FC='`$ECHO "$hardcode_libdir_flag_spec_FC" | $SED "$delay_single_quote_subst"`' -hardcode_libdir_separator_FC='`$ECHO "$hardcode_libdir_separator_FC" | $SED 
"$delay_single_quote_subst"`' -hardcode_direct_FC='`$ECHO "$hardcode_direct_FC" | $SED "$delay_single_quote_subst"`' -hardcode_direct_absolute_FC='`$ECHO "$hardcode_direct_absolute_FC" | $SED "$delay_single_quote_subst"`' -hardcode_minus_L_FC='`$ECHO "$hardcode_minus_L_FC" | $SED "$delay_single_quote_subst"`' -hardcode_shlibpath_var_FC='`$ECHO "$hardcode_shlibpath_var_FC" | $SED "$delay_single_quote_subst"`' -hardcode_automatic_FC='`$ECHO "$hardcode_automatic_FC" | $SED "$delay_single_quote_subst"`' -inherit_rpath_FC='`$ECHO "$inherit_rpath_FC" | $SED "$delay_single_quote_subst"`' -link_all_deplibs_FC='`$ECHO "$link_all_deplibs_FC" | $SED "$delay_single_quote_subst"`' -always_export_symbols_FC='`$ECHO "$always_export_symbols_FC" | $SED "$delay_single_quote_subst"`' -export_symbols_cmds_FC='`$ECHO "$export_symbols_cmds_FC" | $SED "$delay_single_quote_subst"`' -exclude_expsyms_FC='`$ECHO "$exclude_expsyms_FC" | $SED "$delay_single_quote_subst"`' -include_expsyms_FC='`$ECHO "$include_expsyms_FC" | $SED "$delay_single_quote_subst"`' -prelink_cmds_FC='`$ECHO "$prelink_cmds_FC" | $SED "$delay_single_quote_subst"`' -postlink_cmds_FC='`$ECHO "$postlink_cmds_FC" | $SED "$delay_single_quote_subst"`' -file_list_spec_FC='`$ECHO "$file_list_spec_FC" | $SED "$delay_single_quote_subst"`' -hardcode_action_FC='`$ECHO "$hardcode_action_FC" | $SED "$delay_single_quote_subst"`' -compiler_lib_search_dirs_FC='`$ECHO "$compiler_lib_search_dirs_FC" | $SED "$delay_single_quote_subst"`' -predep_objects_FC='`$ECHO "$predep_objects_FC" | $SED "$delay_single_quote_subst"`' -postdep_objects_FC='`$ECHO "$postdep_objects_FC" | $SED "$delay_single_quote_subst"`' -predeps_FC='`$ECHO "$predeps_FC" | $SED "$delay_single_quote_subst"`' -postdeps_FC='`$ECHO "$postdeps_FC" | $SED "$delay_single_quote_subst"`' -compiler_lib_search_path_FC='`$ECHO "$compiler_lib_search_path_FC" | $SED "$delay_single_quote_subst"`' - -LTCC='$LTCC' -LTCFLAGS='$LTCFLAGS' -compiler='$compiler_DEFAULT' - -# A function that is 
used when there is no print builtin or printf. -func_fallback_echo () -{ - eval 'cat <<_LTECHO_EOF -\$1 -_LTECHO_EOF' -} - -# Quote evaled strings. -for var in SHELL \ -ECHO \ -PATH_SEPARATOR \ -SED \ -GREP \ -EGREP \ -FGREP \ -LD \ -NM \ -LN_S \ -lt_SP2NL \ -lt_NL2SP \ -reload_flag \ -OBJDUMP \ -deplibs_check_method \ -file_magic_cmd \ -file_magic_glob \ -want_nocaseglob \ -DLLTOOL \ -sharedlib_from_linklib_cmd \ -AR \ -AR_FLAGS \ -archiver_list_spec \ -STRIP \ -RANLIB \ -CC \ -CFLAGS \ -compiler \ -lt_cv_sys_global_symbol_pipe \ -lt_cv_sys_global_symbol_to_cdecl \ -lt_cv_sys_global_symbol_to_c_name_address \ -lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -nm_file_list_spec \ -lt_prog_compiler_no_builtin_flag \ -lt_prog_compiler_pic \ -lt_prog_compiler_wl \ -lt_prog_compiler_static \ -lt_cv_prog_compiler_c_o \ -need_locks \ -MANIFEST_TOOL \ -DSYMUTIL \ -NMEDIT \ -LIPO \ -OTOOL \ -OTOOL64 \ -shrext_cmds \ -export_dynamic_flag_spec \ -whole_archive_flag_spec \ -compiler_needs_object \ -with_gnu_ld \ -allow_undefined_flag \ -no_undefined_flag \ -hardcode_libdir_flag_spec \ -hardcode_libdir_separator \ -exclude_expsyms \ -include_expsyms \ -file_list_spec \ -variables_saved_for_relink \ -libname_spec \ -library_names_spec \ -soname_spec \ -install_override_mode \ -finish_eval \ -old_striplib \ -striplib \ -compiler_lib_search_dirs \ -predep_objects \ -postdep_objects \ -predeps \ -postdeps \ -compiler_lib_search_path \ -LD_FC \ -reload_flag_FC \ -compiler_FC \ -lt_prog_compiler_no_builtin_flag_FC \ -lt_prog_compiler_pic_FC \ -lt_prog_compiler_wl_FC \ -lt_prog_compiler_static_FC \ -lt_cv_prog_compiler_c_o_FC \ -export_dynamic_flag_spec_FC \ -whole_archive_flag_spec_FC \ -compiler_needs_object_FC \ -with_gnu_ld_FC \ -allow_undefined_flag_FC \ -no_undefined_flag_FC \ -hardcode_libdir_flag_spec_FC \ -hardcode_libdir_separator_FC \ -exclude_expsyms_FC \ -include_expsyms_FC \ -file_list_spec_FC \ -compiler_lib_search_dirs_FC \ -predep_objects_FC \ 
-postdep_objects_FC \ -predeps_FC \ -postdeps_FC \ -compiler_lib_search_path_FC; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) - eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" - ;; - esac -done - -# Double-quote double-evaled strings. -for var in reload_cmds \ -old_postinstall_cmds \ -old_postuninstall_cmds \ -old_archive_cmds \ -extract_expsyms_cmds \ -old_archive_from_new_cmds \ -old_archive_from_expsyms_cmds \ -archive_cmds \ -archive_expsym_cmds \ -module_cmds \ -module_expsym_cmds \ -export_symbols_cmds \ -prelink_cmds \ -postlink_cmds \ -postinstall_cmds \ -postuninstall_cmds \ -finish_cmds \ -sys_lib_search_path_spec \ -sys_lib_dlsearch_path_spec \ -reload_cmds_FC \ -old_archive_cmds_FC \ -old_archive_from_new_cmds_FC \ -old_archive_from_expsyms_cmds_FC \ -archive_cmds_FC \ -archive_expsym_cmds_FC \ -module_cmds_FC \ -module_expsym_cmds_FC \ -export_symbols_cmds_FC \ -prelink_cmds_FC \ -postlink_cmds_FC; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) - eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" - ;; - esac -done - -ac_aux_dir='$ac_aux_dir' -xsi_shell='$xsi_shell' -lt_shell_append='$lt_shell_append' - -# See if we are running on zsh, and set the options which allow our -# commands through without removal of \ escapes INIT. -if test -n "\${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST -fi - - - PACKAGE='$PACKAGE' - VERSION='$VERSION' - TIMESTAMP='$TIMESTAMP' - RM='$RM' - ofile='$ofile' - - - - - - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - -# Handling of arguments. 
-for ac_config_target in $ac_config_targets -do - case $ac_config_target in - "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; - "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; - "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; - "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; - "lib/Makefile") CONFIG_FILES="$CONFIG_FILES lib/Makefile" ;; - "lib/xmlFailures/Makefile") CONFIG_FILES="$CONFIG_FILES lib/xmlFailures/Makefile" ;; - "lib/xmlSuccesses/Makefile") CONFIG_FILES="$CONFIG_FILES lib/xmlSuccesses/Makefile" ;; - "prog/Makefile") CONFIG_FILES="$CONFIG_FILES prog/Makefile" ;; - - *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; - esac -done - - -# If the user did not use the arguments to specify the items to instantiate, -# then the envvar interface is used. Set only those that are not. -# We use the long form for the default assignment because of an extremely -# bizarre bug on SunOS 4.1.3. -if $ac_need_defaults; then - test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files - test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers - test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands -fi - -# Have a temporary directory for convenience. Make it in the build tree -# simply because there is no reason against having it here, and in addition, -# creating and moving files from /tmp can sometimes cause problems. -# Hook for its removal unless debugging. -# Note that there is a small window in which the directory will not be cleaned: -# after its creation but before its name has been assigned to `$tmp'. -$debug || -{ - tmp= ac_tmp= - trap 'exit_status=$? - : "${ac_tmp:=$tmp}" - { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status -' 0 - trap 'as_fn_exit 1' 1 2 13 15 -} -# Create a (secure) tmp directory for tmp files. 
- -{ - tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -d "$tmp" -} || -{ - tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 -ac_tmp=$tmp - -# Set up the scripts for CONFIG_FILES section. -# No need to generate them if there are no CONFIG_FILES. -# This happens for instance with `./config.status config.h'. -if test -n "$CONFIG_FILES"; then - - -ac_cr=`echo X | tr X '\015'` -# On cygwin, bash can eat \r inside `` if the user requested igncr. -# But we know of no other shell where ac_cr would be empty at this -# point, so we can use a bashism as a fallback. -if test "x$ac_cr" = x; then - eval ac_cr=\$\'\\r\' -fi -ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` -if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then - ac_cs_awk_cr='\\r' -else - ac_cs_awk_cr=$ac_cr -fi - -echo 'BEGIN {' >"$ac_tmp/subs1.awk" && -_ACEOF - - -{ - echo "cat >conf$$subs.awk <<_ACEOF" && - echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && - echo "_ACEOF" -} >conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 -ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` -ac_delim='%!_!# ' -for ac_last_try in false false false false false :; do - . ./conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - - ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` - if test $ac_delim_n = $ac_delim_num; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" - fi -done -rm -f conf$$subs.sh - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && -_ACEOF -sed -n ' -h -s/^/S["/; s/!.*/"]=/ -p -g -s/^[^!]*!// -:repl -t repl -s/'"$ac_delim"'$// -t delim -:nl -h -s/\(.\{148\}\)..*/\1/ -t more1 -s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ -p -n -b repl -:more1 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t nl -:delim -h -s/\(.\{148\}\)..*/\1/ -t more2 -s/["\\]/\\&/g; s/^/"/; s/$/"/ -p -b -:more2 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t delim -' >$CONFIG_STATUS || ac_write_fail=1 -rm -f conf$$subs.awk -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACAWK -cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && - for (key in S) S_is_set[key] = 1 - FS = "" - -} -{ - line = $ 0 - nfields = split(line, field, "@") - substed = 0 - len = length(field[1]) - for (i = 2; i < nfields; i++) { - key = field[i] - keylen = length(key) - if (S_is_set[key]) { - value = S[key] - line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) - len += length(value) + length(field[++i]) - substed = 1 - } else - len += 1 + keylen - } - - print line -} - -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then - sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" -else - cat -fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ - || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 -_ACEOF - -# VPATH may cause trouble with some makes, so we remove sole $(srcdir), -# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and -# trailing colons and then remove the whole line if VPATH becomes empty -# (actually we leave an empty line to preserve line numbers). 
-if test "x$srcdir" = x.; then - ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ -h -s/// -s/^/:/ -s/[ ]*$/:/ -s/:\$(srcdir):/:/g -s/:\${srcdir}:/:/g -s/:@srcdir@:/:/g -s/^:*// -s/:*$// -x -s/\(=[ ]*\).*/\1/ -G -s/\n// -s/^[^=]*=[ ]*$// -}' -fi - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -fi # test -n "$CONFIG_FILES" - -# Set up the scripts for CONFIG_HEADERS section. -# No need to generate them if there are no CONFIG_HEADERS. -# This happens for instance with `./config.status Makefile'. -if test -n "$CONFIG_HEADERS"; then -cat >"$ac_tmp/defines.awk" <<\_ACAWK || -BEGIN { -_ACEOF - -# Transform confdefs.h into an awk script `defines.awk', embedded as -# here-document in config.status, that substitutes the proper values into -# config.h.in to produce config.h. - -# Create a delimiter string that does not exist in confdefs.h, to ease -# handling of long lines. -ac_delim='%!_!# ' -for ac_last_try in false false :; do - ac_tt=`sed -n "/$ac_delim/p" confdefs.h` - if test -z "$ac_tt"; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done - -# For the awk script, D is an array of macro values keyed by name, -# likewise P contains macro parameters if any. Preserve backslash -# newline sequences. 
- -ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* -sed -n ' -s/.\{148\}/&'"$ac_delim"'/g -t rset -:rset -s/^[ ]*#[ ]*define[ ][ ]*/ / -t def -d -:def -s/\\$// -t bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3"/p -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p -d -:bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3\\\\\\n"\\/p -t cont -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p -t cont -d -:cont -n -s/.\{148\}/&'"$ac_delim"'/g -t clear -:clear -s/\\$// -t bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/"/p -d -:bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p -b cont -' >$CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - for (key in D) D_is_set[key] = 1 - FS = "" -} -/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { - line = \$ 0 - split(line, arg, " ") - if (arg[1] == "#") { - defundef = arg[2] - mac1 = arg[3] - } else { - defundef = substr(arg[1], 2) - mac1 = arg[2] - } - split(mac1, mac2, "(") #) - macro = mac2[1] - prefix = substr(line, 1, index(line, defundef) - 1) - if (D_is_set[macro]) { - # Preserve the white space surrounding the "#". - print prefix "define", macro P[macro] D[macro] - next - } else { - # Replace #undef with comments. This is necessary, for example, - # in the case of _POSIX_SOURCE, which is predefined and required - # on some systems where configure will not decide to define it. - if (defundef == "undef") { - print "/*", prefix defundef, macro, "*/" - next - } - } -} -{ print } -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 -fi # test -n "$CONFIG_HEADERS" - - -eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" -shift -for ac_tag -do - case $ac_tag in - :[FHLC]) ac_mode=$ac_tag; continue;; - esac - case $ac_mode$ac_tag in - :[FHL]*:*);; - :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; - :[FH]-) ac_tag=-:-;; - :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; - esac - ac_save_IFS=$IFS - IFS=: - set x $ac_tag - IFS=$ac_save_IFS - shift - ac_file=$1 - shift - - case $ac_mode in - :L) ac_source=$1;; - :[FH]) - ac_file_inputs= - for ac_f - do - case $ac_f in - -) ac_f="$ac_tmp/stdin";; - *) # Look for the file first in the build tree, then in the source tree - # (if the path is not absolute). The absolute path cannot be DOS-style, - # because $ac_f cannot contain `:'. - test -f "$ac_f" || - case $ac_f in - [\\/$]*) false;; - *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; - esac || - as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; - esac - case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac - as_fn_append ac_file_inputs " '$ac_f'" - done - - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. Generated by config.status. */ - configure_input='Generated from '` - $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' - `' by configure.' - if test x"$ac_file" != x-; then - configure_input="$ac_file. $configure_input" - { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 -$as_echo "$as_me: creating $ac_file" >&6;} - fi - # Neutralize special characters interpreted by sed in replacement strings. - case $configure_input in #( - *\&* | *\|* | *\\* ) - ac_sed_conf_input=`$as_echo "$configure_input" | - sed 's/[\\\\&|]/\\\\&/g'`;; #( - *) ac_sed_conf_input=$configure_input;; - esac - - case $ac_tag in - *:-:* | *:-) cat >"$ac_tmp/stdin" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; - esac - ;; - esac - - ac_dir=`$as_dirname -- "$ac_file" || -$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$ac_file" : 'X\(//\)[^/]' \| \ - X"$ac_file" : 'X\(//\)$' \| \ - X"$ac_file" : 'X\(/\)' \| . 
2>/dev/null || -$as_echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir="$ac_dir"; as_fn_mkdir_p - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - - case $ac_mode in - :F) - # - # CONFIG_FILE - # - - case $INSTALL in - [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; - *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; - esac - ac_MKDIR_P=$MKDIR_P - case $MKDIR_P in - [\\/$]* | ?:[\\/]* ) ;; - */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; - esac -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# If the template does not know about datarootdir, expand it. -# FIXME: This hack should be removed a few years after 2.60. 
-ac_datarootdir_hack=; ac_datarootdir_seen= -ac_sed_dataroot=' -/datarootdir/ { - p - q -} -/@datadir@/p -/@docdir@/p -/@infodir@/p -/@localedir@/p -/@mandir@/p' -case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in -*datarootdir*) ac_datarootdir_seen=yes;; -*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - ac_datarootdir_hack=' - s&@datadir@&$datadir&g - s&@docdir@&$docdir&g - s&@infodir@&$infodir&g - s&@localedir@&$localedir&g - s&@mandir@&$mandir&g - s&\\\${datarootdir}&$datarootdir&g' ;; -esac -_ACEOF - -# Neutralize VPATH when `$srcdir' = `.'. -# Shell code in configure.ac might set extrasub. -# FIXME: do we really want to maintain this feature? -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_sed_extra="$ac_vpsub -$extrasub -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -:t -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s|@configure_input@|$ac_sed_conf_input|;t t -s&@top_builddir@&$ac_top_builddir_sub&;t t -s&@top_build_prefix@&$ac_top_build_prefix&;t t -s&@srcdir@&$ac_srcdir&;t t -s&@abs_srcdir@&$ac_abs_srcdir&;t t -s&@top_srcdir@&$ac_top_srcdir&;t t -s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t -s&@builddir@&$ac_builddir&;t t -s&@abs_builddir@&$ac_abs_builddir&;t t -s&@abs_top_builddir@&$ac_abs_top_builddir&;t t -s&@INSTALL@&$ac_INSTALL&;t t -s&@MKDIR_P@&$ac_MKDIR_P&;t t -$ac_datarootdir_hack -" -eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ - >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 - -test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ - "$ac_tmp/out"`; test -z "$ac_out"; } && - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&5 -$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&2;} - - rm -f "$ac_tmp/stdin" - case $ac_file in - -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; - *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; - esac \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - ;; - :H) - # - # CONFIG_HEADER - # - if test x"$ac_file" != x-; then - { - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" - } >"$ac_tmp/config.h" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then - { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 -$as_echo "$as_me: $ac_file is unchanged" >&6;} - else - rm -f "$ac_file" - mv "$ac_tmp/config.h" "$ac_file" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - fi - else - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ - || as_fn_error $? "could not create -" "$LINENO" 5 - fi -# Compute "$ac_file"'s index in $config_headers. 
-_am_arg="$ac_file" -_am_stamp_count=1 -for _am_header in $config_headers :; do - case $_am_header in - $_am_arg | $_am_arg:* ) - break ;; - * ) - _am_stamp_count=`expr $_am_stamp_count + 1` ;; - esac -done -echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || -$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$_am_arg" : 'X\(//\)[^/]' \| \ - X"$_am_arg" : 'X\(//\)$' \| \ - X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$_am_arg" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'`/stamp-h$_am_stamp_count - ;; - - :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 -$as_echo "$as_me: executing $ac_file commands" >&6;} - ;; - esac - - - case $ac_file$ac_mode in - "depfiles":C) test x"$AMDEP_TRUE" != x"" || { - # Autoconf 2.62 quotes --file arguments for eval, but not when files - # are listed without --file. Let's play safe and only enable the eval - # if we detect the quoting. - case $CONFIG_FILES in - *\'*) eval set x "$CONFIG_FILES" ;; - *) set x $CONFIG_FILES ;; - esac - shift - for mf - do - # Strip MF so we end up with the name of the file. - mf=`echo "$mf" | sed -e 's/:.*$//'` - # Check whether this is an Automake generated Makefile or not. - # We used to match only the files named `Makefile.in', but - # some people rename them; so instead we look at the file content. - # Grep'ing the first line is not enough: some people post-process - # each Makefile.in and add a new line on top of each file to say so. - # Grep'ing the whole file is not good either: AIX grep has a line - # limit of 2048, but all sed's we know have understand at least 4000. 
- if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then - dirpart=`$as_dirname -- "$mf" || -$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$mf" : 'X\(//\)[^/]' \| \ - X"$mf" : 'X\(//\)$' \| \ - X"$mf" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$mf" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - else - continue - fi - # Extract the definition of DEPDIR, am__include, and am__quote - # from the Makefile without running `make'. - DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` - test -z "$DEPDIR" && continue - am__include=`sed -n 's/^am__include = //p' < "$mf"` - test -z "am__include" && continue - am__quote=`sed -n 's/^am__quote = //p' < "$mf"` - # When using ansi2knr, U may be empty or an underscore; expand it - U=`sed -n 's/^U = //p' < "$mf"` - # Find all dependency output files, they are included files with - # $(DEPDIR) in their names. We invoke sed twice because it is the - # simplest approach to changing $(DEPDIR) to its actual value in the - # expansion. - for file in `sed -n " - s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ - sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do - # Make sure the directory exists. - test -f "$dirpart/$file" && continue - fdir=`$as_dirname -- "$file" || -$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$file" : 'X\(//\)[^/]' \| \ - X"$file" : 'X\(//\)$' \| \ - X"$file" : 'X\(/\)' \| . 
2>/dev/null || -$as_echo X"$file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir=$dirpart/$fdir; as_fn_mkdir_p - # echo "creating $dirpart/$file" - echo '# dummy' > "$dirpart/$file" - done - done -} - ;; - "libtool":C) - - # See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes. - if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - - cfgfile="${ofile}T" - trap "$RM \"$cfgfile\"; exit 1" 1 2 15 - $RM "$cfgfile" - - cat <<_LT_EOF >> "$cfgfile" -#! $SHELL - -# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. -# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION -# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: -# NOTE: Changes made to this file will be lost: look at ltmain.sh. -# -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -# 2006, 2007, 2008, 2009, 2010, 2011 Free Software -# Foundation, Inc. -# Written by Gordon Matzigkeit, 1996 -# -# This file is part of GNU Libtool. -# -# GNU Libtool is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 2 of -# the License, or (at your option) any later version. -# -# As a special exception to the GNU General Public License, -# if you distribute this file as part of a program or library that -# is built using GNU Libtool, you may include this file under the -# same distribution terms that you use for the rest of that program. -# -# GNU Libtool is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with GNU Libtool; see the file COPYING. If not, a copy -# can be downloaded from http://www.gnu.org/licenses/gpl.html, or -# obtained by writing to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - - -# The names of the tagged configurations supported by this script. -available_tags="FC " - -# ### BEGIN LIBTOOL CONFIG - -# Which release of libtool.m4 was used? -macro_version=$macro_version -macro_revision=$macro_revision - -# Whether or not to build shared libraries. -build_libtool_libs=$enable_shared - -# Whether or not to build static libraries. -build_old_libs=$enable_static - -# What type of objects to build. -pic_mode=$pic_mode - -# Whether or not to optimize for fast installation. -fast_install=$enable_fast_install - -# Shell to use when invoking shell scripts. -SHELL=$lt_SHELL - -# An echo program that protects backslashes. -ECHO=$lt_ECHO - -# The PATH separator for the build system. -PATH_SEPARATOR=$lt_PATH_SEPARATOR - -# The host system. -host_alias=$host_alias -host=$host -host_os=$host_os - -# The build system. -build_alias=$build_alias -build=$build -build_os=$build_os - -# A sed program that does not truncate output. -SED=$lt_SED - -# Sed that helps us avoid accidentally triggering echo(1) options like -n. -Xsed="\$SED -e 1s/^X//" - -# A grep program that handles long lines. -GREP=$lt_GREP - -# An ERE matcher. -EGREP=$lt_EGREP - -# A literal string matcher. -FGREP=$lt_FGREP - -# A BSD- or MS-compatible name lister. -NM=$lt_NM - -# Whether we need soft or hard links. -LN_S=$lt_LN_S - -# What is the maximum length of a command? -max_cmd_len=$max_cmd_len - -# Object file suffix (normally "o"). -objext=$ac_objext - -# Executable file suffix (normally ""). -exeext=$exeext - -# whether the shell understands "unset". -lt_unset=$lt_unset - -# turn spaces into newlines. -SP2NL=$lt_lt_SP2NL - -# turn newlines into spaces. 
-NL2SP=$lt_lt_NL2SP - -# convert \$build file names to \$host format. -to_host_file_cmd=$lt_cv_to_host_file_cmd - -# convert \$build files to toolchain format. -to_tool_file_cmd=$lt_cv_to_tool_file_cmd - -# An object symbol dumper. -OBJDUMP=$lt_OBJDUMP - -# Method to check whether dependent libraries are shared objects. -deplibs_check_method=$lt_deplibs_check_method - -# Command to use when deplibs_check_method = "file_magic". -file_magic_cmd=$lt_file_magic_cmd - -# How to find potential files when deplibs_check_method = "file_magic". -file_magic_glob=$lt_file_magic_glob - -# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -want_nocaseglob=$lt_want_nocaseglob - -# DLL creation program. -DLLTOOL=$lt_DLLTOOL - -# Command to associate shared and link libraries. -sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd - -# The archiver. -AR=$lt_AR - -# Flags to create an archive. -AR_FLAGS=$lt_AR_FLAGS - -# How to feed a file listing to the archiver. -archiver_list_spec=$lt_archiver_list_spec - -# A symbol stripping program. -STRIP=$lt_STRIP - -# Commands used to install an old-style archive. -RANLIB=$lt_RANLIB -old_postinstall_cmds=$lt_old_postinstall_cmds -old_postuninstall_cmds=$lt_old_postuninstall_cmds - -# Whether to use a lock for old archive extraction. -lock_old_archive_extraction=$lock_old_archive_extraction - -# A C compiler. -LTCC=$lt_CC - -# LTCC compiler flags. -LTCFLAGS=$lt_CFLAGS - -# Take the output of nm and produce a listing of raw symbols and C names. -global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe - -# Transform the output of nm in a proper C declaration. -global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl - -# Transform the output of nm in a C name address pair. -global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - -# Transform the output of nm in a C name address pair when lib prefix is needed. 
-global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -# Specify filename containing input files for \$NM. -nm_file_list_spec=$lt_nm_file_list_spec - -# The root where to search for dependent libraries,and in which our libraries should be installed. -lt_sysroot=$lt_sysroot - -# The name of the directory that contains temporary libtool files. -objdir=$objdir - -# Used to examine libraries when file_magic_cmd begins with "file". -MAGIC_CMD=$MAGIC_CMD - -# Must we lock files when doing compilation? -need_locks=$lt_need_locks - -# Manifest tool. -MANIFEST_TOOL=$lt_MANIFEST_TOOL - -# Tool to manipulate archived DWARF debug symbol files on Mac OS X. -DSYMUTIL=$lt_DSYMUTIL - -# Tool to change global to local symbols on Mac OS X. -NMEDIT=$lt_NMEDIT - -# Tool to manipulate fat objects and archives on Mac OS X. -LIPO=$lt_LIPO - -# ldd/readelf like tool for Mach-O binaries on Mac OS X. -OTOOL=$lt_OTOOL - -# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. -OTOOL64=$lt_OTOOL64 - -# Old archive suffix (normally "a"). -libext=$libext - -# Shared library suffix (normally ".so"). -shrext_cmds=$lt_shrext_cmds - -# The commands to extract the exported symbol list from a shared archive. -extract_expsyms_cmds=$lt_extract_expsyms_cmds - -# Variables whose values should be saved in libtool wrapper scripts and -# restored at link time. -variables_saved_for_relink=$lt_variables_saved_for_relink - -# Do we need the "lib" prefix for modules? -need_lib_prefix=$need_lib_prefix - -# Do we need a version for libraries? -need_version=$need_version - -# Library versioning type. -version_type=$version_type - -# Shared library runtime path variable. -runpath_var=$runpath_var - -# Shared library path variable. -shlibpath_var=$shlibpath_var - -# Is shlibpath searched before the hard-coded library search path? -shlibpath_overrides_runpath=$shlibpath_overrides_runpath - -# Format of library name prefix. 
-libname_spec=$lt_libname_spec - -# List of archive names. First name is the real one, the rest are links. -# The last name is the one that the linker finds with -lNAME -library_names_spec=$lt_library_names_spec - -# The coded name of the library, if different from the real name. -soname_spec=$lt_soname_spec - -# Permission mode override for installation of shared libraries. -install_override_mode=$lt_install_override_mode - -# Command to use after installation of a shared archive. -postinstall_cmds=$lt_postinstall_cmds - -# Command to use after uninstallation of a shared archive. -postuninstall_cmds=$lt_postuninstall_cmds - -# Commands used to finish a libtool library installation in a directory. -finish_cmds=$lt_finish_cmds - -# As "finish_cmds", except a single script fragment to be evaled but -# not shown. -finish_eval=$lt_finish_eval - -# Whether we should hardcode library paths into libraries. -hardcode_into_libs=$hardcode_into_libs - -# Compile-time system search path for libraries. -sys_lib_search_path_spec=$lt_sys_lib_search_path_spec - -# Run-time system search path for libraries. -sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec - -# Whether dlopen is supported. -dlopen_support=$enable_dlopen - -# Whether dlopen of programs is supported. -dlopen_self=$enable_dlopen_self - -# Whether dlopen of statically linked programs is supported. -dlopen_self_static=$enable_dlopen_self_static - -# Commands to strip libraries. -old_striplib=$lt_old_striplib -striplib=$lt_striplib - - -# The linker used to build libraries. -LD=$lt_LD - -# How to create reloadable object files. -reload_flag=$lt_reload_flag -reload_cmds=$lt_reload_cmds - -# Commands used to build an old-style archive. -old_archive_cmds=$lt_old_archive_cmds - -# A language specific compiler. -CC=$lt_compiler - -# Is the compiler the GNU compiler? -with_gcc=$GCC - -# Compiler flag to turn off builtin functions. 
-no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - -# Additional compiler flags for building library objects. -pic_flag=$lt_lt_prog_compiler_pic - -# How to pass a linker flag through the compiler. -wl=$lt_lt_prog_compiler_wl - -# Compiler flag to prevent dynamic linking. -link_static_flag=$lt_lt_prog_compiler_static - -# Does compiler simultaneously support -c and -o options? -compiler_c_o=$lt_lt_cv_prog_compiler_c_o - -# Whether or not to add -lc for building shared libraries. -build_libtool_need_lc=$archive_cmds_need_lc - -# Whether or not to disallow shared libs when runtime libs are static. -allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes - -# Compiler flag to allow reflexive dlopens. -export_dynamic_flag_spec=$lt_export_dynamic_flag_spec - -# Compiler flag to generate shared objects directly from archives. -whole_archive_flag_spec=$lt_whole_archive_flag_spec - -# Whether the compiler copes with passing no objects directly. -compiler_needs_object=$lt_compiler_needs_object - -# Create an old-style archive from a shared archive. -old_archive_from_new_cmds=$lt_old_archive_from_new_cmds - -# Create a temporary old-style archive to link instead of a shared archive. -old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds - -# Commands used to build a shared archive. -archive_cmds=$lt_archive_cmds -archive_expsym_cmds=$lt_archive_expsym_cmds - -# Commands used to build a loadable module if different from building -# a shared archive. -module_cmds=$lt_module_cmds -module_expsym_cmds=$lt_module_expsym_cmds - -# Whether we are building with GNU ld or not. -with_gnu_ld=$lt_with_gnu_ld - -# Flag that allows shared libraries with undefined symbols to be built. -allow_undefined_flag=$lt_allow_undefined_flag - -# Flag that enforces no undefined symbols. -no_undefined_flag=$lt_no_undefined_flag - -# Flag to hardcode \$libdir into a binary during linking. 
-# This must work even if \$libdir does not exist -hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec - -# Whether we need a single "-rpath" flag with a separated argument. -hardcode_libdir_separator=$lt_hardcode_libdir_separator - -# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes -# DIR into the resulting binary. -hardcode_direct=$hardcode_direct - -# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes -# DIR into the resulting binary and the resulting library dependency is -# "absolute",i.e impossible to change by setting \${shlibpath_var} if the -# library is relocated. -hardcode_direct_absolute=$hardcode_direct_absolute - -# Set to "yes" if using the -LDIR flag during linking hardcodes DIR -# into the resulting binary. -hardcode_minus_L=$hardcode_minus_L - -# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR -# into the resulting binary. -hardcode_shlibpath_var=$hardcode_shlibpath_var - -# Set to "yes" if building a shared library automatically hardcodes DIR -# into the library and all subsequent libraries and executables linked -# against it. -hardcode_automatic=$hardcode_automatic - -# Set to yes if linker adds runtime paths of dependent libraries -# to runtime path list. -inherit_rpath=$inherit_rpath - -# Whether libtool must link a program against all its dependency libraries. -link_all_deplibs=$link_all_deplibs - -# Set to "yes" if exported symbols are required. -always_export_symbols=$always_export_symbols - -# The commands to list exported symbols. -export_symbols_cmds=$lt_export_symbols_cmds - -# Symbols that should not be listed in the preloaded symbols. -exclude_expsyms=$lt_exclude_expsyms - -# Symbols that must always be exported. -include_expsyms=$lt_include_expsyms - -# Commands necessary for linking programs (against libraries) with templates. -prelink_cmds=$lt_prelink_cmds - -# Commands necessary for finishing linking programs. 
-postlink_cmds=$lt_postlink_cmds - -# Specify filename containing input files. -file_list_spec=$lt_file_list_spec - -# How to hardcode a shared library path into an executable. -hardcode_action=$hardcode_action - -# The directories searched by this compiler when creating a shared library. -compiler_lib_search_dirs=$lt_compiler_lib_search_dirs - -# Dependencies to place before and after the objects being linked to -# create a shared library. -predep_objects=$lt_predep_objects -postdep_objects=$lt_postdep_objects -predeps=$lt_predeps -postdeps=$lt_postdeps - -# The library search path used internally by the compiler when linking -# a shared library. -compiler_lib_search_path=$lt_compiler_lib_search_path - -# ### END LIBTOOL CONFIG - -_LT_EOF - - case $host_os in - aix3*) - cat <<\_LT_EOF >> "$cfgfile" -# AIX sometimes has problems with the GCC collect2 program. For some -# reason, if we set the COLLECT_NAMES environment variable, the problems -# vanish in a puff of smoke. -if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES -fi -_LT_EOF - ;; - esac - - -ltmain="$ac_aux_dir/ltmain.sh" - - - # We use sed instead of cat because bash on DJGPP gets confused if - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? - sed '$q' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - - if test x"$xsi_shell" = xyes; then - sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -func_dirname ()\ -{\ -\ case ${1} in\ -\ */*) func_dirname_result="${1%/*}${2}" ;;\ -\ * ) func_dirname_result="${3}" ;;\ -\ esac\ -} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? 
|| _lt_function_replace_fail=: - - - sed -e '/^func_basename ()$/,/^} # func_basename /c\ -func_basename ()\ -{\ -\ func_basename_result="${1##*/}"\ -} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -func_dirname_and_basename ()\ -{\ -\ case ${1} in\ -\ */*) func_dirname_result="${1%/*}${2}" ;;\ -\ * ) func_dirname_result="${3}" ;;\ -\ esac\ -\ func_basename_result="${1##*/}"\ -} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -func_stripname ()\ -{\ -\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -\ # positional parameters, so assign one to ordinary parameter first.\ -\ func_stripname_result=${3}\ -\ func_stripname_result=${func_stripname_result#"${1}"}\ -\ func_stripname_result=${func_stripname_result%"${2}"}\ -} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -func_split_long_opt ()\ -{\ -\ func_split_long_opt_name=${1%%=*}\ -\ func_split_long_opt_arg=${1#*=}\ -} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? 
|| _lt_function_replace_fail=: - - - sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -func_split_short_opt ()\ -{\ -\ func_split_short_opt_arg=${1#??}\ -\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -func_lo2o ()\ -{\ -\ case ${1} in\ -\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -\ *) func_lo2o_result=${1} ;;\ -\ esac\ -} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_xform ()$/,/^} # func_xform /c\ -func_xform ()\ -{\ - func_xform_result=${1%.*}.lo\ -} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_arith ()$/,/^} # func_arith /c\ -func_arith ()\ -{\ - func_arith_result=$(( $* ))\ -} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_len ()$/,/^} # func_len /c\ -func_len ()\ -{\ - func_len_result=${#1}\ -} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? 
|| _lt_function_replace_fail=: - -fi - -if test x"$lt_shell_append" = xyes; then - sed -e '/^func_append ()$/,/^} # func_append /c\ -func_append ()\ -{\ - eval "${1}+=\\${2}"\ -} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -func_append_quoted ()\ -{\ -\ func_quote_for_eval "${2}"\ -\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - # Save a `func_append' function call where possible by direct use of '+=' - sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") - test 0 -eq $? || _lt_function_replace_fail=: -else - # Save a `func_append' function call even when '+=' is not available - sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") - test 0 -eq $? || _lt_function_replace_fail=: -fi - -if test x"$_lt_function_replace_fail" = x":"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -fi - - - mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - - - cat <<_LT_EOF >> "$ofile" - -# ### BEGIN LIBTOOL TAG CONFIG: FC - -# The linker used to build libraries. 
-LD=$lt_LD_FC - -# How to create reloadable object files. -reload_flag=$lt_reload_flag_FC -reload_cmds=$lt_reload_cmds_FC - -# Commands used to build an old-style archive. -old_archive_cmds=$lt_old_archive_cmds_FC - -# A language specific compiler. -CC=$lt_compiler_FC - -# Is the compiler the GNU compiler? -with_gcc=$GCC_FC - -# Compiler flag to turn off builtin functions. -no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_FC - -# Additional compiler flags for building library objects. -pic_flag=$lt_lt_prog_compiler_pic_FC - -# How to pass a linker flag through the compiler. -wl=$lt_lt_prog_compiler_wl_FC - -# Compiler flag to prevent dynamic linking. -link_static_flag=$lt_lt_prog_compiler_static_FC - -# Does compiler simultaneously support -c and -o options? -compiler_c_o=$lt_lt_cv_prog_compiler_c_o_FC - -# Whether or not to add -lc for building shared libraries. -build_libtool_need_lc=$archive_cmds_need_lc_FC - -# Whether or not to disallow shared libs when runtime libs are static. -allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_FC - -# Compiler flag to allow reflexive dlopens. -export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_FC - -# Compiler flag to generate shared objects directly from archives. -whole_archive_flag_spec=$lt_whole_archive_flag_spec_FC - -# Whether the compiler copes with passing no objects directly. -compiler_needs_object=$lt_compiler_needs_object_FC - -# Create an old-style archive from a shared archive. -old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_FC - -# Create a temporary old-style archive to link instead of a shared archive. -old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_FC - -# Commands used to build a shared archive. -archive_cmds=$lt_archive_cmds_FC -archive_expsym_cmds=$lt_archive_expsym_cmds_FC - -# Commands used to build a loadable module if different from building -# a shared archive. 
-module_cmds=$lt_module_cmds_FC -module_expsym_cmds=$lt_module_expsym_cmds_FC - -# Whether we are building with GNU ld or not. -with_gnu_ld=$lt_with_gnu_ld_FC - -# Flag that allows shared libraries with undefined symbols to be built. -allow_undefined_flag=$lt_allow_undefined_flag_FC - -# Flag that enforces no undefined symbols. -no_undefined_flag=$lt_no_undefined_flag_FC - -# Flag to hardcode \$libdir into a binary during linking. -# This must work even if \$libdir does not exist -hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_FC - -# Whether we need a single "-rpath" flag with a separated argument. -hardcode_libdir_separator=$lt_hardcode_libdir_separator_FC - -# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes -# DIR into the resulting binary. -hardcode_direct=$hardcode_direct_FC - -# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes -# DIR into the resulting binary and the resulting library dependency is -# "absolute",i.e impossible to change by setting \${shlibpath_var} if the -# library is relocated. -hardcode_direct_absolute=$hardcode_direct_absolute_FC - -# Set to "yes" if using the -LDIR flag during linking hardcodes DIR -# into the resulting binary. -hardcode_minus_L=$hardcode_minus_L_FC - -# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR -# into the resulting binary. -hardcode_shlibpath_var=$hardcode_shlibpath_var_FC - -# Set to "yes" if building a shared library automatically hardcodes DIR -# into the library and all subsequent libraries and executables linked -# against it. -hardcode_automatic=$hardcode_automatic_FC - -# Set to yes if linker adds runtime paths of dependent libraries -# to runtime path list. -inherit_rpath=$inherit_rpath_FC - -# Whether libtool must link a program against all its dependency libraries. -link_all_deplibs=$link_all_deplibs_FC - -# Set to "yes" if exported symbols are required. 
-always_export_symbols=$always_export_symbols_FC - -# The commands to list exported symbols. -export_symbols_cmds=$lt_export_symbols_cmds_FC - -# Symbols that should not be listed in the preloaded symbols. -exclude_expsyms=$lt_exclude_expsyms_FC - -# Symbols that must always be exported. -include_expsyms=$lt_include_expsyms_FC - -# Commands necessary for linking programs (against libraries) with templates. -prelink_cmds=$lt_prelink_cmds_FC - -# Commands necessary for finishing linking programs. -postlink_cmds=$lt_postlink_cmds_FC - -# Specify filename containing input files. -file_list_spec=$lt_file_list_spec_FC - -# How to hardcode a shared library path into an executable. -hardcode_action=$hardcode_action_FC - -# The directories searched by this compiler when creating a shared library. -compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_FC - -# Dependencies to place before and after the objects being linked to -# create a shared library. -predep_objects=$lt_predep_objects_FC -postdep_objects=$lt_postdep_objects_FC -predeps=$lt_predeps_FC -postdeps=$lt_postdeps_FC - -# The library search path used internally by the compiler when linking -# a shared library. -compiler_lib_search_path=$lt_compiler_lib_search_path_FC - -# ### END LIBTOOL TAG CONFIG: FC -_LT_EOF - - ;; - - esac -done # for ac_tag - - -as_fn_exit 0 -_ACEOF -ac_clean_files=$ac_clean_files_save - -test $ac_write_fail = 0 || - as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 - - -# configure is writing to config.log, and then calls config.status. -# config.status does its own redirection, appending to config.log. -# Unfortunately, on DOS this fails, as config.log is still kept open -# by configure, so config.status won't be able to write to it; its -# output is simply discarded. So we exec the FD to /dev/null, -# effectively closing config.log, so it can be properly (re)opened and -# appended to by config.status. 
When coming back to configure, we -# need to make the FD available again. -if test "$no_create" != yes; then - ac_cs_success=: - ac_config_status_args= - test "$silent" = yes && - ac_config_status_args="$ac_config_status_args --quiet" - exec 5>/dev/null - $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false - exec 5>>config.log - # Use ||, not &&, to avoid exiting from the if with $? = 1, which - # would make configure fail if this is the last instruction. - $ac_cs_success || as_fn_exit 1 -fi -if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 -$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} -fi - diff --git a/CMake/cdat_modules_extra/uvcdat.in b/CMake/cdat_modules_extra/uvcdat.in deleted file mode 100755 index 8b1a3f238d..0000000000 --- a/CMake/cdat_modules_extra/uvcdat.in +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash -# source is not portable whereas . is -. 
"@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh" - -# Used in event of -o "log_location"; grabs the next arg and puts it in target -capture=false -# The location we'll be logging to -target="$HOME/.uvcdat/uvcdatsession.log" -# Whether or not we're redirecting the stdout/stderr -redirect=true - -for var in "$@" -do - if [ $capture = true ]; then - # -o was found, grabbing the next value - target=$var - if [ "$target" = "" ]; then - # This is the way we can redirect output to stdout - # Do not redirect output - redirect=false - fi - # Don't need to capture anything else - capture=false - continue - fi - - case $var in - # Trigger above block on the next arg - -o) capture=true; - ;; - # Parse the target out of the = section - --output=*) target=`sed "s/--output=\(.*\)/\1/" <<< $var` - if [ "$target" = "" ]; then - # Do not redirect output - redirect=false - fi - ;; - # Do not redirect output - --output-std) redirect=false - ;; - # Shouldn't redirect for help - --help) redirect=false - ;; - *) ;; - esac -done - -if [ $redirect = false ] ;then - python@PYVER@ "@CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py" "$@" -else - # Replace all uses of ~ with $HOME - target="${target/#\~/$HOME}" - - # Check if path exists - target_dir="$(dirname $target)" - if [ ! -d "$target_dir" ] ;then - mkdir -p $target_dir - fi - - # Make sure the file exists and that we have write privileges - touch $target - # Launch with redirection - python@PYVER@ "@CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py" "$@" >>$target 2>&1 -fi diff --git a/CMake/cdat_modules_extra/uvcdat.mac.in b/CMake/cdat_modules_extra/uvcdat.mac.in deleted file mode 100755 index 14a394f373..0000000000 --- a/CMake/cdat_modules_extra/uvcdat.mac.in +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -# source is not portable where as . is -BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -. 
$BASEDIR/setup_runtime.sh -python@PYVER@ $BASEDIR/../vistrails/vistrails/uvcdat.py - diff --git a/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt b/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt deleted file mode 100644 index e61b4896d0..0000000000 --- a/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt +++ /dev/null @@ -1,232 +0,0 @@ -82848263d3f9032b41bc02f758cb0bed acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-01.nc -09c2f48312305fef59ee571fe1c3a84a acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-02.nc -3274cb2d7cccffac20059f564a97998e acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-03.nc -5e677beb0eccfe8c94ec9e18460c2581 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-04.nc -cd565477d7d8555566e16bf5ff4bfe44 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-05.nc -d6038ef39f33b6a6d06a3554531a1ed2 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-06.nc -97bf73768c9f50068ffa7399fc0a1e0a acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-07.nc -705147cb320524d8257dcee8b450aec3 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-08.nc -164861198d2cb1897713afbeebf9eb62 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-09.nc -0b342120b940679cab8a2204e6b9f0d0 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-10.nc -6a12f3a02fc2607afe871f1d4aff7ea2 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-11.nc -0d642c1b3697ff3c45d07b7a90a07fab acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-12.nc -8b3e27df842aba7dc88b4c13266cc4ed acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-01.nc -38a7850265356a9b49ab78172b121927 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-02.nc -30ab14ec20e9ee54ff9ba3bd0332c490 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-03.nc -2d4c0cf37429c5a1d97be1acc5b907b1 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-04.nc -72ed71d9937b77e9c01f35ec3924e478 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-05.nc -62c85090e8b93a0caedebae52a6feddf acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-06.nc -7c9a70dfc28d7a9eb052f281738adb55 
acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-07.nc -d505af09b431fcfb2255fbabcae16ce0 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-08.nc -2875586ec0f21efd94a4fca640ef7f59 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-09.nc -6bc3b40018820413633a07c4d8278e50 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-10.nc -6a56554e98908dbcb1ad04f8129b7e8d acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-11.nc -610ad7ff458a87c863fc2d792e69dc2f acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-12.nc -e17b3f827c0162c246df0a3aabe4ce9d acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-01.nc -79a551fdfb44b88c64fb6552048f4dc5 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-02.nc -e8c38da3ad16c7866b3b3b540647a5da acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-03.nc -59de1e4fedabf0976590af6e470ceec1 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-04.nc -147389dbf5bfb479d09a8982d6690e8b acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-05.nc -c6f3843a3f716de98693c11bc807c206 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-06.nc -be02c6100e317dd037ad0cccf9d8a8cf acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-07.nc -109b769371207a503ac9039b37fd4dad acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-08.nc -629f86af7dbe6f3b379450f951e3e1b2 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-09.nc -02c3a536f6025ebde38bee00bc69aa09 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-10.nc -a661f1ce9b87e46865b489fde9752edf acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-11.nc -7de08765c4e2f9a34e21ba8024453adc acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-12.nc -28441278df2af93f9ebfa1b51ef21007 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-01.nc -1576faec4df27627c3eb975e7c6f5fef acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-02.nc -abccaf09316d0f1705557dd752d359af acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-03.nc -cde766ef10310253fc3baaa4d5ca8761 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-04.nc -fd58a1f7d6d2a6037df183e0fca9ff5f acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-05.nc 
-73a0b57991b798ca2b52e56afcf4f630 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-06.nc -f45485c533798bb53b4452469a5bc678 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-07.nc -149bfade64fe7b0b984059954e88ce97 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-08.nc -ada05ce9162160c9a6c02d9d335c9349 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-09.nc -aca027b6b88bfa17059ff22945cd393f acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-10.nc -54e738cdb234fcec78d86a49790fafdc acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-11.nc -151f3e6f7c5a8cbfd31abada8df36dd2 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-12.nc -91b73bdb596231c604d4c76db55bce5e acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-01.nc -5446fed21e3700d9d90f212ddbdbedc4 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-02.nc -8f69e20b5993613eb473a904cb3c5cfd acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-03.nc -6d984999d23f93c2434960f43381556d acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-04.nc -8be183c391e859bc36a8215f276bdd1b acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-05.nc -6e610ae6499ec706940ce81b3ee5df85 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-06.nc -2be1078885df583b0a1ee929ef663846 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-07.nc -493969c7aef835400219722322276ec5 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-08.nc -055d76ef47600f3b0e0142d6cb4db758 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-09.nc -12ec6242e2e3269b180c4a2367963327 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-10.nc -a857e9ae0696c33c38171c7d92791181 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-11.nc -42097c573ac657ec44bde9aabfa98afd acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-12.nc -b7198ad93b6eae51fcfd49fb3f9877a9 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-01.nc -09d6b9c23bf272f7ad8e6eba37e45edb acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-02.nc -b3ab42c5083df9f901dde9c7fe90bf26 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-03.nc -4a63c5b704fa1e8fefab025c4e8c46aa 
acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-04.nc -4608c9358aa5754352eb9b87d85e7a1c acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-05.nc -4eff1ec373b9beb820e5e1e4113498aa acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-06.nc -cffdc3aab308d233c956720d80671b95 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-07.nc -8dfcd2ecac7d37c12ac0adef4825c67f acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-08.nc -0a196de01ca67ce291a026e755b9921d acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-09.nc -e6931415ab36579fff13f4933a6bf1f5 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-10.nc -526fbd9987a6d5faf927106bf048aa2b acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-11.nc -839301c709e5a7b3eb271e75782979af acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-12.nc -6d86e5edd0a92931226ac99d13167980 acme_lores_atm_climo/F1850.g37_bench_ANN_climo.nc -f3c5c5a4d91d6e3e0cc4d2df362b0503 acme_lores_atm_climo/F1850.g37_bench_DJF_climo.nc -06e0cc5f50cd7f2c1e2f30c4c4278b47 acme_lores_cam_climo/F1850.g37_bench_ANN_climo.nc -712d887975121e81168ab03a535cadba acme_lores_cam_climo/F1850.g37_bench_APR_climo.nc -e2e5505205c326eea69574226d881359 acme_lores_cam_climo/F1850.g37_bench_DJF_climo.nc -d36b0d4f7fb27c3897668131bdec05aa acme_lores_cam_climo/F1850.g37_bench_JAN_climo.nc -39342297493a616eb8988ef0a3a9c988 acme_lores_cam_climo/F1850.g37_bench_JJA_climo.nc -0a8cbf9b41f2cc752800a584f6356cbd acme_lores_cam_climo/F1850.g37_bench_JUL_climo.nc -7b2da1926acf2c0f9ffad80497775bb6 acme_lores_cam_climo/F1850.g37_bench_MAM_climo.nc -7df286b070640d0074c556560edc6a73 acme_lores_cam_climo/F1850.g37_bench_SON_climo.nc -77d7b6de33467bdebe1a05700f03cae7 acme_lores_clm_climo/ANN_climo.nc -a075f9d88b0b29b9f6a706f56bc628fa acme_lores_clm_climo/APR_climo.nc -f0694a365f88bef9f2ae34169afcd99b acme_lores_clm_climo/AUG_climo.nc -3928a8108bed42d5035bb9e9ef06a227 acme_lores_clm_climo/DEC_climo.nc -5cd00312d791f34b1d33ca336d874473 acme_lores_clm_climo/DJF_climo.nc -5e2849739943108c549c6724c6927ccd 
acme_lores_clm_climo/FEB_climo.nc -4badd8e20c7e45d8156f0677416d4f85 acme_lores_clm_climo/JAN_climo.nc -bcc44d7f27938f1f21cf3c34d29dfe0d acme_lores_clm_climo/JJA_climo.nc -c11b441acebdf5e7dac696485abd31b8 acme_lores_clm_climo/JUL_climo.nc -1740586484d8e59b18bf97d89658cd97 acme_lores_clm_climo/JUN_climo.nc -6aca924e7541a42f37c189934912d4bb acme_lores_clm_climo/MAM_climo.nc -16c8c8d84c30d2f72b1bafd7929841a5 acme_lores_clm_climo/MAR_climo.nc -eb483652fc0b0b069761659262d1d111 acme_lores_clm_climo/MAY_climo.nc -e3e52b82e64357c50fe42aed7e0ba56c acme_lores_clm_climo/NOV_climo.nc -8969b2045cd430d03cebaccb91995f3d acme_lores_clm_climo/OCT_climo.nc -4a1d44b3ab16645aef032006be8b4af3 acme_lores_clm_climo/SEP_climo.nc -f57a1c82229d2985894ef643e0392135 acme_lores_clm_climo/SON_climo.nc -2a40dbd588429cbefb6317fc48076bb9 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-01.nc -176fbe665aa0ea9ee3ba63d2df780537 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-02.nc -cc857575c3b7e81520be03a20fd5fc4c acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-03.nc -1a01b328a240435c32ea7f4dcc880db6 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-04.nc -14b1ed3abf5c37c7d3611b57111123a8 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-05.nc -a2cf201b629578dc40a1a6c8c2ebfdd4 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-06.nc -3ba6118cecded5739d20ef78d2e75458 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-07.nc -a42132db7da5c17b9a69aee42951ae3d acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-08.nc -ee65c00602bc7e0de884e09be4b2bb1d acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-09.nc -1909f013d84b298eeff19b5250f61daa acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-10.nc -4b96d62be06f31b8be94388ce59dbeb7 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-11.nc -486218898744c21420a24ab36121520d acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-12.nc -950360fe5f5334d3026ba44850c539a6 cam35_data/cam3_5_01_climo.nc -fc869f4e9c79960f2f0766905379c4c3 cam35_data/cam3_5_02_climo.nc 
-c11b0b3283b726318d84edc8ad042714 cam35_data/cam3_5_03_climo.nc -4d1bfc12f358026addd34d47eca1b52c cam35_data/cam3_5_04_climo.nc -da9d0149d3e81d7bdae96076e07daf39 cam35_data/cam3_5_05_climo.nc -a5526dbaac0a0da52ca04bc5b9c71c56 cam35_data/cam3_5_06_climo.nc -00662c2eafcb297cf8aabf8c51456d0b cam35_data/cam3_5_07_climo.nc -ba72017189a80edd5181d639ae6204e9 cam35_data/cam3_5_08_climo.nc -b23c87bbf00d39b0966e3a6d072c0abc cam35_data/cam3_5_09_climo.nc -4f5d4e732e97c163f63ed1430858c5e3 cam35_data/cam3_5_10_climo.nc -6abc0b942e43cf5fbadbead8ea2aac26 cam35_data/cam3_5_11_climo.nc -c9ecb1cbabcc60196263f0a8b488d1e1 cam35_data/cam3_5_12_climo.nc -84204a1bc34f41f71ed613278b29a57f cam35_data_smaller/cam3_5_01_climo.nc -9fcd1364523a26f4fa833a89fc14bae9 cam35_data_smaller/cam3_5_02_climo.nc -d53f58834cf9053f3255818e441c735a cam35_data_smaller/cam3_5_03_climo.nc -7c848ac7acf21552d93273b0ba4817e5 cam35_data_smaller/cam3_5_04_climo.nc -96248cd867434a51d160ada6af4d0f4f cam35_data_smaller/cam3_5_05_climo.nc -155a163a204538164980a1425f4aa301 cam35_data_smaller/cam3_5_06_climo.nc -b33bf096521235e9fec1a64479438568 cam35_data_smaller/cam3_5_07_climo.nc -6fe5fcd5a4221dc4ae711ab6631b9cea cam35_data_smaller/cam3_5_08_climo.nc -7f2b52b2807e52ab0cdb94e892cec986 cam35_data_smaller/cam3_5_09_climo.nc -a5121dec5eb93415d8988fb3ae1f279e cam35_data_smaller/cam3_5_10_climo.nc -36183ada10292e09053a6573f0d493b6 cam35_data_smaller/cam3_5_11_climo.nc -018e37b4e760d92edfafcb035173db3d cam35_data_smaller/cam3_5_12_climo.nc -8cd47baae6710a9373ebaba96a6e262b cam_output/c_t_b30.009.cam2.h0.0600-01.nc -82731ab10329e5cdacfa78ea3da520f2 cam_output/c_t_b30.009.cam2.h0.0600-02.nc -146a578b04623773ad0e98e930d1a5e5 cam_output/c_t_b30.009.cam2.h0.0600-03.nc -e6ce8ea3580b3266bd93fc73dcad9adc cam_output/c_t_b30.009.cam2.h0.0600-04.nc -a5698548a26c40c514adcadd9623eb27 cam_output/c_t_b30.009.cam2.h0.0600-05.nc -848918d62382e94bad56a2cc2cd07fd8 cam_output/c_t_b30.009.cam2.h0.0600-06.nc -bf447ef80bef314a5e2b2003d741a529 
cam_output/c_t_b30.009.cam2.h0.0600-07.nc -be548db39e7607d4153f73e4b5657aa1 cam_output/c_t_b30.009.cam2.h0.0600-08.nc -0f7764b3aaf5412bdcd70943129026d6 cam_output/c_t_b30.009.cam2.h0.0600-09.nc -f0ac64dfbf1e5ccb97a167d0f6c75672 cam_output/c_t_b30.009.cam2.h0.0600-10.nc -7bf5f3401a0fbe8263bac61ca113e7d8 cam_output/c_t_b30.009.cam2.h0.0600-11.nc -cf83e939285b29ff808ed41544d7df92 cam_output/c_t_b30.009.cam2.h0.0600-12.nc -6e8cdaf575f9101921d11c571334842f cam_output/c_t_b30.009.cam2.h0.0601-01.nc -999693e6583eb4ed322151b68dda4e72 cam_output/c_t_b30.009.cam2.h0.0601-02.nc -e6d09f6db4fcf81ce68c935277fb110f cam_output/c_t_b30.009.cam2.h0.0601-03.nc -635be9948c7e7cecf82c76f953ed0624 cam_output/c_t_b30.009.cam2.h0.0601-04.nc -a2c14b3f0602aa9ad3b43316f11ae5ff cam_output/c_t_b30.009.cam2.h0.0601-05.nc -fbbb8c51f858fe89f4880a41b5f17d04 cam_output/c_t_b30.009.cam2.h0.0601-06.nc -1e5b7508a062d6aeb16afbf98045a5de cam_output/c_t_b30.009.cam2.h0.0601-07.nc -fc30abee308e251bde7be642fa0c3f7a cam_output/c_t_b30.009.cam2.h0.0601-08.nc -beafa07dc0c98b09984fd7830eb99f52 cam_output/c_t_b30.009.cam2.h0.0601-09.nc -4f36607badf32ee9d2c5234a58e779ad cam_output/c_t_b30.009.cam2.h0.0601-10.nc -039b724f844a15b936bfe7ee00e79a6e cam_output/c_t_b30.009.cam2.h0.0601-11.nc -da7fb4fcc052983bd7e5ac8a63a6a451 cam_output/c_t_b30.009.cam2.h0.0601-12.nc -f7a5944e246ca97ec722ed72d2e53315 model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-01.nc -c4ad68141d351aea55ce1e9bf0859798 model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-04.nc -bf0b2ef03cd280f5e635870b2ccda8d9 model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-07.nc -6893d78c8c5541999043f19d2dcee035 model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-10.nc -e241fc465279e7126e0e59789d9baedf obs/NCEP_01_climo.nc -cd1f8016b4f575c4b2a08a69c78b041a obs/NCEP_02_climo.nc -fae4c3bfa51707a9329b274e8de3633e obs/NCEP_03_climo.nc -49d418806a382eb17ae1c7cfa5295355 obs/NCEP_04_climo.nc 
-97fa9532455053987f1a92645c42ef71 obs/NCEP_05_climo.nc -078fece9cf0a1730ee13a18211cefa05 obs/NCEP_06_climo.nc -039a8dd4c98b2e2332699e750f72e2b2 obs/NCEP_07_climo.nc -4bc14d3447ff3af8c0fec1a19c7cd7b3 obs/NCEP_08_climo.nc -ef45a99e527f5a36b4a145d9919ac628 obs/NCEP_09_climo.nc -6673a7bcbf1476015dad7b5106a4213f obs/NCEP_10_climo.nc -9e86a777517ad6f4b392f7d63d8e98f7 obs/NCEP_11_climo.nc -5f4ec5821d1ebb9e5a73c9a46666291a obs/NCEP_12_climo.nc -578dcbfb4979cd3cbee2bde42a52d5c7 obs/NCEP_ANN_climo.nc -78c01194a72dc3da7b25c1ce402dfe7b obs/NCEP_DJF_climo.nc -dcd392831c5c0628fde4f92e2f704c18 obs/NCEP_JJA_climo.nc -185a376e3e6403191d42dbef55b72928 obs_atmos/c_CRU_ANN_climo.nc -9c754380f93e4305c5ed40b67d7282e5 obs_atmos/c_CRU_DJF_climo.nc -a8b02bd2ea54d089db13005e7a9b4999 obs_atmos/c_CRU_JJA_climo.nc -ef18dbf141367c0d7cf3990d7e10d64c obs_atmos/c_t_NCEP_01_climo.nc -10c09087712b3b283765381c78002154 obs_atmos/c_t_NCEP_02_climo.nc -3bcec656166614c11ad1f436129b4922 obs_atmos/c_t_NCEP_03_climo.nc -bf326d77aceedcdf7197b6ca4d7624df obs_atmos/c_t_NCEP_04_climo.nc -631dadd9a88b46a47506fa2b2cc0cc1e obs_atmos/c_t_NCEP_05_climo.nc -3b65eb064433b28d9e23aaf260994768 obs_atmos/c_t_NCEP_06_climo.nc -dd2962224eb21be51dd2e1d38d4d7bfc obs_atmos/c_t_NCEP_07_climo.nc -a7f0f0a58959c30f4342a643537d5791 obs_atmos/c_t_NCEP_08_climo.nc -16f1fb6a6fd60428a24821dfdbf9ba3f obs_atmos/c_t_NCEP_09_climo.nc -c1c5580c10e6017d7a1b4c844f4bee95 obs_atmos/c_t_NCEP_10_climo.nc -58ca74759be8e809e6113309163eb87e obs_atmos/c_t_NCEP_11_climo.nc -0a34a591d117471b83ec15d41ca4de5e obs_atmos/c_t_NCEP_12_climo.nc -53a07928fd5bb8282e3b00707c30d352 obs_atmos/c_t_NCEP_ANN_climo.nc -07fbdfe7c5ac96dca4d5b30cf0ffca4d obs_atmos/c_t_NCEP_DJF_climo.nc -bba7b95da836594ba56eccc5cc735953 obs_atmos/c_t_NCEP_JJA_climo.nc -ded2539f0946958f20946211ec6de7c6 obs_data_12/._RAOBS.nc -2df5c553f24cf4e51a826a34075a6122 obs_data_12/RAOBS.nc -3057f458f2eea7e29b5df6622b71c5c6 obs_data_13/ISCCPCOSP_01_climo.nc -863fdc036ca6c8bc181b68934fb5f334 
obs_data_13/ISCCPCOSP_02_climo.nc -44d91325876baa34dd53a3d5fdebc8a5 obs_data_13/ISCCPCOSP_03_climo.nc -2821ea5e0d7d1ab2e32486e6336c07b5 obs_data_13/ISCCPCOSP_04_climo.nc -dc5823c8971136e536c1f7c7d8f8452f obs_data_13/ISCCPCOSP_05_climo.nc -b0fb19767ddf330a4dd37a429810b9d9 obs_data_13/ISCCPCOSP_06_climo.nc -a07c2a2e6adfed391c53a0aff0c436ab obs_data_13/ISCCPCOSP_07_climo.nc -ca089074a4f3d1fe7f6897c0c88b1b6b obs_data_13/ISCCPCOSP_08_climo.nc -9f9c9897dc8e09e18f155fe5355d1ed8 obs_data_13/ISCCPCOSP_09_climo.nc -d74abae2b663ea67cf95de9b5f4e8485 obs_data_13/ISCCPCOSP_10_climo.nc -ba01b312ad7fc2f936299798c963114c obs_data_13/ISCCPCOSP_11_climo.nc -0a20a6f6220e941ad84e75347d044ff0 obs_data_13/ISCCPCOSP_12_climo.nc -f422c02f76cfd8ffdc3d664f7df29fa5 obs_data_13/ISCCPCOSP_ANN_climo.nc -c0c6e18ef0202b8da755210ff5bab6d0 obs_data_13/ISCCPCOSP_DJF_climo.nc -a52e9a734e34d3b6198f836c407a834b obs_data_13/ISCCPCOSP_JJA_climo.nc -0692a353d71f86e3b008f5b7136fead4 obs_data_13/ISCCPCOSP_MAM_climo.nc -65790f602a139f5e7ac561c0f50073a6 obs_data_13/ISCCPCOSP_SON_climo.nc -25da719f4a94f073b344d463ef46dd5c obs_data_5.6/ERS_01_climo.nc -82938151479416212514ea92f5c8944d obs_data_5.6/ERS_02_climo.nc -4474e171bc3ed010bc4cf85f2156331c obs_data_5.6/ERS_03_climo.nc -5928149aaa7e20e8e021051e4c1cf8af obs_data_5.6/ERS_04_climo.nc -8ba71cabf16409ec359250137313e1fc obs_data_5.6/ERS_05_climo.nc -7173b6c6ad21ebba3faae364bb0e2abd obs_data_5.6/ERS_06_climo.nc -4a4dce6ec29ff746e6ca438a1144e2f9 obs_data_5.6/ERS_07_climo.nc -89b82d69760e786d4c5cd6007e67ad8e obs_data_5.6/ERS_08_climo.nc -703d8a3c2bca30d721db74e4a9607991 obs_data_5.6/ERS_09_climo.nc -6be5b6eaacbd4bfee413b0432a3822bd obs_data_5.6/ERS_10_climo.nc -3aab5e306b45952d4bc538cf09733d36 obs_data_5.6/ERS_11_climo.nc -b7d52d062f54e6c28b73c1630866eb8f obs_data_5.6/ERS_12_climo.nc -257874570e3aeeda6cbd55accf60f6c9 obs_data_5.6/ERS_ANN_climo.nc -d7fc6bbb9a2dfdb0fa44d7835f94a3d4 obs_data_5.6/ERS_DJF_climo.nc -3cce9af23687f27d3b134f60039ebdce 
obs_data_5.6/ERS_JJA_climo.nc -aaedba911f145e711d05b6430e13ce4e obs_data_5.6/ERS_MAM_climo.nc -e40f05dfec15f145e9623290d5142705 obs_data_5.6/ERS_SON_climo.nc diff --git a/CMake/cdat_modules_extra/vacumm_build_step.cmake.in b/CMake/cdat_modules_extra/vacumm_build_step.cmake.in deleted file mode 100644 index 8ef121b843..0000000000 --- a/CMake/cdat_modules_extra/vacumm_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@vacumm_source_dir@" - ) diff --git a/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in b/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in deleted file mode 100644 index 9d7ba552bf..0000000000 --- a/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in +++ /dev/null @@ -1,36 +0,0 @@ - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib") -set(ENV{CFLAGS} "@cdat_osx_flags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CFLAGS@") -set(ENV{CPPFLAGS} "@cdat_osx_cppflags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CPPFLAGS@") -set(ENV{CXXFLAGS} "@cdat_osx_cxxflags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CXXFLAGS@") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @cdat_BINARY_DIR@/build/ParaView-build/VTK/Wrapping/Python - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in VTK Python Install") -endif() - -if (APPLE) - message("We are on a Mac, need to relink all 
libraries") - execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" @cdat_CMAKE_SOURCE_DIR@/fixlink.py - WORKING_DIRECTORY @cdat_BINARY_DIR@ - OUTPUT_VARIABLE out - ERROR_VARIABLE err - RESULT_VARIABLE res) - message("got: "${res}) -endif () - -message("Install succeeded.") - diff --git a/CMake/cdat_modules_extra/xgks_configure_step.cmake.in b/CMake/cdat_modules_extra/xgks_configure_step.cmake.in deleted file mode 100644 index 0abcb5c96a..0000000000 --- a/CMake/cdat_modules_extra/xgks_configure_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND env FC="" sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/curses_gcc5.patch b/CMake/curses_gcc5.patch deleted file mode 100644 index a1ee0240b8..0000000000 --- a/CMake/curses_gcc5.patch +++ /dev/null @@ -1,30 +0,0 @@ -index d8cc3c9..b91398c 100755 ---- a/ncurses/base/MKlib_gen.sh -+++ b/ncurses/base/MKlib_gen.sh -@@ -491,11 +492,22 @@ sed -n -f $ED1 \ - -e 's/gen_$//' \ - -e 's/ / /g' >>$TMP - -+cat >$ED1 < $ED2 -+cat $ED2 >$TMP -+ - $preprocessor $TMP 2>/dev/null \ --| sed \ -- -e 's/ / /g' \ -- -e 's/^ //' \ -- -e 's/_Bool/NCURSES_BOOL/g' \ -+| sed -f $ED1 \ - | $AWK -f $AW2 \ - | sed -f $ED3 \ - | sed \ diff --git a/CMake/dummy.f90 b/CMake/dummy.f90 deleted file mode 100644 index 4bbd9fbdc3..0000000000 --- a/CMake/dummy.f90 +++ /dev/null @@ -1,4 +0,0 @@ - PROGRAM dummy - - print*, "Hi" - END diff --git a/CMake/fixName.py b/CMake/fixName.py deleted file mode 100644 index 17f2a06067..0000000000 --- a/CMake/fixName.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python - - -import sys - -fnm = sys.prefix+"/Resources/Python.app/Contents/Info.plist" - 
-f=open(fnm) -s=f.read() -pat="CFBundleName" -i=s.find(pat)#Python") -s2=s[:i+len(pat)]+s[i+len(pat):].replace("Python","UV-CDAT",1) -f=open(fnm,'w') -f.write(s2) -f.close() diff --git a/CMake/fix_install_name.py.in b/CMake/fix_install_name.py.in deleted file mode 100644 index 787490a48e..0000000000 --- a/CMake/fix_install_name.py.in +++ /dev/null @@ -1,33 +0,0 @@ -import os -import sys -import fnmatch - -def find_files(directory, pattern): - for root, dirs, files in os.walk(directory): - for basename in files: - if fnmatch.fnmatch(basename, pattern): - filename = os.path.join(root, basename) - yield filename - -exts = ['*.dylib', '*.so'] - -# Find all the modules and shared libraries and replace the path contained -# if referencing the built ones -for pattern in exts: - for library in find_files("./", pattern): - print library - cmd = 'otool -L %s' % library - print "library is", library - deps = os.popen(cmd).readlines() - for dep in deps[1:]: - dep_name = os.path.split(dep)[1] - dep_name = dep_name.split()[0] - dep = dep.split()[0] - # Replace the ones that are built by us - if fnmatch.fnmatch(dep_name, pattern) and fnmatch.fnmatch(dep, "@CMAKE_INSTALL_PREFIX@*"): - print 'dep fullpath ', dep - print 'dep name', dep_name - cmd = "install_name_tool -change %s %s %s" % (dep, "@rpath/"+"".join(dep_name), library) - print 'change cmd is ', cmd - lns = os.popen(cmd) - print "\t"+"".join(lns) diff --git a/CMake/fixlink.py b/CMake/fixlink.py deleted file mode 100644 index 808baa7f2d..0000000000 --- a/CMake/fixlink.py +++ /dev/null @@ -1,49 +0,0 @@ -import os,sys,numpy -lib = '/usr/local/uvcdat/1.0.alpha/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/paraview/vtk/vtkCommonPython.so' -bad = 'ParaView-build' -#bad="System" -#bad="paraview3.11" -def change(lib,bad,paraviewPath,sameDir=False): - cmd = 'otool -L %s' % lib - print "LIB:",lib - ln=os.popen(cmd).readlines() - for l in ln[1:]: - link = l.strip().split()[0] - if link.find(bad)>-1: - 
print link,"\t", - nm=os.path.split(link)[1] - print nm - cmd = "install_name_tool -change %s %s/%s %s" % (link,paraviewPath,nm,lib) - print "\t",cmd - lns = os.popen(cmd) - print "\t"+"".join(lns) - if sameDir: - if link[:6] in ["libvtk","libXdm","libKWC","libQVT","libVPI","libCos","libpro"]: - cmd = "install_name_tool -change %s %s/%s %s" % (link,paraviewPath,link,lib) - print "\t",cmd - lns = os.popen(cmd) - print "\t"+"".join(lns) - - -inpath = "/".join(numpy.__path__[0].split("/")[:-1]+["paraview",]) -inpath2 = "/".join(numpy.__path__[0].split("/")[:-1]+["paraview","vtk"]) -inpath3 = "/".join(numpy.__path__[0].split("/")[:-1]+["vtk"]) -inpath4 = "/".join(numpy.__path__[0].split("/")[:-1]+["ParaView-3.11.1-py2.7.egg","paraview",]) -inpath5 = "/".join(numpy.__path__[0].split("/")[:-1]+["ParaView-3.11.1-py2.7.egg","paraview","vtk"]) -inpath6 = "/".join(numpy.__path__[0].split("/")[:-1]+["VTK-5.9.0-py2.7.egg","vtk"]) -paraviewPath = "/".join(sys.prefix.split("/")[:-5]+["Externals","lib","paraview-3.11"]) #= '/usr/local/uvcdat/1.0.alpha/Externals/lib/paraview-3.11/' -def doPath(inpath,paraviewPath,sameDir=False): - files = os.popen("ls %s" % inpath).readlines() - for f in files: - lib = inpath+"/"+f.strip() - print lib - change(lib,bad,paraviewPath,sameDir) -doPath(inpath,paraviewPath) -doPath(inpath2,paraviewPath) -doPath(inpath3,paraviewPath) -doPath(inpath4,paraviewPath) -doPath(inpath5,paraviewPath) -doPath(inpath6,paraviewPath) -doPath(paraviewPath,paraviewPath,True) - - diff --git a/CMake/install.py b/CMake/install.py deleted file mode 100644 index 2e37d9791e..0000000000 --- a/CMake/install.py +++ /dev/null @@ -1,10 +0,0 @@ -import os - -# The main installation script is installation/install.py -# However, we need to first check for problems using 1.5.2 syntax only. 
-current_dir = os.path.dirname(__file__) - -execfile(os.path.join(current_dir, 'test_python_ok.py')) - -install_script_path = os.path.join(current_dir, '..', 'installation', 'install.py') -execfile(install_script_path) diff --git a/CMake/netcdf_clang.patch b/CMake/netcdf_clang.patch deleted file mode 100644 index 44729cf6ef..0000000000 --- a/CMake/netcdf_clang.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/ncgen3/load.c b/git/uvcdat/exsrc/ncgen3_load.c -index 3da4712..147f4e7 100644 ---- a/ncgen3/load.c -+++ b/git/uvcdat/exsrc/ncgen3_load.c -@@ -5,6 +5,7 @@ - *********************************************************************/ - - #include -+#include - #include - #include - #include diff --git a/CMake/pyspharm_setup.patch b/CMake/pyspharm_setup.patch deleted file mode 100644 index da64371982..0000000000 --- a/CMake/pyspharm_setup.patch +++ /dev/null @@ -1,19 +0,0 @@ ---- setup.py.ok 2015-07-28 15:37:07.000000000 -0700 -+++ setup.py 2015-07-28 15:53:10.000000000 -0700 -@@ -27,12 +27,12 @@ - the terms of the SPHEREPACK license at - http://www2.cisl.ucar.edu/resources/legacy/spherepack/license\n - """) -- download = raw_input('Do you want to download SPHEREPACK now? (yes or no)') -- if download not in ['Y','y','yes','Yes','YES']: -- sys.exit(0) -+ # download = raw_input('Do you want to download SPHEREPACK now? 
(yes or no)') -+ # if download not in ['Y','y','yes','Yes','YES']: -+ # sys.exit(0) - import urllib, tarfile - tarfname = 'spherepack3.2.tar' -- URL="https://www2.cisl.ucar.edu/sites/default/files/"+tarfname -+ URL="http://uvcdat.llnl.gov/cdat/resources/"+tarfname - urllib.urlretrieve(URL,tarfname) - if not os.path.isfile(tarfname): - raise IOError('Sorry, download failed') diff --git a/CMake/python_patch_step.cmake.in b/CMake/python_patch_step.cmake.in deleted file mode 100644 index c1cb47384b..0000000000 --- a/CMake/python_patch_step.cmake.in +++ /dev/null @@ -1,15 +0,0 @@ -execute_process( - COMMAND - "@CMAKE_COMMAND@" -E copy_if_different @cdat_SOURCE_DIR@/pysrc/src/setup-@PYTHON_VERSION@.py @python_SOURCE_DIR@/setup.py -) - -# Refer: http://bugs.python.org/issue14572 -if(NOT WIN32) - execute_process( - WORKING_DIRECTORY @python_SOURCE_DIR@ - COMMAND patch -p1 - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/sqlite3_int64_v2.patch - ) -endif() - - diff --git a/CMake/sqlite3_int64_v2.patch b/CMake/sqlite3_int64_v2.patch deleted file mode 100644 index 3a3ab31d67..0000000000 --- a/CMake/sqlite3_int64_v2.patch +++ /dev/null @@ -1,24 +0,0 @@ -# HG changeset patch -# Parent 4641d8d99a7dd56c76aa7f769d6d91499113a3b8 - -diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c ---- a/Modules/_sqlite/connection.c -+++ b/Modules/_sqlite/connection.c -@@ -549,7 +549,7 @@ - } else if (py_val == Py_None) { - sqlite3_result_null(context); - } else if (PyInt_Check(py_val)) { -- sqlite3_result_int64(context, (sqlite3_int64)PyInt_AsLong(py_val)); -+ sqlite3_result_int64(context, (sqlite_int64)PyInt_AsLong(py_val)); - } else if (PyLong_Check(py_val)) { - sqlite3_result_int64(context, PyLong_AsLongLong(py_val)); - } else if (PyFloat_Check(py_val)) { -@@ -580,7 +580,7 @@ - sqlite3_value* cur_value; - PyObject* cur_py_value; - const char* val_str; -- sqlite3_int64 val_int; -+ sqlite_int64 val_int; - Py_ssize_t buflen; - void* raw_buffer; - diff --git a/CMake/test_python_ok.py 
b/CMake/test_python_ok.py deleted file mode 100644 index 274e15ac97..0000000000 --- a/CMake/test_python_ok.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys, os -# The main installation script is installation/install.py -# However, we need to first check for problems using 1.5.2 syntax only. -trouble = 0 -minimum_python_version = (2,5,0,'final',0) -if not hasattr(sys, 'version_info') or sys.version_info < minimum_python_version: - sys.stderr.write("Your Python is too old; please see README.txt.\n") - trouble = 1 -for x in ["PYTHONHOME"]: - if os.environ.has_key(x): - sys.stderr.write('Please undefine ' + x + ' before installation.\n') - trouble = 1 -if not os.environ.has_key('HOME'): - sys.stderr.write(\ -"Caution: You'll need to set environment variable HOME before using CDAT.\n") - -if trouble: - raise SystemExit, 1 -print 'Your Python checked OK!' diff --git a/CMake/travis_build.cmake b/CMake/travis_build.cmake deleted file mode 100644 index 83c8214350..0000000000 --- a/CMake/travis_build.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}") -set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/../_build") - -include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake) -set(CTEST_SITE "Travis") -set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}") -set(CTEST_CMAKE_GENERATOR "Unix Makefiles") - -ctest_start("Continuous") -ctest_configure() -ctest_build() -#ctest_test(INCLUDE vcs PARALLEL_LEVEL 1 RETURN_VALUE res) -#ctest_coverage() -#file(REMOVE ${CTEST_BINARY_DIRECTORY}/coverage.xml) - -#if(NOT res EQUAL 0) -# message(FATAL_ERROR "Test failures occurred.") -#endif() diff --git a/CMake/travis_submit.cmake b/CMake/travis_submit.cmake deleted file mode 100644 index 285e876e5c..0000000000 --- a/CMake/travis_submit.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}") -set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/../_build") - -include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake) -set(CTEST_SITE "Travis") 
-set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}") -set(CTEST_CMAKE_GENERATOR "Unix Makefiles") - -ctest_start("Continuous") -ctest_submit() diff --git a/CMake/uvcdat.plist b/CMake/uvcdat.plist deleted file mode 100644 index 496982c351..0000000000 --- a/CMake/uvcdat.plist +++ /dev/null @@ -1,38 +0,0 @@ - - - - - CFBundleDevelopmentRegion - English - CFBundleExecutable - uvcdat - CFBundleGetInfoString - - CFBundleIconFile - uvcdat.icns - CFBundleIdentifier - - CFBundleInfoDictionaryVersion - 6.0 - CFBundleLongVersionString - - CFBundleName - UVCDAT - CFBundlePackageType - APPL - CFBundleShortVersionString - - CFBundleSignature - ???? - CFBundleVersion - - CSResourcesFileMapped - - LSRequiresCarbon - - LSUIElement - 1 - NSHumanReadableCopyright - - - diff --git a/CMakeLists.txt b/CMakeLists.txt index a3a96dc7b5..bd7d1fa591 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,12 +1,4 @@ -#============================================================================= cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR) -CMAKE_POLICY(SET CMP0012 NEW) - -if ("${CMAKE_VERSION}" VERSION_LESS "2.8.12") - message(WARNING "Your CMake version is ${CMAKE_VERSION} which is depreciated for UV-CDAT. The recommended minimum CMake version is 2.8.12. 
Using older versions can result in build errors particularly with Xcode 5") -endif() - -# Project name and initial checks #============================================================================= project(cdat) @@ -19,15 +11,6 @@ set(cdat_external_patch_dir ${cdat_SOURCE_DIR}/exsrc) -if("${CMAKE_INSTALL_PREFIX}" STREQUAL "/usr/local") - get_filename_component(cdat_ROOT_DIR ${cdat_BINARY_DIR} PATH) - set(CMAKE_INSTALL_PREFIX ${cdat_BINARY_DIR}/install CACHE STRING "" FORCE) -endif() - -set(cdat_EXTERNALS ${CMAKE_INSTALL_PREFIX}/Externals) -set(ENV{PATH} "${cdat_EXTERNALS}/bin:$ENV{PATH}") -message("[INFO] We reset your path to: " $ENV{PATH}) - set(CMAKE_MODULE_PATH ${cdat_CMAKE_SOURCE_DIR} ${cdat_CMAKE_SOURCE_DIR}/cmake_modules @@ -36,6 +19,18 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ) +find_program(CONDA NAMES conda) + +if ( ${CONDA} STREQUAL "CONDA-NOTFOUND" ) + message(FATAL_ERROR "Could not locate conda, please make sure conda is installed and in your PATH") +endif() + +find_program(ACTIVATE NAMES activate) +if ( ${ACTIVATE} STREQUAL "ACTIVATE-NOTFOUND" ) + message(FATAL_ERROR "Could not locate activate, please make sure conda is installed and in your PATH") +endif() +message("[INFO] Found conda at: ${CONDA}") + if (DEFINED ENV{UVCDAT_ANONYMOUS_LOG}) if (($ENV{UVCDAT_ANONYMOUS_LOG} STREQUAL "yes") OR ($ENV{UVCDAT_ANONYMOUS_LOG} STREQUAL "no")) @@ -49,64 +44,12 @@ else() set(ENV{UVCDAT_ANONYMOUS_LOG} "no") endif() -# Disable in source build of any kind. 
-#============================================================================= -include(CheckBuildOutOfSource) -check_build_out_of_source("${cdat_SOURCE_DIR}" "${cdat_BINARY_DIR}" - BUILDINSOURCE) -if(BUILDINSOURCE) - set(msg "[ERROR] CDAT requires an out of source Build.") - set(msg "${msg}\nRun 'git clean -dfx' to restore source dir.") - message(FATAL_ERROR "${msg}") -endif() - # Include useful cmake scripts #============================================================================= include(cmake_utils) include(check_fortran) include(CTest) -# Enable/Disable coverage -#============================================================================= -option(CDAT_MEASURE_COVERAGE "Measure test coverage while running tests" OFF) - -if(CDAT_MEASURE_COVERAGE) - message("Coverage measurement enabled; tests will run slower.") - set(COVERAGE_PKGS "cdms2,vcs,cdutil,genutil,DV3D,vcsaddons,vcs.vtk_ui,vcs.editors,vcs.vcsvtk,regrid2") - configure_file(${cdat_CMAKE_SOURCE_DIR}/coverage_report.py.in - ${CMAKE_INSTALL_PREFIX}/bin/coverage_report - @ONLY - ) -endif() - -# Set up the test data. If UVCDAT_USE_SYSTEM_TESTDATA is ON and UVCDAT_TESTDATA -# is not set then we won't use it for testing. Otherwise we'll test either -# with the system test data or download it ourselves. 
-#============================================================================= -if (BUILD_TESTING) - set(UVCDAT_USE_SYSTEM_TESTDATA ON CACHE BOOL "Use UV-CDAT's test data from the system") - if(UVCDAT_USE_SYSTEM_TESTDATA) - set(UVCDAT_TESTDATA "" CACHE PATH "Location of UV-CDAT test data") - set(UVCDAT_TESTDATA_LOCATION ${UVCDAT_TESTDATA}) - else() - set(UVCDAT_TestData_GZ boonth-1-22-2013.p94m.tar.gz) - set(UVCDAT_TestData_MD5 cf47adb0b6164997fb122ccbc3bd6f92) - file(DOWNLOAD ${LLNL_URL}/${UVCDAT_TestData_GZ} ${CMAKE_BINARY_DIR}/${UVCDAT_TestData_GZ} - STATUS testdatastatus SHOW_PROGRESS EXPECTED_MD5 ${UVCDAT_TestData_MD5}) - list(GET testdatastatus 0 actualtestdatastatus) - if(actualtestdatastatus) - message("[WARNING] Unable to automatically download test data ${testdatastatus}") - else() - set(UVCDAT_TESTDATA_DIR ${CMAKE_BINARY_DIR}/UVCDAT_TestData) - file(MAKE_DIRECTORY ${UVCDAT_TESTDATA_DIR}) - execute_process( - COMMAND ${CMAKE_COMMAND} -E tar xzf ${CMAKE_BINARY_DIR}/${UVCDAT_TestData_GZ} - WORKING_DIRECTORY ${UVCDAT_TESTDATA_DIR}) - set(UVCDAT_TESTDATA_LOCATION ${UVCDAT_TESTDATA_DIR}) - endif() - endif() -endif() - # Change architecture *before* any enable_language() or project() # calls so that it's set properly to detect 64-bit-ness... 
#----------------------------------------------------------------------------- @@ -143,50 +86,6 @@ if(NOT GIT_PROTOCOL) set_property(CACHE GIT_PROTOCOL PROPERTY STRINGS "git://" "http://" "https://") endif() -if(GIT_PROTOCOL MATCHES "http://") - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/eof2.git.insteadof git://github.com/ajdawson/eof2.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/eofs.git.insteadof git://github.com/ajdawson/eofs.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global --unset url.git://uv-cdat.llnl.gov/windfield.git.insteadof http://uv-cdat.llnl.gov/git/windfield.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/UV-CDAT/scimake.git.insteadof git://github.com/UV-CDAT/scimake.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/windspharm.git.insteadof git://github.com/ajdawson/windspharm.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) -else() - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/eof2.git.insteadof git://github.com/ajdawson/eof2.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/eofs.git.insteadof git://github.com/ajdawson/eofs.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.git://uv-cdat.llnl.gov/windfield.git.insteadof http://uv-cdat.llnl.gov/git/windfield.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global 
--unset url.http://github.com/UV-CDAT/scimake.git.insteadof git://github.com/UV-CDAT/scimake.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/windspharm.git.insteadof git://github.com/ajdawson/windspharm.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) -endif() - # Checkout the baseline repository. #============================================================================= if(BUILD_TESTING) @@ -231,7 +130,7 @@ set(PARTS_BUILT_INFO "${cdat_BINARY_DIR}/build_info.txt" CACHE STRING "File wher # files in order to move them (somehow) to the OFFLINE machine where build will happen # OFF the machine has no internet access all files are suppposed to be here, pre-downloaded -option(OFFLINE_BUILD "Is there internet access, are we preping for it?" OFF) +# option(OFFLINE_BUILD "Is there internet access, are we preping for it?" OFF) option(CDAT_BUILD_PARALLEL "Build parallel components of CDAT" OFF) # OSMesa/VTK aren't playing nicely on macs. Disabling for now. @@ -242,85 +141,6 @@ cmake_dependent_option(CDAT_BUILD_OFFSCREEN "Use OSMesa for offscreen rendering. # Option to enable vtkweb for cdatweb option(CDAT_BUILD_WEB "Build in Web support (VTKWeb, etc.)" OFF) -# Option to enable CMOR -option(CDAT_BUILD_CMOR "Build CMOR" ON) - -# Option to choose between easy_install and pip (crunchy ssl/man in the middle prevents us to use pip here... -set(EGG_INSTALLER "PIP" CACHE STRING "Which package installer to use") -set_property(CACHE EGG_INSTALLER PROPERTY STRINGS "PIP" "EASY_INSTALL") -set(PIP_CERTIFICATE "" CACHE STRING "Certificate to use for PIP (LLNL issue really)") - -# Options for various types of builds -option(CDAT_USE_SYSTEM_PYTHON "Use system Python" OFF) - -# Default state -set(CDAT_BUILD_LEAN OFF) -set(CDAT_BUILD_ALL OFF) - -# Some more options -option(CDAT_BUILD_GUI "Builds GUI-based dependencies (Vistrails, ParaView, VisIt, R, etc.) 
" ON) -option(CDAT_BUILD_GRAPHICS "Build graphics-based dependencies (vcs, pyqt, Vistrails, ParaView, VisIt, R, etc.) " ON) -option(CDAT_BUILD_ESGF "Alias for CDAT_BUILD_LEAN" OFF) -option(CDAT_BUILD_UVCMETRICSPKG "Builds uvcmetrics package " ON) -option(CDAT_BUILD_PARAVIEW "Build ParaView rather than just VTK" OFF) -option(CDAT_DOWNLOAD_UVCMETRICS_TESTDATA "Download test data uvcmetrics package " ON) - -# If ESGF option is on then our build mode is LEAN. -if (CDAT_BUILD_ESGF) - if( (DEFINED CDAT_BUILD_MODE) AND (NOT "${CDAT_BUILD_MODE}" STREQUAL "LEAN") ) - message(WARNING "[INFO] CDAT_BUILD_ESGF enabled, forcing CDAT_BUILD_MODE to LEAN") - endif() - set(CDAT_BUILD_MODE "LEAN" CACHE STRING "Build mode for CDAT " FORCE) - set(CDAT_DOWNLOAD_SAMPLE_DATA OFF) -endif() -set(CDAT_BUILD_MODE "DEFAULT" CACHE STRING "Build mode for CDAT ") -set_property(CACHE CDAT_BUILD_MODE PROPERTY STRINGS "DEFAULT" "ALL" "LEAN") -message([INFO] BUILD MODE: ${CDAT_BUILD_MODE}) - -# Set the state of LEAN all based on the MODE -if (CDAT_BUILD_MODE STREQUAL "LEAN") - set(CDAT_BUILD_LEAN ON) - set(CDAT_BUILD_ALL OFF) -elseif (CDAT_BUILD_MODE STREQUAL "ALL") - set(CDAT_BUILD_LEAN OFF) - set(CDAT_BUILD_ALL ON) -elseif (CDAT_BUILD_MODE STREQUAL "DEFAULT") - set(CDAT_BUILD_LEAN OFF) - set(CDAT_BUILD_ALL OFF) -else() - message(FATAL_ERROR "[ERROR] Unknown CDAT_BUILD_MODE \"${CDAT_BUILD_MODE}\" VALID MODES ARE \"DEFAULT\" \"ALL\" \"LEAN\"") -endif() - -# First of all if LEAN then turn OFF GRAPHICS and PARALLEL -if (CDAT_BUILD_LEAN) - set_property(CACHE CDAT_BUILD_GRAPHICS PROPERTY VALUE OFF) - set_property(CACHE CDAT_BUILD_PARALLEL PROPERTY VALUE OFF) - set_property(CACHE CDAT_BUILD_UVCMETRICSPKG PROPERTY VALUE OFF) - set(CDMS_ONLY --enable-cdms-only) -else() - set(CDMS_ONLY "") -endif() - -# If ALL is enabled then turn ON GUI, GRAPHICS, and PARALLEL -if (CDAT_BUILD_ALL) - set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE ON) - set_property(CACHE CDAT_BUILD_GRAPHICS PROPERTY VALUE ON) - 
set_property(CACHE CDAT_BUILD_PARALLEL PROPERTY VALUE ON) - set_property(CACHE CDAT_BUILD_ESGF PROPERTY VALUE OFF) - set_property(CACHE CDAT_BUILD_UVCMETRICSPKG PROPERTY VALUE ON) - set(CDMS_ONLY "") -endif() - -# If no graphics then no gui as well -if (NOT CDAT_BUILD_GRAPHICS) - set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE OFF) -endif() - -# Don't build GUI if this is an offscreen-only build: -if(CDAT_BUILD_OFFSCREEN AND CDAT_BUILD_GUI) - message("[INFO] Turning off CDAT_BUILD_GUI; incompatible with CDAT_BUILD_OFFSCREEN.") - set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE OFF) -endif() set(ep_prefix ${cdat_BINARY_DIR}/build/prefix) set_property(DIRECTORY PROPERTY ep_log_dir ${cdat_BINARY_DIR}/logs) @@ -418,228 +238,16 @@ endif() # when left to create them. #============================================================================= set(CDAT_PACKAGE_CACHE_DIR -# ${CMAKE_CURRENT_BINARY_DIR}/../cdat_dependencies" "${CMAKE_CURRENT_BINARY_DIR}" CACHE PATH "Directory where source tar balls of external dependencies are kept" ) include(ExternalProject) - -file(MAKE_DIRECTORY ${cdat_EXTERNALS}) -file(MAKE_DIRECTORY ${cdat_EXTERNALS}/lib) -file(MAKE_DIRECTORY ${cdat_EXTERNALS}/bin) -file(MAKE_DIRECTORY ${cdat_EXTERNALS}/include) -file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/logs) -file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/build) -file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/sources) - -# Configure cdat command files -#============================================================================= -set(cdat_configure_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake) -set(cdat_make_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake) -set(cdat_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake) - -# Include essential packages -#============================================================================= -set(external_packages) -set(found_system_include_dirs) -set(found_system_libraries) - 
-include(python_pkg) -if (APPLE) - set(SB_EXTERNALS_DIR "${CMAKE_INSTALL_PREFIX}/Externals") - set(SB_DIR "${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}") -else() - set(SB_DIR "${CMAKE_INSTALL_PREFIX}") - # Helper variables to locate programs and libraries - set(SB_EXTERNALS_DIR "${CMAKE_INSTALL_PREFIX}/Externals") -endif() - -set(SB_LIB_DIR "${SB_DIR}/lib") -set(SB_BIN_DIR "${SB_DIR}/bin") - -include(basemap_pkg) include(cdat_pkg) -include(clapack_pkg) -#include(curl_pkg) -include(configobj_pkg) -include(cycler_pkg) -include(cython_pkg) -include(data_pkg) -include(esmf_pkg) -include(x264_pkg) -include(ffmpeg_pkg) -include(pyflakes_pkg) -include(pep8_pkg) -include(mccabe_pkg) -include(flake8_pkg) -include(g2clib_pkg) -include(proj4_pkg) -include(ocgis_pkg) -include(cligj_pkg) -include(click_pkg) -include(fiona_pkg) -include(pynetcdf4_pkg) -include(gdal_pkg) -include(geos_pkg) -include(gsw_pkg) -include(gui_support_pkg) -include(h5py_pkg) -include(hdf5_pkg) -include(zmq_pkg) -include(pyzmq_pkg) -include(tornado_pkg) -include(ipython_pkg) -include(jasper_pkg) -include(lapack_pkg) -include(lepl_pkg) -include(libcf_pkg) -include(lats_pkg) -include(libdrs_pkg) -include(libdrsfortran_pkg) -include(ezget_pkg) -include(cd77_pkg) -include(matplotlib_pkg) -include(six_pkg) -include(openssl_pkg) -include(cryptography_pkg) -include(enum34_pkg) -include(idna_pkg) -include(pyasn1_pkg) -include(ipaddress_pkg) -include(cffi_pkg) -include(ffi_pkg) -include(dateutils_pkg) -include(pyparsing_pkg) -include(pycparser_pkg) -include(md5_pkg) -include(mpi4py_pkg) -include(pyopenssl_pkg) -include(setuptools_pkg) -include(myproxyclient_pkg) -include(netcdf_pkg) -include(numexpr_pkg) -include(numpy_pkg) -include(mpi_pkg) -include(osmesa_pkg) -include(seawater_pkg) -include(vacumm_pkg) -if (CDAT_BUILD_PARAVIEW) - include(paraview_pkg) -else() - include(vtk_pkg) -endif() -include(pkgconfig_pkg) -include(libcdms_pkg) -include(sampledata_pkg) 
-include(pyspharm_pkg) -include(pytables_pkg) -include(readline_pkg) -include(r_pkg) -include(rpy2_pkg) -include(singledispatch_pkg) -include(scikits_pkg) -include(scipy_pkg) -## Part of setuptools no need to extra build it -## include(distribute_pkg) -if (NOT CDAT_USE_SYSTEM_PYTHON) - include(pip_pkg) -endif() -include(shapely_pkg) -include(pygments_pkg) -include(markupsafe_pkg) -include(jinja2_pkg) -include(docutils_pkg) -include(sphinx_pkg) -include(freetype_pkg) -include(coverage_pkg) -## C. Doutriaux: We need to replace the following with a findPackage at some point -if (APPLE) -else() - include(jpeg_pkg) - include(pixman_pkg) - include(fontconfig_pkg) - include(curses_pkg) - #include(tiff_pkg) - include(netcdfplus_pkg) -endif() -#include(geotiff_pkg) -include(cmor_pkg) -include(udunits2_pkg) -include(uuid_pkg) -# IF we build the UVCDAT Metrics package -if (CDAT_BUILD_UVCMETRICSPKG) - if (CDAT_DOWNLOAD_UVCMETRICS_TESTDATA) - set(UVCMETRICS_TEST_DATA_DIRECTORY ${CMAKE_INSTALL_PREFIX}/share/uvcmetrics/test_data CACHE PATH "DIR FOR UVCMETRICS TEST DATA" ) - endif() - include(uvcmetrics_pkg) -endif() -include(vistrails_pkg) -#include(yasm_pkg) -include(pylibxml2_pkg) -include(cdatlogger_pkg) -include(pyclimate_pkg) -include(scientificpython_pkg) -include(windspharm_pkg) -include(eof2_pkg) -include(eofs_pkg) -include(windfield_pkg) -if (CDAT_BUILD_ESGF) - include(lxml_pkg) -endif() - -if (CDAT_BUILD_GUI) - include(qt4_pkg) - if (NOT CDAT_USE_SYSTEM_PYTHON) - include(sip_pkg) - include(pyqt_pkg) - endif() - include(spyder_pkg) -endif() - -# Configure custom configure/build/install step files -#============================================================================= -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_common_environment.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_configure_step.cmake.in - 
${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_cmake_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_cmake_make_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cleanenv_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cleanenv_configure_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdatmpi_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdatmpi_configure_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/fix_install_name.py.in - ${cdat_CMAKE_BINARY_DIR}/fix_install_name.py - @ONLY -) - +# CONDA Options +set(CONDA_ENVIRONMENT_NAME ${cdat_VERSION} CACHE STRING "Name of conda environment we want to build CDAT in") +set(CONDA_CHANNEL_UVCDAT uvcdat CACHE STRING "channels to use (if more than one use '-c' between channels e.g. 
uvcdat/label/nightly -c uvcdat)") # Now sort and include external packages #============================================================================= include(TopologicalSort) @@ -670,368 +278,23 @@ foreach(package ${external_packages}) include("${lc_package}_external") endif() endforeach() -file(WRITE ${PARTS_BUILT_INFO} ${packages_info}) - -# Construct Include and Link variables -#============================================================================= -if(found_system_include_dirs) - list(REMOVE_DUPLICATES found_system_include_dirs) - list(REMOVE_ITEM found_system_include_dirs ${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES} ${CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES}) - set(cdat_external_include_directories) - foreach(include_dir ${found_system_include_dirs}) - set(cdat_external_include_directories "-I${include_dir} ${cdat_external_include_directories}") - endforeach() -endif() -message("[INFO] CDAT external include directories: ${cdat_external_include_directories}") - -message("[INFO] System libraries: ${found_system_libraries}") -if(found_system_libraries) - list(REMOVE_DUPLICATES found_system_libraries) - list(REMOVE_ITEM found_system_libraries ${CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES}) - set(cdat_external_link_directories) - foreach(library_dir ${found_system_libraries}) - set(cdat_external_link_directories "-L${library_dir} ${cdat_external_link_directories}") - endforeach() -endif() -message("[INFO] CDAT external link directories: ${cdat_external_link_directories}") - -# Configure remaining files -#============================================================================= - -# set candidate paths for setup_runtime scripts -# will be added to environment variables in reverse order -set(SETUP_LIBRARY_PATHS - "Externals/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} " - "Externals/lib/R/lib " - "Externals/lib " - "Externals/proj4/lib " - "Externals/lib64 " - "lib " -) -string(REPLACE ";" " " SETUP_LIBRARY_PATHS ${SETUP_LIBRARY_PATHS}) 
-set(SETUP_EXECUTABLE_PATHS - "Externals/paraview.app/Contents/bin " - "Library/Frameworks/Python.framework/Versions/${PYVER}/bin " - "Externals/bin " - "bin " -) -string(REPLACE ";" " " SETUP_EXECUTABLE_PATHS ${SETUP_EXECUTABLE_PATHS}) -set(SETUP_PYTHON_PATHS - "Externals/paraview.app/Contents/Python " - "Externals/lib/python${PYVER}/site-packages " - "Externals/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}/site-packages " - "lib/python${PYVER}/site-packages " -) -string(REPLACE ";" " " SETUP_PYTHON_PATHS ${SETUP_PYTHON_PATHS}) -include(GetGitRevisionDescription) -git_describe(UVCDAT_PROMPT_STRING) -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install.py.in - ${cdat_SOURCE_DIR}/installation/install.py - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/checked_get.sh.in - ${cdat_BINARY_DIR}/checked_get.sh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/predownload.py.in - ${cdat_BINARY_DIR}/predownload.py - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.sh.in - ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.sh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/reset_runtime.sh.in - ${CMAKE_INSTALL_PREFIX}/bin/reset_runtime.sh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/preofflinebuild.sh.in - ${cdat_BINARY_DIR}/preofflinebuild.sh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.csh.in - ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.csh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/reset_runtime.csh.in - ${CMAKE_INSTALL_PREFIX}/bin/reset_runtime.csh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcdat.in - ${CMAKE_INSTALL_PREFIX}/bin/uvcdat - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat.in - ${CMAKE_INSTALL_PREFIX}/bin/cdat - @ONLY -) configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest.in 
- ${CMAKE_INSTALL_PREFIX}/bin/runtest - @ONLY -) - - -if (BUILD_TESTING) - configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runpytest.in - ${CMAKE_INSTALL_PREFIX}/bin/runpytest + ${cdat_CMAKE_BINARY_DIR}/runtest @ONLY - ) - add_subdirectory(testing) -endif() - -# Where to install the wrapper scripts -set(WRAPPER_INSTALL_LOCATION ${CMAKE_INSTALL_PREFIX}/wrappers - CACHE PATH - "Install wrapper scripts 'cdat', 'uvcdat' and 'loadcdat' in that directory") - -add_custom_command( - OUTPUT ${WRAPPER_INSTALL_LOCATION}/loadcdat - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.sh - ${WRAPPER_INSTALL_LOCATION}/loadcdat) -add_custom_command( - OUTPUT ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.csh - ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh) -add_custom_command( - OUTPUT ${WRAPPER_INSTALL_LOCATION}/uvcdat - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_INSTALL_PREFIX}/bin/uvcdat - ${WRAPPER_INSTALL_LOCATION}/uvcdat) -add_custom_command( - OUTPUT ${WRAPPER_INSTALL_LOCATION}/cdat - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_INSTALL_PREFIX}/bin/cdat - ${WRAPPER_INSTALL_LOCATION}/cdat) - -add_custom_target(wrappers ALL DEPENDS - ${WRAPPER_INSTALL_LOCATION}/loadcdat - ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh - ${WRAPPER_INSTALL_LOCATION}/uvcdat - ${WRAPPER_INSTALL_LOCATION}/cdat) - -# Package UV-CDAT with CPACK -include(InstallRequiredSystemLibraries) - -set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "UVCDAT") -set(CPACK_PACKAGE_VENDOR "UVCDAT") -set(CPACK_PACKAGE_NAME "UVCDAT") -set(CPACK_PACKAGE_VERSION_MAJOR "2") -set(CPACK_PACKAGE_VERSION_MINOR "3") -set(CPACK_PACKAGE_VERSION_PATCH "0") -set(CPACK_PACKAGE_VERSION ${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH}) -set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/README.md") -set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LEGAL.txt") 
-set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Aashish Chaudhary") #required -set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX}) -set(CPACK_RPM_PACKAGE_PROVIDES /usr/local/uvcdat/bin/python /usr/local/uvcdat/bin/python2.7) -set(CPACK_DESTINATION_BIN_PREFIX "bin") - -if (APPLE) - set(SB_EXTERNALS_DIR "Externals") - set(SB_LIB_DIR "Library/Frameworks/Python.framework/Versions/2.7/lib") - set(CPACK_GENERATOR DragNDrop) - set(CPACK_DESTINATION_PREFIX "\${CMAKE_INSTALL_PREFIX}/UVCDAT.app/Contents") - set(CPACK_DESTINATION_BIN_PREFIX "${CPACK_DESTINATION_PREFIX}/MacOS") -endif() - -include(CPack) - - -install(CODE " - set(SB_EXTERNALS_DIR ${SB_EXTERNALS_DIR}) - set(SB_LIB_DIR ${SB_LIB_DIR}) - set(PYVER ${PYVER}) - set(PARAVIEW_MAJOR ${PARAVIEW_MAJOR}) - set(PARAVIEW_MINOR ${PARAVIEW_MINOR}) - set(VISIT_VERSION ${VISIT_VERSION}) - set(CDAT_BUILD_PARAVIEW ${CDAT_BUILD_PARAVIEW}) - set(SETUP_EXECUTABLE_PATHS \"${SETUP_EXECUTABLE_PATHS}\") - set(SETUP_PYTHON_PATHS \"${SETUP_PYTHON_PATHS}\") - set(SETUP_LIBRARY_PATHS \"${SETUP_LIBRARY_PATHS}\") - - file(GLOB_RECURSE programs \"${CMAKE_INSTALL_PREFIX}/bin/*\") - file(GLOB programs_images \"${CMAKE_INSTALL_PREFIX}/bin/images/*\") - file(GLOB programs_tutorials \"${CMAKE_INSTALL_PREFIX}/bin/tutorials/*\") - - if (NOT \"\${programs_images}\" STREQUAL \"\" OR NOT \"\${programs_tutorials}\" STREQUAL \"\") - list(REMOVE_ITEM programs \${programs_images} \${programs_tutorials}) - endif() - - set (resolved_programs \"\") - foreach (program \${programs}) - get_filename_component(res_program \"\${program}\" REALPATH) - set (regex_match \"\") - # Do not install uuid as its dependencies are not resolved when using - # RPMBuild - file (STRINGS \"\${res_program}\" regex_match REGEX \"uuid\") - if (\"\${regex_match}\" STREQUAL \"\") - file (STRINGS \"\${res_program}\" regex_match REGEX \"#!${CMAKE_INSTALL_PREFIX}\") - if (\"\${regex_match}\" STREQUAL \"\") - list (APPEND resolved_programs \"\${res_program}\") - endif () - endif () - 
endforeach() - - - file(INSTALL FILES \${resolved_programs} DESTINATION - \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/images\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/images\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/bin/images DESTINATION - \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/tutorials\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/tutorials\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/bin/tutorials DESTINATION - \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/Externals\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/Externals\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/Externals DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - REGEX \"uuid\" EXCLUDE - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/include\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/include\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/include DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/lib\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/lib\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/lib DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - # Patch cgi.py to look for installed python - if (EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\") - file (READ \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\" CGI_FILE) - SET (SEARCH_REGEX \"\\\#! /usr/local/bin/python\") - SET (REPLACEMENT_TEXT \"#! 
/usr/bin/env python\") - STRING (REGEX REPLACE \"\${SEARCH_REGEX}\" \"\${REPLACEMENT_TEXT}\" - MODIFIED_FILE \"\${CGI_FILE}\") - file (WRITE \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\" - \"\${MODIFIED_FILE}\") - endif () - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/share\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/share\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/share DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/wrappers\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/wrappers\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/wrappers DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/man\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/man\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/man DESTINATION - \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/vistrails\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/vistrails\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/vistrails DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/Library\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/Library\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/Library DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS ) - endif() - - file(INSTALL FILES ${cdat_BINARY_DIR}/build_info.txt DESTINATION ${CMAKE_INSTALL_PREFIX}/info) - - # Unset QT_LIB_DIR as we need to use the one in user's environment - # We need to keep in ming that we might need to build Qt on some systems - # (e.g. RH6) in which case this might break something - set(QT_LIB_DIR) - - # Configure the environment setup script to point to the installation - # Creating a temporary file that will be installed. 
- configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.sh.in - \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/setup_runtime.sh\" +configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash.in + ${cdat_CMAKE_BINARY_DIR}/install_cdat_from_conda.bash @ONLY - ) - - # Finally, create a symlink for python to point to point to installed python - if (EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python2.7\" - AND - NOT EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python\") - execute_process(COMMAND \${CMAKE_COMMAND} -E create_symlink - \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python2.7\" - \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python\" - ) - endif () - - if (APPLE) - configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcdat.mac.in - ${CPACK_DESTINATION_BIN_PREFIX}/uvcdat - @ONLY ) - execute_process(COMMAND \${CMAKE_COMMAND} -E copy_directory ${cdat_SOURCE_DIR}/resources - ${CPACK_DESTINATION_PREFIX}/Resources - OUTPUT_VARIABLE out - RESULT_VARIABLE res - ERROR_VARIABLE err - ) - if(NOT \${res} EQUAL 0) - message(\"Output: \${out}; Result: \${res}; Error: \${err}\") - endif() - execute_process(COMMAND \${CMAKE_COMMAND} -E copy ${cdat_CMAKE_SOURCE_DIR}/uvcdat.plist - ${CPACK_DESTINATION_PREFIX}/Info.plist - OUTPUT_VARIABLE out - RESULT_VARIABLE res - ERROR_VARIABLE err +configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/clean_cdat_from_conda.bash.in + ${cdat_CMAKE_BINARY_DIR}/clean_cdat_from_conda.bash + @ONLY ) - if(NOT \${res} EQUAL 0) - message(\"Output: \${out}; Result: \${res}; Error: \${err}\") - endif() +add_subdirectory(testing) - execute_process(COMMAND ${PYTHON_EXECUTABLE} ${cdat_CMAKE_BINARY_DIR}/fix_install_name.py - ${CPACK_DESTINATION_PREFIX} - OUTPUT_VARIABLE out - RESULT_VARIABLE res - ERROR_VARIABLE err +# Clean conda env 
+add_custom_target(clean-conda + COMMAND ${cdat_CMAKE_BINARY_DIR}/clean_cdat_from_conda.bash ) - if(NOT \${res} EQUAL 0) - message(\"Output: \${out}; Result: \${res}; Error: \${err}\") - endif() - endif()" - - COMPONENT superbuild -) - diff --git a/Changes.txt b/Changes.txt deleted file mode 100644 index bc7cd069ea..0000000000 --- a/Changes.txt +++ /dev/null @@ -1,3 +0,0 @@ -[updated_packages_versions]: Added distribute, added option to choose between ip and easy_install, added option to use cert for pip -[updated_packages_versions]: Upgraded Packages to latest version -1.3.1 diff --git a/Packages/Thermo/Lib/thermo.py b/Packages/Thermo/Lib/thermo.py index c2d5ccc58d..9f8cc6a93a 100644 --- a/Packages/Thermo/Lib/thermo.py +++ b/Packages/Thermo/Lib/thermo.py @@ -4,9 +4,9 @@ import genutil import unidata import vcs +import numpy from vcs import VCS_validation_functions thermo_objects = [] -import numpy def Es(T, method=None): diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index 6b1b2bf9bb..b0b862993b 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True): + baseline=True, cleanup=True, update_baselines = False, suffix="_2"): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." @@ -133,6 +133,13 @@ def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThresh print "All baselines failed! 
Lowest error (%f) exceeds threshold (%f)."%(bestDiff, threshold) + if update_baselines: + bestFilename2=bestFilename[:-4]+suffix+".png" + print "Update baselines is ON so we are assuming you know what you're doing" + print "Replacing baseline %s with new baseline from %s" % (bestFilename2, fname) + import shutil + shutil.copy2(fname, bestFilename2) + sp = fname.split(".") diffFilename = ".".join(sp[:-1])+"_diff."+sp[-1] print "Saving image diff at '%s'."%diffFilename diff --git a/Packages/dat/files.txt b/Packages/vcs/Share/sample_files.txt similarity index 100% rename from Packages/dat/files.txt rename to Packages/vcs/Share/sample_files.txt diff --git a/Packages/vcs/scripts/vcs_download_sample_data b/Packages/vcs/scripts/vcs_download_sample_data new file mode 100755 index 0000000000..de3829e37f --- /dev/null +++ b/Packages/vcs/scripts/vcs_download_sample_data @@ -0,0 +1,4 @@ +#!/usr/bin/env python +import vcs +vcs.download_sample_data_files() + diff --git a/Packages/vcs/setup.py b/Packages/vcs/setup.py index e3f9dd0229..06f0ef5b9d 100755 --- a/Packages/vcs/setup.py +++ b/Packages/vcs/setup.py @@ -27,6 +27,7 @@ packages=find_packages(), package_dir={'vcs': 'vcs', }, + scripts= ["scripts/vcs_download_sample_data"], data_files=[('share/vcs', ('Share/wmo_symbols.json', 'Share/data_continent_coarse', 'Share/data_continent_political', @@ -40,6 +41,7 @@ 'Share/text_icon.png', 'Share/fill_icon.png', 'Share/line_icon.png', + 'Share/sample_files.txt', 'Fonts/Adelon_Regular.ttf', 'Fonts/Arabic.ttf', 'Fonts/Athens_Greek.ttf', diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py index 33d30434f8..7c24f56cb8 100644 --- a/Packages/vcs/vcs/VTKPlots.py +++ b/Packages/vcs/vcs/VTKPlots.py @@ -1526,7 +1526,10 @@ def update_input(self, vtkobjects, array1, array2=None, update=True): float(cdutil.averager(array1, axis=" ".join(["(%s)" % S for S in array1.getAxisIds()]))) except: - meanstring = 'Mean %.4g' % array1.mean() + try: + meanstring = 'Mean %.4g' % 
array1.mean() + except: + meanstring = 'Mean %.4g' % numpy.mean(array1.filled()) t.SetInput(meanstring) elif att == "crdate" and tstr is not None: t.SetInput(tstr.split()[0].replace("-", "/")) diff --git a/Packages/vcs/vcs/template.py b/Packages/vcs/vcs/template.py index fd2ee2f0c8..adabacda08 100644 --- a/Packages/vcs/vcs/template.py +++ b/Packages/vcs/vcs/template.py @@ -1486,7 +1486,10 @@ def plot(self, x, slab, gm, bg=0, min=None, axis=" ".join(["(%s)" % S for S in slab.getAxisIds()]))) except: - meanstring = 'Mean %.4g' % slab.mean() + try: + meanstring = 'Mean %.4g' % slab.mean() + except: + meanstring = 'Mean %.4g' % numpy.mean(slab.filled()) tt.string = meanstring else: tt.string = str(getattr(slab, s)) diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py index fa72c9254b..734dc1a7dc 100644 --- a/Packages/vcs/vcs/utils.py +++ b/Packages/vcs/vcs/utils.py @@ -1671,7 +1671,7 @@ def creategraphicsmethod(gtype, gname='default', name=None): # datawc_ can be a float or a cdtime.reltime # TODO: Investigate why datawc is converted to a cdtime.reltime def getDataWcValue(v): - if (type(v) is type(cdtime.reltime(0, 'months since 1900'))): + if (type(v) is type(cdtime.reltime(0, 'months since 1900'))): # noqa return v.value else: return v @@ -1807,3 +1807,39 @@ def png_read_metadata(path): for i in range(0, numberOfTextChunks): m[reader.GetTextKey(i)] = reader.GetTextValue(i) return m + + +def download_sample_data_files(path=None): + import requests + import hashlib + if path is None: + path = vcs.sample_data + samples = open(os.path.join(vcs.prefix, "share", "vcs", "sample_files.txt")).readlines() + for sample in samples: + good_md5, name = sample.split() + local_filename = os.path.join(path, name) + try: + os.makedirs(os.path.dirname(local_filename)) + except: + pass + attempts = 0 + while attempts < 3: + md5 = hashlib.md5() + if os.path.exists(local_filename): + f = open(local_filename) + md5.update(f.read()) + if md5.hexdigest() == good_md5: + attempts 
= 5 + continue + print "Downloading:", name, "in", local_filename + r = requests.get("http://uvcdat.llnl.gov/cdat/sample_data/" + name, stream=True) + with open(local_filename, 'wb') as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: # filter local_filename keep-alive new chunks + f.write(chunk) + md5.update(chunk) + f.close() + if md5.hexdigest() == good_md5: + attempts = 5 + else: + attempts += 1 diff --git a/Packages/xmgrace/Lib/ValidationFunctions.py b/Packages/xmgrace/Lib/ValidationFunctions.py index b9325ba024..b2a68514cb 100644 --- a/Packages/xmgrace/Lib/ValidationFunctions.py +++ b/Packages/xmgrace/Lib/ValidationFunctions.py @@ -74,8 +74,8 @@ def isNumber(value): def checkPositiveInt(self, name, value): if not isNumber(value): raise ValueError(name + ' must be an integer') - elif (not (isinstance(value, int) or isinstance(value, long)) - and (not int(value) == value)): + elif (not (isinstance(value, int) or isinstance(value, long)) and + (not int(value) == value)): raise ValueError(name + ' must be an integer') elif value < 0: raise ValueError(name + ' must be positve') @@ -172,8 +172,7 @@ def checkSide(self, name, value): def checkLoc(self, name, value): """ check the loc (auto) or a location """ if not ( - (isinstance(value, str) and value.lower() == 'auto') - or + (isinstance(value, str) and value.lower() == 'auto') or isListorTuple(value) ): raise ValueError(name + 'must be a "auto" or a tuple/list') @@ -296,8 +295,7 @@ def checkFormat(self, name, value): def checkAuto(self, name, value): """ check for 'auto' or a value """ if not ( - (isinstance(value, str) and value.lower() == 'auto') - or + (isinstance(value, str) and value.lower() == 'auto') or isNumber(value) ): raise ValueError(name + 'must be a "auto" or a number') diff --git a/exsrc/Makefile.am.pixman b/exsrc/Makefile.am.pixman deleted file mode 100644 index e57c21c468..0000000000 --- a/exsrc/Makefile.am.pixman +++ /dev/null @@ -1,54 +0,0 @@ -lib_LTLIBRARIES = libpixman-1.la 
-libpixman_1_la_LDFLAGS = -version-info $(LT_VERSION_INFO) -no-undefined -export-symbols pixman.def -libpixman_1_la_LIBADD = @DEP_LIBS@ -lm -libpixman_1_la_SOURCES = \ - pixman.h \ - pixman-access.c \ - pixman-access-accessors.c \ - pixman-region.c \ - pixman-private.h \ - pixman-image.c \ - pixman-combine.c \ - pixman-compose.c \ - pixman-compose-accessors.c \ - pixman-pict.c \ - pixman-source.c \ - pixman-transformed.c \ - pixman-transformed-accessors.c \ - pixman-utils.c \ - pixman-edge.c \ - pixman-edge-accessors.c \ - pixman-edge-imp.h \ - pixman-trap.c \ - pixman-compute-region.c \ - pixman-timer.c - -libpixmanincludedir = $(includedir)/pixman-1/ -libpixmaninclude_HEADERS = pixman.h pixman-version.h -noinst_LTLIBRARIES = - -EXTRA_DIST = Makefile.win32 - -# mmx code -if USE_MMX -noinst_LTLIBRARIES += libpixman-mmx.la -libpixman_mmx_la_SOURCES = \ - pixman-mmx.c \ - pixman-mmx.h -libpixman_mmx_la_CFLAGS = $(DEP_CFLAGS) $(MMX_CFLAGS) -libpixman_mmx_la_LIBADD = $(DEP_LIBS) -libpixman_1_la_LIBADD += libpixman-mmx.la -endif - - -# sse2 code -if USE_SSE2 -noinst_LTLIBRARIES += libpixman-sse.la -libpixman_sse_la_SOURCES = \ - pixman-sse.c \ - pixman-sse.h -libpixman_sse_la_CFLAGS = $(DEP_CFLAGS) $(SSE_CFLAGS) -libpixman_sse_la_LIBADD = $(DEP_LIBS) -libpixman_1_la_LIBADD += libpixman-sse.la -endif - diff --git a/exsrc/Numeric.sh b/exsrc/Numeric.sh deleted file mode 100755 index d82ca417b4..0000000000 --- a/exsrc/Numeric.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh -PACKAGE="Numeric" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - echo "Numeric won't build on 64bit system, use numpy instead" - exit -fi -if (test "${CDMSARCH}" = "x86_64") then - echo "Numeric won't build on 64bit system, use numpy instead" - exit -fi - -# Numeric, MA, PropertiedClasses, etc. 
-(cd Numeric-*; ${prefix}/${version}/bin/python setup.py build ${D} install) - diff --git a/exsrc/Pmw.sh b/exsrc/Pmw.sh deleted file mode 100755 index 70629fa8ea..0000000000 --- a/exsrc/Pmw.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -PACKAGE="Pmw" -. ./prolog.sh -# Twisted. -(cd Pmw-* ; cd src; ${prefix}/${version}/bin/python setup.py build ${D} install) - diff --git a/exsrc/R.sh b/exsrc/R.sh deleted file mode 100755 index 4e2a38f556..0000000000 --- a/exsrc/R.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="R" -. ./prolog.sh -(cd R*; ./configure --enable-R-shlib --prefix=${prefix}/Externals/R ; make ; make install ; make install ; cd ${prefix}/Externals/R/lib ; ln -s Externals/R/bin/libR.so ) - diff --git a/exsrc/README.txt b/exsrc/README.txt deleted file mode 100644 index 72e35f7dcf..0000000000 --- a/exsrc/README.txt +++ /dev/null @@ -1,23 +0,0 @@ -This directory cannot be built until Python is built. - -This directory contains sources for some parts of the CDAT -system that we didn't write or which change on very slow timescales. - -./install_script /whereyouwanttoputit - -The subdirectory src contains the tarred/zipped files that are used to make -the product. A subdirectory build will be created that contains the output. -Some of these products can be tested by changing to their directory under -build and typing "make test". - -This process will unpack the tar files from the src directory if there is no -build subdirectory. Otherwise it doesn't. If you put in a new source file -into src you need to clean before building. - -Log files are created in the build subdirectory. - -Each of the pieces may be built individually using the corresponding .sh -files in this directory. Some warning errors are usual from -many of the packages and vary from architecture to architecture. 
- - diff --git a/exsrc/VTK_BUILD_ANSWERS.core b/exsrc/VTK_BUILD_ANSWERS.core deleted file mode 100644 index d20aa1e611..0000000000 --- a/exsrc/VTK_BUILD_ANSWERS.core +++ /dev/null @@ -1,1320 +0,0 @@ -# This is the CMakeCache file. -# For build in directory: CDAT_PREFIX/VTK -# You can edit this file to change values found and used by cmake. -# If you do not want to change any of the values, simply exit the editor. -# If you do want to change a value, simply edit, save, and exit the editor. -# The syntax for the file is as follows: -# KEY:TYPE=VALUE -# KEY is the name of a variable in the cache. -# TYPE is a hint to GUI's for the type of VALUE, DO NOT EDIT TYPE!. -# VALUE is the current value for the KEY. - -######################## -# EXTERNAL cache entries -######################## - -//Build the documentation (Doxygen). -BUILD_DOCUMENTATION:BOOL=OFF - -//Build VTK examples. -BUILD_EXAMPLES:BOOL=OFF - -//Build VTK with shared libraries. -BUILD_SHARED_LIBS:BOOL=ON - -//Build the testing tree. -BUILD_TESTING:BOOL=OFF - -//Path to a program. -CMAKE_AR:FILEPATH=/usr/bin/ar - -//For backwards compatibility, what version of CMake commands and -// syntax should this version of CMake allow. -CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.0 - -//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or -// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel. -// -CMAKE_BUILD_TYPE:STRING= - -//C++ compiler -CMAKE_CXX_COMPILER:STRING=c++ - -//Flags used by the compiler during all build types. -CMAKE_CXX_FLAGS:STRING= - -//Flags used by the compiler during debug builds. -CMAKE_CXX_FLAGS_DEBUG:STRING=-g - -//Flags used by the compiler during release minsize builds. -CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os - -//Flags used by the compiler during release builds (/MD /Ob1 /Oi -// /Ot /Oy /Gs will produce slightly less optimized but smaller -// files). -CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 - -//Flags used by the compiler during Release with Debug Info builds. 
-// -CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g - -//C compiler -CMAKE_C_COMPILER:STRING=gcc - -//Flags for C compiler. -CMAKE_C_FLAGS:STRING= - -//Flags used by the compiler during debug builds. -CMAKE_C_FLAGS_DEBUG:STRING=-g - -//Flags used by the compiler during release minsize builds. -CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os - -//Flags used by the compiler during release builds (/MD /Ob1 /Oi -// /Ot /Oy /Gs will produce slightly less optimized but smaller -// files). -CMAKE_C_FLAGS_RELEASE:STRING=-O3 - -//Flags used by the compiler during Release with Debug Info builds. -// -CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g - -//Flags used by the linker. -CMAKE_EXE_LINKER_FLAGS:STRING= - -//Flags used by the linker during debug builds. -CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -// -CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Use HP pthreads. -CMAKE_HP_PTHREADS:BOOL=OFF - -//Install path prefix, prepended onto install directories. -CMAKE_INSTALL_PREFIX:PATH=CDAT_PREFIX - -//Path to a program. -CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/gmake - -//Flags used by the linker during the creation of modules. -CMAKE_MODULE_LINKER_FLAGS:STRING= - -//Flags used by the linker during debug builds. -CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -// -CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Path to a program. -CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib - -//Flags used by the linker during the creation of dll's. 
-CMAKE_SHARED_LINKER_FLAGS:STRING= - -//Flags used by the linker during debug builds. -CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -// -CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Thread library used. -CMAKE_THREAD_LIBS:STRING=-lpthread - -//Use the pthreads library. -CMAKE_USE_PTHREADS:BOOL=ON - -//If true, cmake will use relative paths in makefiles and projects. -// -CMAKE_USE_RELATIVE_PATHS:BOOL=OFF - -//Use sproc libs. -CMAKE_USE_SPROC:BOOL=OFF - -//Use the win32 thread library. -CMAKE_USE_WIN32_THREADS:BOOL=OFF - -//If this value is on, makefiles will be generated without the -// .SILENT directive, and all commands will be echoed to the console -// during the make. This is useful for debugging only. With Visual -// Studio IDE projects all commands are done without /nologo. -CMAKE_VERBOSE_MAKEFILE:BOOL=OFF - -//X11 extra flags. -CMAKE_X_CFLAGS:STRING=-I/usr/X11R6/include - -//Libraries and options used in X11 programs. -CMAKE_X_LIBS:STRING=-lSM;-lICE;-lSM;-lICE;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so - -//Path to program used to compress files for transfer to the dart -// server -COMPRESSIONCOMMAND:FILEPATH=/usr/bin/gzip - -//Path to the coverage program that Dart client uses for performing -// coverage inspection -COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov - -//Path to a program. -CVSCOMMAND:FILEPATH=/usr/bin/cvs - -//Options passed to the cvs update command. -CVS_UPDATE_OPTIONS:STRING=-d -A -P - -//Limit of reported errors, -1 reports all. -DART_BUILD_ERROR_REPORT_LIMIT:BOOL=OFF - -//Limit of reported warnings, -1 reports all. -DART_BUILD_WARNING_REPORT_LIMIT:BOOL=OFF - -//If you have Dart installed, where is it located? 
-DART_ROOT:PATH=DART_ROOT-NOTFOUND - -//Time alloted for a test before Dart will kill the test. -DART_TESTING_TIMEOUT:STRING=1500 - -//Show the actual output of the build, or if off show a . for each -// 1024 bytes. -DART_VERBOSE_BUILD:BOOL=OFF - -//Should Dart server send email when build errors are found in -// Continuous builds? -DELIVER_CONTINUOUS_EMAIL:BOOL=OFF - -//Value Computed by CMake -DICOMParser_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/DICOMParser - -//Value Computed by CMake -DICOMParser_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/DICOMParser - -//Path to gunzip executable -GUNZIPCOMMAND:FILEPATH=/usr/bin/gunzip - -//Path to java command, used by the Dart server to create html. -// -JAVACOMMAND:FILEPATH=/usr/bin/java - -//Command used to build entire project from the command line. -MAKECOMMAND:STRING=/usr/bin/gmake -i - -//Path to Rational purify command, used for memory error detection. -// -MEMORYCHECK_COMMAND:FILEPATH=MEMORYCHECK_COMMAND-NOTFOUND - -//File that contains suppressions for the memmory checker -MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH= - -//What is the path where the file GL/gl.h can be found -OPENGL_INCLUDE_DIR:PATH=/usr/share/doc/NVIDIA_GLX-1.0/include - -//Where can one of the MesaGL or GL libraries be found -OPENGL_gl_LIBRARY:FILEPATH=/usr/lib/libGL.so - -//Where can one of the MesaGLU or GLU libraries be found -OPENGL_glu_LIBRARY:FILEPATH=/usr/lib/libGLU.so - -//What is the path where the file GL/xmesa.h can be found -OPENGL_xmesa_INCLUDE_DIR:PATH=OPENGL_xmesa_INCLUDE_DIR-NOTFOUND - -//Path to a program. 
-PYTHON_EXECUTABLE:FILEPATH=CDAT_PREFIX/bin/python - -//What is the path where the file Python.h can be found -PYTHON_INCLUDE_PATH:PATH=CDAT_PREFIX/include/pythonPY_VERSION - -//Where can one of the python23, python2.3, python2.3.dll, python22, -// python2.2, python2.2.dll, python21, python2.1, python2.1.dll, -// python20, python2.0, python2.0.dll, python16, python1.6, python1.6.dll, -// python15, python1.5 or python1.5.dll libraries be found -PYTHON_LIBRARY:FILEPATH=CDAT_PREFIX/lib/pythonPY_VERSION/config/libpythonPY_VERSION.a - -//Utility library needed for vtkpython -PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.so - -//Path to scp command, used by some Dart clients for submitting -// results to a Dart server (when not using ftp for submissions) -// -SCPCOMMAND:FILEPATH=/usr/bin/scp - -//Name of the computer/site where compile is being run -SITE:STRING= - -//What is the path where the file tcl.h can be found -TCL_INCLUDE_PATH:PATH=CDAT_PREFIX/include - -//Where can one of the tcl, tcl84, tcl8.4, tcl83, tcl8.3, tcl82, -// tcl8.2, tcl80 or tcl8.0 libraries be found -TCL_LIBRARY:FILEPATH=CDAT_PREFIX/lib/libtclTCLTK_VERSION.a - -//Path to a program. 
-TCL_TCLSH:FILEPATH=CDAT_PREFIX/bin/tclshTCLTK_VERSION - -//What is the path where the file tk.h can be found -TK_INCLUDE_PATH:PATH=CDAT_PREFIX/include - -//Where can one of the tk, tk84, tk8.4, tk83, tk8.3, tk82, tk8.2, -// tk80 or tk8.0 libraries be found -TK_LIBRARY:FILEPATH=CDAT_PREFIX/lib/libtkTCLTK_VERSION.a - -//Value Computed by CMake -VTKEXPAT_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkexpat - -//Value Computed by CMake -VTKEXPAT_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkexpat - -//Value Computed by CMake -VTKFREETYPE_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkfreetype - -//Value Computed by CMake -VTKFREETYPE_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkfreetype - -//Value Computed by CMake -VTKFTGL_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/ftgl - -//Value Computed by CMake -VTKFTGL_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/ftgl - -//Value Computed by CMake -VTKJPEG_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkjpeg - -//Value Computed by CMake -VTKJPEG_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkjpeg - -//Value Computed by CMake -VTKNETCDF_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtknetcdf - -//Value Computed by CMake -VTKNETCDF_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtknetcdf - -//Value Computed by CMake -VTKPNG_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkpng - -//Value Computed by CMake -VTKPNG_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkpng - -//Value Computed by CMake -VTKTIFF_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtktiff - -//Value Computed by CMake -VTKTIFF_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtktiff - -//Value Computed by CMake -VTKZLIB_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkzlib - -//Value Computed by CMake -VTKZLIB_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkzlib - -//Value Computed by CMake -VTK_BINARY_DIR:STATIC=CDAT_PREFIX/VTK - -//What is the path where the file VTKData.readme can be found 
-VTK_DATA_ROOT:PATH=CDAT_BUILD_DIR/VTK/VTKData - -//Build leak checking support into VTK. -VTK_DEBUG_LEAKS:BOOL=OFF - -//Location of the OpenGL extensions header file (glext.h). -VTK_GLEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/glext.h - -//Location of the GLX extensions header file (glxext.h). -VTK_GLXEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/glxext.h - -//Remove all legacy code completely. -VTK_LEGACY_REMOVE:BOOL=OFF - -//Silence all legacy code messages. -VTK_LEGACY_SILENT:BOOL=OFF - -//The opengl library being used supports off screen Mesa calls. -// -VTK_OPENGL_HAS_OSMESA:BOOL=OFF - -//Value Computed by CMake -VTK_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK - -//Build with static Tcl/Tk support. TCL_LIBRARY and TK_LIBRARY -// must point to the corresponding Tcl/Tk static libraries (example, -// tcl84sx.lib, tk84sx.lib). -VTK_TCL_TK_STATIC:BOOL=ON - -//Build VTK with 64 bit ids -VTK_USE_64BIT_IDS:BOOL=OFF - -//Use the ANSI standard iostream library. -VTK_USE_ANSI_STDLIB:BOOL=ON - -//Turn this option off and tests will not popup windows -VTK_USE_DISPLAY:BOOL=ON - -//Build VTK with gl2ps support. -VTK_USE_GL2PS:BOOL=ON - -//Build VTK with GUI Support -VTK_USE_GUISUPPORT:BOOL=OFF - -//Use mangled Mesa with OpenGL. -VTK_USE_MANGLED_MESA:BOOL=OFF - -//Build the vtkParallel kit. -VTK_USE_PARALLEL:BOOL=OFF - -//Build the vtkRendering kit. Needed for displaying data or using -// widgets. -VTK_USE_RENDERING:BOOL=ON - -//Build shared libraries with rpath. This makes it easy to run -// executables from the build tree when using shared libraries, -// but removes install support. -VTK_USE_RPATH:BOOL=ON - -//Use the system's expat library. -VTK_USE_SYSTEM_EXPAT:BOOL=OFF - -//Use the system's freetype library. -VTK_USE_SYSTEM_FREETYPE:BOOL=OFF - -//Use the system's jpeg library. -VTK_USE_SYSTEM_JPEG:BOOL=OFF - -//Use the system's png library. -VTK_USE_SYSTEM_PNG:BOOL=OFF - -//Use the system's tiff library. 
-VTK_USE_SYSTEM_TIFF:BOOL=OFF - -//Use the system's zlib library. -VTK_USE_SYSTEM_ZLIB:BOOL=OFF - -//Location of the WGL extensions header file (wglext.h). -VTK_WGLEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/wglext.h - -//Where can the hints file be found -VTK_WRAP_HINTS:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Wrapping/hints - -//Wrap VTK classes into the Java language. -VTK_WRAP_JAVA:BOOL=OFF - -//Wrap VTK classes into the Python language. -VTK_WRAP_PYTHON:BOOL=ON - -//Path to an internal program. -VTK_WRAP_PYTHON_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapPython - -//Path to an internal program. -VTK_WRAP_PYTHON_INIT_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapPythonInit - -//Wrap VTK classes into the TCL language. -VTK_WRAP_TCL:BOOL=ON - -//Path to an internal program. -VTK_WRAP_TCL_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapTcl - -//Path to an internal program. -VTK_WRAP_TCL_INIT_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapTclInit - -//What is the path where the file X11/X.h can be found -X11_X11_INCLUDE_PATH:PATH=/usr/X11R6/include - -//Where can the X11 library be found -X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.so - -//Where can the Xext library be found -X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.so - -//What is the path where the file X11/Xlib.h can be found -X11_Xlib_INCLUDE_PATH:PATH=/usr/X11R6/include - -//What is the path where the file X11/Xutil.h can be found -X11_Xutil_INCLUDE_PATH:PATH=/usr/X11R6/include - -//Dependencies for the target -vtkCommonPython_LIB_DEPENDS:STATIC=vtkCommon; - -//Dependencies for the target -vtkCommonTCL_LIB_DEPENDS:STATIC=vtkCommon;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m; - -//Dependencies for the target -vtkCommon_LIB_DEPENDS:STATIC=-lpthread;-ldl;-lm; - -//Dependencies for target -vtkDICOMParser_LIB_DEPENDS:STATIC= - -//Value Computed by CMake -vtkExodus2_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkexodus2 - -//Value Computed by CMake -vtkExodus2_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkexodus2 - 
-//Dependencies for the target -vtkFilteringPython_LIB_DEPENDS:STATIC=vtkFiltering;vtkCommonPython; - -//Dependencies for the target -vtkFilteringTCL_LIB_DEPENDS:STATIC=vtkFiltering;vtkCommonTCL; - -//Dependencies for the target -vtkFiltering_LIB_DEPENDS:STATIC=vtkCommon; - -//Dependencies for the target -vtkGenericFilteringPython_LIB_DEPENDS:STATIC=vtkGenericFiltering;vtkFilteringPython;vtkGraphicsPython; - -//Dependencies for the target -vtkGenericFilteringTCL_LIB_DEPENDS:STATIC=vtkGenericFiltering;vtkFilteringTCL;vtkGraphicsTCL; - -//Dependencies for the target -vtkGenericFiltering_LIB_DEPENDS:STATIC=vtkFiltering;vtkGraphics; - -//Dependencies for the target -vtkGraphicsPython_LIB_DEPENDS:STATIC=vtkGraphics;vtkFilteringPython; - -//Dependencies for the target -vtkGraphicsTCL_LIB_DEPENDS:STATIC=vtkGraphics;vtkFilteringTCL; - -//Dependencies for the target -vtkGraphics_LIB_DEPENDS:STATIC=vtkFiltering; - -//Dependencies for the target -vtkHybridPython_LIB_DEPENDS:STATIC=vtkHybrid;vtkRenderingPython;vtkIOPython; - -//Dependencies for the target -vtkHybridTCL_LIB_DEPENDS:STATIC=vtkHybrid;vtkRenderingTCL;vtkIOTCL; - -//Dependencies for the target -vtkHybrid_LIB_DEPENDS:STATIC=vtkRendering;vtkIO;vtkexoIIc; - -//Dependencies for the target -vtkIOPython_LIB_DEPENDS:STATIC=vtkIO;vtkFilteringPython; - -//Dependencies for the target -vtkIOTCL_LIB_DEPENDS:STATIC=vtkIO;vtkFilteringTCL; - -//Dependencies for the target -vtkIO_LIB_DEPENDS:STATIC=vtkFiltering;vtkDICOMParser;vtkpng;vtkzlib;vtkjpeg;vtktiff;vtkexpat; - -//Dependencies for the target -vtkImagingPython_LIB_DEPENDS:STATIC=vtkImaging;vtkFilteringPython; - -//Dependencies for the target -vtkImagingTCL_LIB_DEPENDS:STATIC=vtkImaging;vtkFilteringTCL; - -//Dependencies for the target -vtkImaging_LIB_DEPENDS:STATIC=vtkFiltering; - -//Dependencies for target -vtkNetCDF_LIB_DEPENDS:STATIC= - -//Dependencies for the target 
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=vtkRendering;CDAT_PREFIX/lib/libtkTCLTK_VERSION.a;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m; - -//Dependencies for the target -vtkRenderingPython_LIB_DEPENDS:STATIC=vtkRendering;vtkGraphicsPython;vtkImagingPython; - -//Dependencies for the target -vtkRenderingTCL_LIB_DEPENDS:STATIC=vtkRendering;vtkGraphicsTCL;vtkImagingTCL;CDAT_PREFIX/lib/libtkTCLTK_VERSION.a;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m; - -//Dependencies for the target -vtkRendering_LIB_DEPENDS:STATIC=vtkGraphics;vtkImaging;vtkIO;vtkftgl;vtkfreetype;vtkzlib;/usr/lib/libGL.so;-lXt;-lSM;-lICE;-lSM;-lICE;-lSM;-lICE;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so; - -//Dependencies for the target -vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=vtkVolumeRendering;vtkRenderingPython;vtkIOPython; - -//Dependencies for the target -vtkVolumeRenderingTCL_LIB_DEPENDS:STATIC=vtkVolumeRendering;vtkRenderingTCL;vtkIOTCL; - -//Dependencies for the target -vtkVolumeRendering_LIB_DEPENDS:STATIC=vtkRendering;vtkIO; - -//Dependencies for the target -vtkWidgetsPython_LIB_DEPENDS:STATIC=vtkWidgets;vtkRenderingPython;vtkHybridPython; - -//Dependencies for the target -vtkWidgetsTCL_LIB_DEPENDS:STATIC=vtkWidgets;vtkRenderingTCL;vtkHybridTCL; - -//Dependencies for the target -vtkWidgets_LIB_DEPENDS:STATIC=vtkRendering;vtkHybrid; - -//Dependencies for the target -vtkexoIIc_LIB_DEPENDS:STATIC=vtkNetCDF; - -//Dependencies for target -vtkexpat_LIB_DEPENDS:STATIC= - -//Dependencies for target -vtkfreetype_LIB_DEPENDS:STATIC= - -//Dependencies for the target -vtkftgl_LIB_DEPENDS:STATIC=/usr/lib/libGL.so;vtkfreetype; - -//Dependencies for target -vtkjpeg_LIB_DEPENDS:STATIC= - -//Dependencies for the target -vtkpng_LIB_DEPENDS:STATIC=vtkzlib; - -//Value Computed by CMake -vtksys_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/kwsys - -//Dependencies for target -vtksys_LIB_DEPENDS:STATIC= - 
-//Value Computed by CMake -vtksys_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/kwsys - -//Dependencies for the target -vtktiff_LIB_DEPENDS:STATIC=vtkzlib;vtkjpeg; - -//Dependencies for target -vtkzlib_LIB_DEPENDS:STATIC= - - -######################## -# INTERNAL cache entries -######################## - -//Executable to project name. -CDAT_PREFIX/VTK/bin/vtkWrapPython:INTERNAL=vtkWrapPython -//Executable to project name. -CDAT_PREFIX/VTK/bin/vtkWrapPythonInit:INTERNAL=vtkWrapPythonInit -//Executable to project name. -CDAT_PREFIX/VTK/bin/vtkWrapTcl:INTERNAL=vtkWrapTcl -//Executable to project name. -CDAT_PREFIX/VTK/bin/vtkWrapTclInit:INTERNAL=vtkWrapTclInit -//Advanced flag for variable: BUILD_DOCUMENTATION -BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1 -//Advanced flag for variable: BUILD_TESTING -BUILD_TESTING-ADVANCED:INTERNAL=1 -//Result of TRY_COMPILE -CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE -//Have include iostream -CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1 -//Advanced flag for variable: CMAKE_AR -CMAKE_AR-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_BUILD_TOOL -CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1 -//What is the target build tool cmake is generating for. -CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/gmake -//This is the directory where this CMakeCahe.txt was created -CMAKE_CACHEFILE_DIR:INTERNAL=CDAT_PREFIX/VTK -//Major version of cmake used to create the current loaded cache -// -CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2 -//Minor version of cmake used to create the current loaded cache -// -CMAKE_CACHE_MINOR_VERSION:INTERNAL=0 -//Major version of cmake used to create the current loaded cache -// -CMAKE_CACHE_RELEASE_VERSION:INTERNAL=patch 6 -//Path to CMake executable. -CMAKE_COMMAND:INTERNAL=CDAT_PREFIX/bin/cmake -//Advanced flag for variable: CMAKE_CTEST_COMMAND -CMAKE_CTEST_COMMAND-ADVANCED:INTERNAL=1 -//Path to ctest program executable. 
-CMAKE_CTEST_COMMAND:INTERNAL=CDAT_PREFIX/bin/ctest -//Advanced flag for variable: CMAKE_CXX_COMPILER -CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1 -//full path to the compiler cmake found -CMAKE_CXX_COMPILER_FULLPATH:INTERNAL=/usr/bin/c++ -//Result of TRY_COMPILE -CMAKE_CXX_COMPILER_WORKS:INTERNAL=TRUE -//Advanced flag for variable: CMAKE_CXX_FLAGS -CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_CXX_FLAGS_DEBUG -CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_CXX_FLAGS_MINSIZEREL -CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_CXX_FLAGS_RELEASE -CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO -CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_COMPILER -CMAKE_C_COMPILER-ADVANCED:INTERNAL=1 -//full path to the compiler cmake found -CMAKE_C_COMPILER_FULLPATH:INTERNAL=/usr/bin/gcc -//Result of TRY_COMPILE -CMAKE_C_COMPILER_WORKS:INTERNAL=TRUE -//Advanced flag for variable: CMAKE_C_FLAGS -CMAKE_C_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_FLAGS_DEBUG -CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_FLAGS_MINSIZEREL -CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_FLAGS_RELEASE -CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_FLAGS_RELWITHDEBINFO -CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Path to cache edit program executable. 
-CMAKE_EDIT_COMMAND:INTERNAL=CDAT_PREFIX/bin/ccmake -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS -CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG -CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL -// -CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE -CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO -// -CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Name of generator. -CMAKE_GENERATOR:INTERNAL=Unix Makefiles -//Have include sstream -CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=1 -//Is X11 around. -CMAKE_HAS_X:INTERNAL=1 -//Have function connect -CMAKE_HAVE_CONNECT:INTERNAL=1 -//Have function gethostbyname -CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1 -//Have include limits.h -CMAKE_HAVE_LIMITS_H:INTERNAL=1 -//Have library pthreads -CMAKE_HAVE_PTHREADS_CREATE:INTERNAL= -//Have library pthread -CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1 -//Have include pthread.h -CMAKE_HAVE_PTHREAD_H:INTERNAL=1 -//Have function remove -CMAKE_HAVE_REMOVE:INTERNAL=1 -//Have function shmat -CMAKE_HAVE_SHMAT:INTERNAL=1 -//Have include sys/prctl.h -CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=1 -//Have include unistd.h -CMAKE_HAVE_UNISTD_H:INTERNAL=1 -//Start directory with the top level CMakeLists.txt file for this -// project -CMAKE_HOME_DIRECTORY:INTERNAL=CDAT_BUILD_DIR/VTK/VTK -//Advanced flag for variable: CMAKE_HP_PTHREADS -CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1 -//Have library ICE -CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1 -//Advanced flag for variable: CMAKE_MAKE_PROGRAM -CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS -CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG -CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL -// -CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE -// -CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO -// -CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Does the compiler support ansi for scope. -CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0 -//Advanced flag for variable: CMAKE_NO_ANSI_STREAM_HEADERS -CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1 -//Does the compiler support headers like iostream. -CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0 -//Does the compiler support std::. -CMAKE_NO_STD_NAMESPACE:INTERNAL=0 -//Advanced flag for variable: CMAKE_RANLIB -CMAKE_RANLIB-ADVANCED:INTERNAL=1 -//Path to CMake installation. -CMAKE_ROOT:INTERNAL=CDAT_PREFIX/share/CMake -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS -CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG -CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL -// -CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE -// -CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO -// -CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Result of TRY_RUN -CMAKE_SIZEOF_CHAR:INTERNAL=1 -//Result of TRY_RUN -CMAKE_SIZEOF_DOUBLE:INTERNAL=8 -//Result of TRY_RUN -CMAKE_SIZEOF_FLOAT:INTERNAL=4 -//Result of TRY_RUN -CMAKE_SIZEOF_INT:INTERNAL=4 -//Result of TRY_RUN -CMAKE_SIZEOF_LONG:INTERNAL=4 -//Result of TRY_RUN -CMAKE_SIZEOF_SHORT:INTERNAL=2 -//Result of TRY_RUN -CMAKE_SIZEOF_VOID_P:INTERNAL=4 -//Advanced flag for variable: CMAKE_SKIP_RPATH -CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1 -//Whether to build with rpath. 
-CMAKE_SKIP_RPATH:INTERNAL=0 -//Result of TRY_COMPILE -CMAKE_STD_NAMESPACE:INTERNAL=TRUE -//Advanced flag for variable: CMAKE_THREAD_LIBS -CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1 -//uname command -CMAKE_UNAME:INTERNAL=/bin/uname -//Advanced flag for variable: CMAKE_USE_PTHREADS -CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_USE_RELATIVE_PATHS -CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_USE_SPROC -CMAKE_USE_SPROC-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_USE_WIN32_THREADS -CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_VERBOSE_MAKEFILE -CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1 -//Result of TRY_RUN -CMAKE_WORDS_BIGENDIAN:INTERNAL=0 -//Advanced flag for variable: CMAKE_X_CFLAGS -CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_X_LIBS -CMAKE_X_LIBS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: COMPRESSIONCOMMAND -COMPRESSIONCOMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: COVERAGE_COMMAND -COVERAGE_COMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CVSCOMMAND -CVSCOMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CVS_UPDATE_OPTIONS -CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1 -//Path to an executable -CommonCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx -//Advanced flag for variable: DART_BUILD_ERROR_REPORT_LIMIT -DART_BUILD_ERROR_REPORT_LIMIT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DART_BUILD_WARNING_REPORT_LIMIT -DART_BUILD_WARNING_REPORT_LIMIT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DART_ROOT -DART_ROOT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DART_TESTING_TIMEOUT -DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DART_VERBOSE_BUILD -DART_VERBOSE_BUILD-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DELIVER_CONTINUOUS_EMAIL -DELIVER_CONTINUOUS_EMAIL-ADVANCED:INTERNAL=1 -//Single output directory for building all executables. 
-EXECUTABLE_OUTPUT_PATH:INTERNAL=CDAT_PREFIX/VTK/bin -//Path to an executable -FilteringCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering/Testing/Cxx -//Advanced flag for variable: GUNZIPCOMMAND -GUNZIPCOMMAND-ADVANCED:INTERNAL=1 -//Path to an executable -GenericFilteringCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering/Testing/Cxx -//Path to an executable -GraphicsCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics/Testing/Cxx -//Have symbol alloca -HAVE_ALLOCA:INTERNAL=1 -//Have include HAVE_ALLOCA_H -HAVE_ALLOCA_H:INTERNAL=1 -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE -//Have include fcntl.h -HAVE_FCNTL_H:INTERNAL=1 -//NetCDF test -HAVE_FTRUNCATE:INTERNAL=1 -//Result of TRY_COMPILE -HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_FLOAT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_INT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_LONG:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_OFF_T:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_SHORT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_UNSIGNED_CHAR:INTERNAL=TRUE -//Have include HAVE_STDDEF_H -HAVE_STDDEF_H:INTERNAL=1 -//Have include HAVE_STDINT_H -HAVE_STDINT_H:INTERNAL=1 -//Have include HAVE_STDIO_H -HAVE_STDIO_H:INTERNAL=1 -//Have include HAVE_STDLIB_H -HAVE_STDLIB_H:INTERNAL=1 -//Have symbol strerror 
-HAVE_STRERROR:INTERNAL=1 -//Have include HAVE_STRING_H -HAVE_STRING_H:INTERNAL=1 -//NetCDF test -HAVE_ST_BLKSIZE:INTERNAL=1 -//Have include HAVE_SYS_STAT_H -HAVE_SYS_STAT_H:INTERNAL=1 -//Have include HAVE_SYS_TYPES_H -HAVE_SYS_TYPES_H:INTERNAL=1 -//Have include unistd.h -HAVE_UNISTD_H:INTERNAL=1 -//Result of TRY_COMPILE -HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE -//Result of TRY_COMPILE -HAVE_WORDS_BIGENDIAN:INTERNAL=TRUE -//Path to an executable -IOCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO/Testing/Cxx -//Path to an executable -ImagingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging/Testing/Cxx -//Advanced flag for variable: JAVACOMMAND -JAVACOMMAND-ADVANCED:INTERNAL=1 -//Result of TRY_COMPILE -KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE -//Result of TRY_COMPILE -KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE 
-//Single output directory for building all libraries. -LIBRARY_OUTPUT_PATH:INTERNAL=CDAT_PREFIX/VTK/bin -//Advanced flag for variable: MAKECOMMAND -MAKECOMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: MEMORYCHECK_COMMAND -MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: MEMORYCHECK_SUPPRESSIONS_FILE -MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: OPENGL_INCLUDE_DIR -OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1 -//Advanced flag for variable: OPENGL_gl_LIBRARY -OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1 -//Advanced flag for variable: OPENGL_glu_LIBRARY -OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1 -//Advanced flag for variable: OPENGL_xmesa_INCLUDE_DIR -OPENGL_xmesa_INCLUDE_DIR-ADVANCED:INTERNAL=1 -//Advanced flag for variable: PYTHON_EXECUTABLE -PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: PYTHON_UTIL_LIBRARY -PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1 -//Path to an executable -RenderingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering/Testing/Cxx -//Advanced flag for variable: SCPCOMMAND -SCPCOMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: SITE -SITE-ADVANCED:INTERNAL=1 -//Result of TRY_RUN -SIZEOF_DOUBLE:INTERNAL=8 -//Result of TRY_RUN -SIZEOF_FLOAT:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_INT:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_LONG:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_OFF_T:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_PTRDIFF_T:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_SHORT:INTERNAL=2 -//Result of TRY_RUN -SIZEOF_SIZE_T:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_SSIZE_T:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_UNSIGNED_CHAR:INTERNAL=1 -//Have include STDC_HEADERS -STDC_HEADERS:INTERNAL=1 -//This value is not used by VTK. -TCL_LIBRARY_DEBUG:INTERNAL=TCL_LIBRARY_DEBUG-NOTFOUND -//Advanced flag for variable: TCL_STUB_LIBRARY -TCL_STUB_LIBRARY-ADVANCED:INTERNAL=1 -//This value is not used by VTK. 
-TCL_STUB_LIBRARY:INTERNAL=CDAT_PREFIX/lib/libtclstubTCLTK_VERSION.a -//Advanced flag for variable: TCL_STUB_LIBRARY_DEBUG -TCL_STUB_LIBRARY_DEBUG-ADVANCED:INTERNAL=1 -//This value is not used by VTK. -TCL_STUB_LIBRARY_DEBUG:INTERNAL=TCL_STUB_LIBRARY_DEBUG-NOTFOUND -//Advanced flag for variable: TCL_TCLSH -TCL_TCLSH-ADVANCED:INTERNAL=1 -//This value is not used by VTK. -TK_LIBRARY_DEBUG:INTERNAL=TK_LIBRARY_DEBUG-NOTFOUND -//Advanced flag for variable: TK_STUB_LIBRARY -TK_STUB_LIBRARY-ADVANCED:INTERNAL=1 -//This value is not used by VTK. -TK_STUB_LIBRARY:INTERNAL=CDAT_PREFIX/lib/libtkstubTCLTK_VERSION.a -//Advanced flag for variable: TK_STUB_LIBRARY_DEBUG -TK_STUB_LIBRARY_DEBUG-ADVANCED:INTERNAL=1 -//This value is not used by VTK. -TK_STUB_LIBRARY_DEBUG:INTERNAL=TK_STUB_LIBRARY_DEBUG-NOTFOUND -//This value is not used by VTK. -TK_WISH:INTERNAL=/usr/bin/wish -//Path to an executable -TestCxxFeatures_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx -//Path to an executable -TestInstantiator_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx -//Path to an executable -VTKBenchMark_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering/Testing/Cxx -//Result of TRY_COMPILE -VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE -//Result of TRY_RUN -VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0 -//Result of TRY_COMPILE -VTK_CMAKE_EXTENSIONS_COMPILED:INTERNAL=TRUE -//Support for C++ type bool -VTK_COMPILER_HAS_BOOL:INTERNAL=1 -//Support for full template specialization syntax -VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1 -//Advanced flag for variable: VTK_DEBUG_LEAKS -VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1 -//Disables the automatic initialization of Tk widgets when loading -// the rendering library. 
-VTK_DISABLE_TK_INIT:INTERNAL=OFF -//Support for C++ explict templates -VTK_EXPLICIT_TEMPLATES:INTERNAL=1 -//Advanced flag for variable: VTK_GLEXT_FILE -VTK_GLEXT_FILE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_GLXEXT_FILE -VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1 -//Result of TRY_COMPILE -VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS:INTERNAL=FALSE -//Already set VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS -VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS_TESTED:INTERNAL=1 -//Have include iosfwd -VTK_HAVE_ANSI_STREAMS:INTERNAL=1 -//Have include iostream.h -VTK_HAVE_OLD_STREAMS:INTERNAL=1 -//Have include strstream.h -VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=1 -//Have include strstrea.h -VTK_HAVE_OLD_STRSTREA_H:INTERNAL= -//Whether istream supports long long -VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1 -//Advanced flag for variable: VTK_LEGACY_REMOVE -VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_LEGACY_SILENT -VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_OPENGL_HAS_OSMESA -VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1 -//Whether ostream supports long long -VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1 -//OpenGL extensions parser. -VTK_PARSEOGLEXT_EXE:INTERNAL=CDAT_PREFIX/VTK/bin/vtkParseOGLExt -//Result of TRY_RUN -VTK_SIZEOF_LONG_LONG:INTERNAL=8 -//Path to the Tcl support library files. -VTK_TCL_SUPPORT_LIBRARY_PATH:INTERNAL=CDAT_PREFIX/include/../lib/tclTCLTK_VERSION -//Very few users should worry about this option. If VTK is built -// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared -// Tcl/Tk bundled inside a project with no library support files -// (ex: ParaViewComplete), this variable should be set to ON and -// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH -// should point to the directories that hold those files (typically, -// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation, -// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source -// repository). 
Once this variable is set to ON, support files -// will automatically be copied to the build directory and the -// executables will try to use that location to initialize Tcl/Tk. -// -VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=ON -//Advanced flag for variable: VTK_TCL_TK_STATIC -VTK_TCL_TK_STATIC-ADVANCED:INTERNAL=1 -//Path to the Tk support library files. -VTK_TK_SUPPORT_LIBRARY_PATH:INTERNAL=CDAT_PREFIX/include/../lib/tkTCLTK_VERSION -//Whether char is signed. -VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1 -//Result of TRY_COMPILE -VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE -//Advanced flag for variable: VTK_USE_64BIT_IDS -VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_ANSI_STDLIB -VTK_USE_ANSI_STDLIB-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_DISPLAY -VTK_USE_DISPLAY-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_GL2PS -VTK_USE_GL2PS-ADVANCED:INTERNAL=1 -//Have function glXGetProcAddressARB -VTK_USE_GLX_GET_PROC_ADDRESS_ARB:INTERNAL=1 -//Advanced flag for variable: VTK_USE_GUISUPPORT -VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_MANGLED_MESA -VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_EXPAT -VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_FREETYPE -VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_JPEG -VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_PNG -VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_TIFF -VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_ZLIB -VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WGLEXT_FILE -VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_HINTS -VTK_WRAP_HINTS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_PYTHON_EXE 
-VTK_WRAP_PYTHON_EXE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_PYTHON_INIT_EXE -VTK_WRAP_PYTHON_INIT_EXE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_TCL_EXE -VTK_WRAP_TCL_EXE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_TCL_INIT_EXE -VTK_WRAP_TCL_INIT_EXE-ADVANCED:INTERNAL=1 -//Path to an executable -VolumeRenderingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering/Testing/Cxx -//Result of TRY_RUN -WORDS_BIGENDIAN:INTERNAL=0 -//Path to an executable -WidgetsCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets/Testing/Cxx -//Have library /usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so -// -X11_LIB_X11_SOLO:INTERNAL=1 -//Advanced flag for variable: X11_X11_INCLUDE_PATH -X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1 -//Advanced flag for variable: X11_X11_LIB -X11_X11_LIB-ADVANCED:INTERNAL=1 -//Advanced flag for variable: X11_Xext_LIB -X11_Xext_LIB-ADVANCED:INTERNAL=1 -//Advanced flag for variable: X11_Xlib_INCLUDE_PATH -X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1 -//Advanced flag for variable: X11_Xutil_INCLUDE_PATH -X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1 -//Path to an executable -mkg3states_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtktiff -//Path to a library -vtkCommonPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common -//Whether a library is static, shared or module. -vtkCommonPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkCommonTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common -//Whether a library is static, shared or module. -vtkCommonTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkCommon_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common -//Whether a library is static, shared or module. -vtkCommon_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkDICOMParser_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/DICOMParser -//Whether a library is static, shared or module. 
-vtkDICOMParser_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkFilteringPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering -//Whether a library is static, shared or module. -vtkFilteringPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkFilteringTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering -//Whether a library is static, shared or module. -vtkFilteringTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkFiltering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering -//Whether a library is static, shared or module. -vtkFiltering_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkGenericFilteringPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering -//Whether a library is static, shared or module. -vtkGenericFilteringPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkGenericFilteringTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering -//Whether a library is static, shared or module. -vtkGenericFilteringTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkGenericFiltering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering -//Whether a library is static, shared or module. -vtkGenericFiltering_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkGraphicsPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics -//Whether a library is static, shared or module. -vtkGraphicsPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkGraphicsTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics -//Whether a library is static, shared or module. -vtkGraphicsTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkGraphics_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics -//Whether a library is static, shared or module. -vtkGraphics_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkHybridPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid -//Whether a library is static, shared or module. 
-vtkHybridPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkHybridTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid -//Whether a library is static, shared or module. -vtkHybridTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkHybrid_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid -//Whether a library is static, shared or module. -vtkHybrid_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkIOPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO -//Whether a library is static, shared or module. -vtkIOPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkIOTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO -//Whether a library is static, shared or module. -vtkIOTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkIO_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO -//Whether a library is static, shared or module. -vtkIO_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkImagingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging -//Whether a library is static, shared or module. -vtkImagingPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkImagingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging -//Whether a library is static, shared or module. -vtkImagingTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkImaging_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging -//Whether a library is static, shared or module. -vtkImaging_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkNetCDF_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtknetcdf -//Whether a library is static, shared or module. -vtkNetCDF_LIBRARY_TYPE:INTERNAL=SHARED -//Path to an executable -vtkParseOGLExt_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/ParseOGLExt -//Path to a library -vtkRenderingPythonTkWidgets_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering -//Whether a library is static, shared or module. -vtkRenderingPythonTkWidgets_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkRenderingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering -//Whether a library is static, shared or module. 
-vtkRenderingPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkRenderingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering -//Whether a library is static, shared or module. -vtkRenderingTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkRendering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering -//Whether a library is static, shared or module. -vtkRendering_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkVolumeRenderingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering -//Whether a library is static, shared or module. -vtkVolumeRenderingPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkVolumeRenderingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering -//Whether a library is static, shared or module. -vtkVolumeRenderingTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkVolumeRendering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering -//Whether a library is static, shared or module. -vtkVolumeRendering_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkWidgetsPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets -//Whether a library is static, shared or module. -vtkWidgetsPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkWidgetsTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets -//Whether a library is static, shared or module. -vtkWidgetsTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkWidgets_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets -//Whether a library is static, shared or module. 
-vtkWidgets_LIBRARY_TYPE:INTERNAL=SHARED -//Path to an executable -vtkWrapPythonInit_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping -//Path to an executable -vtkWrapPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping -//Path to an executable -vtkWrapTclInit_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping -//Path to an executable -vtkWrapTcl_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping -//Path to an executable -vtk_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping/Tcl -//Path to a library -vtkexoIIc_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkexodus2 -//Whether a library is static, shared or module. -vtkexoIIc_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkexpat_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkexpat -//Whether a library is static, shared or module. -vtkexpat_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkfreetype_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkfreetype -//Whether a library is static, shared or module. -vtkfreetype_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkftgl_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/ftgl -//Whether a library is static, shared or module. -vtkftgl_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkjpeg_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkjpeg -//Whether a library is static, shared or module. -vtkjpeg_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkpng_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkpng -//Whether a library is static, shared or module. -vtkpng_LIBRARY_TYPE:INTERNAL=SHARED -//Path to an executable -vtkpython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping/Python -//Path to a library -vtksys_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/kwsys -//Whether a library is static, shared or module. -vtksys_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtktiff_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtktiff -//Whether a library is static, shared or module. 
-vtktiff_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkzlib_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkzlib -//Whether a library is static, shared or module. -vtkzlib_LIBRARY_TYPE:INTERNAL=SHARED - diff --git a/exsrc/blas.sh b/exsrc/blas.sh deleted file mode 100755 index 921446f3d9..0000000000 --- a/exsrc/blas.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -PACKAGE="blas" -BUILD=`pwd` -export BUILD -. ./prolog.sh - -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -if ( test $FC = "gfortran") then - CPPFLAGS="-DpgiFortran"; export CPPFLAGS -fi -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then -( CXX=""; export CXX \ -) -fi - -cd blas*;\ - # Add f77 support -unset PGI; \ - echo $FC ; \ - env FORTRAN=${FC} make; cp libblas.a ${prefix}/Externals/lib; \ - - diff --git a/exsrc/cairo.sh b/exsrc/cairo.sh deleted file mode 100755 index 7954914830..0000000000 --- a/exsrc/cairo.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -PACKAGE="cairo" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -PKG_CONFIG=${prefix}/Externals/bin/pkg-config -export PKG_CONFIG -(cd cairo-* ; ./configure --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/clean_script b/exsrc/clean_script deleted file mode 100755 index 185cc2b0e8..0000000000 --- a/exsrc/clean_script +++ /dev/null @@ -1,2 +0,0 @@ -/bin/rm -fr build >/dev/null 2>&1 -find . -name 'config.cache' -print -exec rm {} \; diff --git a/exsrc/cmake.sh b/exsrc/cmake.sh deleted file mode 100755 index 069754011e..0000000000 --- a/exsrc/cmake.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh -PACKAGE="cmake" -. 
./prolog.sh -( cd cmake*; \ - ./configure --prefix=${prefix}/Externals; \ - make; make install -) diff --git a/exsrc/curl.sh b/exsrc/curl.sh deleted file mode 100755 index 951fa4c538..0000000000 --- a/exsrc/curl.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="curl" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd curl* ; ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install ) - diff --git a/exsrc/detect_fortran.py b/exsrc/detect_fortran.py deleted file mode 100644 index 17c0c5661a..0000000000 --- a/exsrc/detect_fortran.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python -import os,sys - -def detect_fortran_compiler(full_path=True): - - - fortrans = """ -g77 -gfortran -f90 -f95 -g95 -xlf90 -fort77 -pgf77 -pgf90 -cf77 -xlf -ghf77 -""" - if os.environ.has_key('FC'): - return os.environ['FC'] - - for f in fortrans.split(): - i,o=os.popen4('which '+f) - ln=o.readlines() - o.close() - i.close() - if (ln!=[]) and (not 'no' in ln[0].lower().split()) and (not 'not' in ln[0].lower().split()) : - if full_path : - return ln[0].strip() - else: - return f - -if __name__=="__main__": - print detect_fortran_compiler() diff --git a/exsrc/ffmpeg.sh b/exsrc/ffmpeg.sh deleted file mode 100755 index 50c6b59498..0000000000 --- a/exsrc/ffmpeg.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -PACKAGE="ffmpeg" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -PKG_CONFIG=${prefix}/Externals/bin/pkg-config -export PKG_CONFIG -(cd ffmpeg ; ./configure --enable-pthreads --enable-gpl --enable-pp --enable-swscaler --enable-x11grab --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/find_site.py b/exsrc/find_site.py deleted file mode 100644 index 39d76dbff4..0000000000 --- a/exsrc/find_site.py +++ /dev/null @@ -1,9 +0,0 @@ -# helper routine for installing Pmw since it has no installer. -import sys, os -for x in sys.path: - y = os.path.basename(x) - if y == 'site-packages': - print x - break -else: #If there is none such as on older windows versions - print sys.path[-1] diff --git a/exsrc/fontconfig.sh b/exsrc/fontconfig.sh deleted file mode 100755 index 060f335fb1..0000000000 --- a/exsrc/fontconfig.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -PACKAGE="fontconfig" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -PKG_CONFIG=${prefix}/Externals/bin/pkg-config -export PKG_CONFIG - -(cd fontconfig-* ; ./configure --prefix=${prefix}/Externals --enable-libxml2 --with-freetype-config=${prefix}/Externals/bin/freetype-config ; make ; make install ) - diff --git a/exsrc/freetype.sh b/exsrc/freetype.sh deleted file mode 100755 index a540ae58f6..0000000000 --- a/exsrc/freetype.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="freetype" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd freetype-* ; ./configure --prefix=${prefix}/Externals ; make ; make install ; ln -s ${prefix}/Externals/include/freetype2/freetype ${prefix}/Externals/include/freetype ) - diff --git a/exsrc/gdal.sh b/exsrc/gdal.sh deleted file mode 100755 index 714a94bb5b..0000000000 --- a/exsrc/gdal.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="gdal" -. ./prolog.sh -(cd gdal* ; ./configure --with-libtiff=internal --with-gif=internal --without-cfitsio --prefix=${prefix}/Externals ; make ; make install; ${prefix}/${version}/bin/python setup.py install ) - diff --git a/exsrc/ghostscript.sh b/exsrc/ghostscript.sh deleted file mode 100755 index 0a100777be..0000000000 --- a/exsrc/ghostscript.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="ghostscript" -. ./prolog.sh -(mkdir -p ${prefix}/Externals/share/ghostscript ; cd ghostscript-*; ln -s ../libpng-1.2.8 libpng ; ln -s ../jpeg-6b jpeg ; ./configure --prefix=${prefix}/Externals ; make ; make install ; mv ../fonts ${prefix}/Externals/share/ghostscript ) - diff --git a/exsrc/gifmerge.sh b/exsrc/gifmerge.sh deleted file mode 100755 index 85a4ac810f..0000000000 --- a/exsrc/gifmerge.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="gifmerge" -. ./prolog.sh -(cd gifmerge* ; make ; mv gifmerge ${prefix}/Externals/bin ) - diff --git a/exsrc/gifsicle.sh b/exsrc/gifsicle.sh deleted file mode 100755 index 6ebe09f5fb..0000000000 --- a/exsrc/gifsicle.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="gifsicle" -. ./prolog.sh -(cd gifsicle*; ./configure --prefix=${prefix}/Externals ; make install ) - diff --git a/exsrc/gplot.sh b/exsrc/gplot.sh deleted file mode 100755 index 2b588cd1f0..0000000000 --- a/exsrc/gplot.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -PACKAGE=gplot -. 
./prolog.sh -d=`uname` -(cd gplot; make -f Makefile.${d} ; mv gplot ${prefix}/Externals/bin ) - diff --git a/exsrc/hdf.sh b/exsrc/hdf.sh deleted file mode 100755 index f4a8cbf539..0000000000 --- a/exsrc/hdf.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="HDF" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd HDF* ; env CFLAGS=-DHAVE_NETCDF CXXFLAGS=-DHAVE_NETCDF ./configure --enable-fortran=no --disable-shared --with-jpeg=${prefix}/External/HDF --prefix=${prefix}/Externals/HDF ; make ; make install ; cp -pf ${prefix}/Externals/HDF/bin/* ${prefix}/Externals/bin ) - diff --git a/exsrc/install_script.obsolete b/exsrc/install_script.obsolete deleted file mode 100755 index 1097976094..0000000000 --- a/exsrc/install_script.obsolete +++ /dev/null @@ -1,1154 +0,0 @@ -#!/bin/sh - -if [ -n "$PYTHONPATH" ]; then - echo "PYTHONPATH environment variable should not be set!" - exit 1 -fi - -if [ -n "$PYTHONHOME" ]; then - echo "PYTHONHOME environment variable should not be set!" - exit 1 -fi - -cdmsonly=no - -OS=`uname` -NetCDF=yes -dap=no -hdf=no -freetype=yes -numpy=yes -scipy=yes -ipython=yes -cairo=yes -ffmpeg=yes -blas=yes -lapack=yes -lapack95=yes - -echo $OS -if [ "$OS" = "Linux" ]; then - pbmplus=no - netpbm=no -elif [ "$OS" = "Darwin" ]; then - pbmplus=no - netpbm=yes -elif [ "$OS" = "CYGWIN_NT-5.1" ]; then - pbmplus=no - netpbm=yes -elif [ "$OS" = "CYGWIN_NT-6.0" ]; then - pbmplus=no - netpbm=yes -else - netpbm=no - pbmplus=yes -fi - -s=$1; shift; -Pyfort=yes -Numeric=no -XGKS=yes -Pmw=yes -gplot=no -gifsicle=yes -R=no -VTK=no -ghostscript=no -ioapi=no -ncfortran=no - -while [ "$#" -ne 0 ] -do - # Translate $1 to lowercase - MYOPT=`echo $1 | tr 'A-Z' 'a-z'` - if [ "$MYOPT" = "--help" ]; then - echo " Builds external software required by CDAT." 
- echo " Packages builds are:" - echo " numpy 1.3.0.1 (on)" - echo " scipy 0.5.2.1 (on)" - echo " ipython 0.8 (off) (includes ipython1 and Twisted 2.5.0)" - echo " freetype 2.3.4 (on)" - echo " cairo 1.4.12 (on)" - echo " ffmpeg (11/4/2007) (on)" - echo " Pyfort 8.5.5 (on)" - echo " jpeg 6b (on)" - echo " libpng 1.2.8 (on)" - echo " Ghostscript 8.50 with jpeg 6b and libpng 1.2.8 (on)" - echo " NetCDF 3.6.1" - echo " NetCDF-Fortran 3.6.1 (off) to build NetCDF with Fortran" - echo " XGKS (on) with plug to freetype fonts" - echo " Numeric 23.1 (on)" - echo " Pmw 1.3 (on)" - echo " gplot (off)" - echo " gifsicle 1.35 (on)" - echo " netpbm 10.27 (on Linux/Mac, off otherwise)" - echo " pbmplus (off Linux/Mac, on otherwise)" - echo " gifmerge (on)" - echo " opendap 3.5: libdap 3.5.3 libnc-dap 3.5.2" - echo " HDF 4.2.r1 (off)" - echo " R 2.5.0 (off)" - echo " ioapi 3.0 (off) will turn off opendap and on NetCDF-Fortran" - echo " gdal 1.4.3 (off) turned on by ioapi" - echo " proj 4.4.9 (off) turned on by ioapi" - echo " Packages can be turned on/off using --enable-PACKAGE --disable-PACKAGE" - echo " You can build a single Package by passing --PACKAGE-only" - echo " If you already built externals before, or do not wish to build them because you think you already have them" - echo " pass: --disable-externals-build" - echo " This will only build python-based externals" - echo " Notes:" - echo " opendap is very unlikely to build on any non standard platform" - - - exit 1 - fi - if [ "$MYOPT" = "--cdms-only" ]; then - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--ioapi-only" ]; then - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - ioapi=yes - ncfortran=yes - NetCDF=no - dap=no - Numeric=no - hdf=no - freetype=no - numpy=no - 
scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--numeric-only" ]; then - Numeric=yes - dap=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - ioapi=no - hdf=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--opendap-only" ]; then - Numeric=no - dap=yes - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--hdf4-only" ]; then - Numeric=no - dap=no - hdf=yes - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--netcdf-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=yes - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--netcdf-fortran-only" ]; then - Numeric=no - hdf=no - dap=no - NetCDF=no - ncfortran=yes - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--pyfort-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=yes - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ 
"$MYOPT" = "--xgks-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=yes - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--pmw-only" ]; then - Numeric=no - dap=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=yes - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--gplot-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=yes - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--gifsicle-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=yes - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--netpbm-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=yes - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--pbmplus-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=yes - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--gifmerge-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - 
netpbm=no - pbmplus=no - gifmerge=yes - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--r-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - R=yes - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi -# if [ "$MYOPT" = "--vtk-only" ]; then -# Numeric=no -# dap=no -# hdf=no -# NetCDF=no -# Pyfort=no -# XGKS=no -# Pmw=no -# gplot=no -# gifsicle=no -# netpbm=no -# pbmplus=no -# gifmerge=no -# VTK=yes -# ghostscript=no -# freetype=no -# numpy=no -# scipy=no -# ipython=no -# fi - if [ "$MYOPT" = "--ghostscript-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=yes - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--freetype-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=yes - cairo=no - ffmpeg=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--numpy-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=yes - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--scipy-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=yes - ipython=no - cairo=no - ffmpeg=no - blas=no 
- lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--ipython-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=yes - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--cairo-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=yes - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--ffmpeg-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=yes - numpy=no - scipy=no - ipython=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--blas-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=no - numpy=no - scipy=no - ipython=no - blas=yes - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--lapack-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=no - numpy=no - scipy=no - ipython=no - blas=no - lapack=yes - lapack95=no - fi - if [ "$MYOPT" = "--lapack95-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=no - numpy=no - scipy=no - ipython=no - blas=no - lapack=no - lapack95=yes - fi -# Turn Off Options..... 
- if [ "$MYOPT" = "--disable-opendap" ]; then - dap=no - if [ $ioapi = no ]; then - NetCDF=yes - fi - echo "Turning opendap Off" - fi - if [ "$MYOPT" = "--disable-hdf4" ]; then - hdf=no - echo "Turning hdf4 Off" - fi - if [ "$MYOPT" = "--disable-ioapi" ]; then - ioapi=no - echo "Turning ioapi Off" - fi - if [ "$MYOPT" = "--disable-ghostscript" ]; then - ghostscript=no - echo "Turning ghostscript Off" - fi - if [ "$MYOPT" = "--disable-pyfort" ]; then - Pyfort=no - echo "Turning Pyfort Off" - fi - if [ "$MYOPT" = "--disable-numeric" ]; then - Numeric=no - echo "Turning Numeric Off" - fi - if [ "$MYOPT" = "--disable-xgks" ]; then - XGKS=no - echo "Turning XGKS Off" - fi - if [ "$MYOPT" = "--disable-pmw" ]; then - Pmw=no - echo "Turning Pmw Off" - fi - if [ "$MYOPT" = "--disable-gplot" ]; then - gplot=no - echo "Turning gplot Off" - fi - if [ "$MYOPT" = "--disable-gifsicle" ]; then - gifsicle=no - echo "Turning gifsicle Off" - fi - if [ "$MYOPT" = "--disable-netpbm" ]; then - netpbm=no - echo "Turning netpbm Off" - fi - if [ "$MYOPT" = "--disable-pbmplus" ]; then - pbmplus=no - echo "Turning pbmplus Off" - fi - if [ "$MYOPT" = "--disable-gifmerge" ]; then - gifmerge=no - echo "Turning gifmerge Off" - fi - if [ "$MYOPT" = "--disable-netcdf" ]; then - NetCDF=no - echo "Turning NetCDF Off" - fi - if [ "$MYOPT" = "--disable-r" ]; then - R=no - echo "Turning R Off" - fi -# if [ "$MYOPT" = "--disable-vtk" ]; then -# VTK=no -# echo "Turning VTK Off" -# fi - if [ "$MYOPT" = "--disable-freetype" ]; then - freetype=no - echo "Turning freetype Off" - fi - if [ "$MYOPT" = "--disable-numpy" ]; then - numpy=no - echo "Turning numpy Off" - fi - if [ "$MYOPT" = "--disable-scipy" ]; then - scipy=no - echo "Turning scipy Off" - fi - if [ "$MYOPT" = "--disable-ipython" ]; then - ipython=no - echo "Turning ipython Off" - fi - if [ "$MYOPT" = "--disable-cairo" ]; then - cairo=no - echo "Turning cairo Off" - fi - if [ "$MYOPT" = "--disable-ffmpeg" ]; then - ffmpeg=no - echo "Turning ffmpeg 
Off" - fi - if [ "$MYOPT" = "--disable-blas" ]; then - blas=no - echo "Turning blas Off" - fi - if [ "$MYOPT" = "--disable-lapack" ]; then - lapack=no - lapack95=no - echo "Turning lapack and lapack95 Off" - fi - if [ "$MYOPT" = "--disable-lapack95" ]; then - lapack95=no - echo "Turning lapack95 Off" - fi -# Turn On Options..... - if [ "$MYOPT" = "--enable-ioapi" ]; then - ioapi=yes - NetCDF=no - ncfortran=yes - echo "Turning ioapi On" - fi - if [ "$MYOPT" = "--enable-opendap" ]; then - dap=yes - echo "Turning opendap On" - fi - if [ "$MYOPT" = "--enable-pyfort" ]; then - Pyfort=yes - echo "Turning Pyfort On" - fi - if [ "$MYOPT" = "--enable-ghostscript" ]; then - ghostscript=yes - echo "Turning Ghostscript On" - fi - if [ "$MYOPT" = "--enable-numeric" ]; then - Numeric=yes - echo "Turning Numeric On" - fi - if [ "$MYOPT" = "--enable-xgks" ]; then - XGKS=yes - echo "Turning XGKS On" - fi - if [ "$MYOPT" = "--enable-pmw" ]; then - Pmw=yes - echo "Turning Pmw On" - fi - if [ "$MYOPT" = "--enable-gplot" ]; then - gplot=yes - echo "Turning gplot On" - fi - if [ "$MYOPT" = "--enable-gifsicle" ]; then - gifsicle=yes - echo "Turning gifsicle On" - fi - if [ "$MYOPT" = "--enable-netpbm" ]; then - netpbm=yes - echo "Turning netpbm On" - fi - if [ "$MYOPT" = "--enable-pbmplus" ]; then - pbmplus=yes - echo "Turning pbmplus On" - fi - if [ "$MYOPT" = "--enable-gifmerge" ]; then - gifmerge=yes - echo "Turning gifmerge On" - fi - if [ "$MYOPT" = "--enable-netcdf" ]; then - NetCDF=yes - echo "Turning NetCDF On" - fi - if [ "$MYOPT" = "--enable-r" ]; then - R=yes - echo "Turning R On" - fi - if [ "$MYOPT" = "--enable-hdf4" ]; then - hdf=yes - echo "Turning hdf4 On" - fi -# if [ "$MYOPT" = "--enable-vtk" ]; then -# VTK=yes -# echo "Turning VTK On" -# fi - if [ "$MYOPT" = "--enable-freetype" ]; then - freetype=yes - echo "Turning freetype On" - fi - if [ "$MYOPT" = "--enable-numpy" ]; then - numpy=yes - echo "Turning numpy On" - fi - if [ "$MYOPT" = "--enable-scipy" ]; then - 
scipy=yes - echo "Turning scipy On, do not turn off blas and lapack if they're not on your system" - fi - if [ "$MYOPT" = "--enable-ipython" ]; then - ipython=yes - echo "Turning ipython On" - fi - if [ "$MYOPT" = "--enable-cairo" ]; then - cairo=yes - echo "Turning cairo On" - fi - if [ "$MYOPT" = "--enable-ffmpeg" ]; then - ffmpeg=yes - echo "Turning ffmpeg On" - fi - if [ "$MYOPT" = "--enable-blas" ]; then - blas=yes - echo "Turning blas On" - fi - if [ "$MYOPT" = "--enable-lapack" ]; then - lapack=yes - echo "Turning lapack On" - fi - if [ "$MYOPT" = "--enable-ffmpeg" ]; then - lapack=yes - lapack95=yes - echo "Turning lapack and lapack95 On" - fi - if [ "$MYOPT" = "--disable-externals-build" ]; then - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=no - XGKS=no - dap=no - hdf=no - NetCDF=no - blas=no - lapack=no - lapack95=no - fi - shift -done - -## Make sure we don't build NetCDF if opendap is there... -if [ "$dap" = "yes" ]; then - NetCDF=no -fi - -d=`pwd` -echo "Building external software that CDAT requires." -echo "See $d/build for logs of the build." -echo "Any Package can be NOT build by passing --disable-Package" -echo "If you wish to build 1 Package only, pass --Package-only" -echo "Packages are: netcdf, netcdf-fortran, opendap, pyfort, numeric, xgks, pmw, gplot, gifsicle," -echo " netpbm, pbmplus, gifmerge, r, ghostscript, ioapi, hdf4, freetype, cairo" -echo "Note R is not built by default: Pass --enable-r to build R library (Linux only)." -#echo "Note VTK is not built by default: Pass --enable-vtk to build (linux only)." -echo "opendap MIGHT work on solaris but probably won't, try to build separately" -echo "Warning errors in these builds are expected." 
- - -#(./prolog.sh $s) || (echo "Unpack of tar files failed."; exit 1) -mkdir -p build -if [ "$Pyfort" = "yes" ]; then - echo "Building Pyfort (Fortran/C interface)" - (./pyfort.sh $s 2>&1 | tee build/pyfort.LOG > ../logs/pyfort.LOG) || (echo "Build of Pyfort failed."; exit 1) -fi -if [ "$ghostscript" = "yes" ]; then - echo "Building Ghostscript" - (./ghostscript.sh $s 2>&1 | tee build/ghostscript.LOG > ../logs/ghostscript.LOG) || (echo "Build of ghostscript failed."; exit 1) -fi -if [ "$ffmpeg" = "yes" ]; then - echo "Building ffmpeg" - (./ffmpeg.sh $s 2>&1 | tee build/ffmpeg.LOG > ../logs/ffmpeg.LOG) || (echo "Build of ffmpeg failed."; exit 1) -fi -if [ "$freetype" = "yes" ]; then - echo "Building Freetype" - (./freetype.sh $s 2>&1 | tee build/freetype.LOG > ../logs/freetype.LOG) || (echo "Build of freetype failed."; exit 1) -fi -if [ "$cairo" = "yes" ]; then - echo "Building necessary libs for cairo" - echo " Building xml" - (./libxml.sh $s 2>&1 | tee build/libxml.LOG > ../logs/libxml.LOG) || (echo "Build of libxml failed."; exit 1) - echo " Building libpixman" - (./libpixman.sh $s 2>&1 | tee build/libpixman.LOG > ../logs/libpixman.LOG) || (echo "Build of libpixman failed."; exit 1) - echo " Building libpng" - (./libpng.sh $s 2>&1 | tee build/libpng.LOG > ../logs/libpng.LOG) || (echo "Build of libpng failed."; exit 1) - echo " Building pkgconfig" - (./pkgconfig.sh $s 2>&1 | tee build/pkgconfig.LOG > ../logs/pkgconfig.LOG) || (echo "Build of pkgconfig failed."; exit 1) - echo " Building fontconfig" - (./fontconfig.sh $s 2>&1 | tee build/fontconfig.LOG > ../logs/fontconfig.LOG) || (echo "Build of fontconfig failed."; exit 1) - echo "Building Cairo" - (./cairo.sh $s 2>&1 | tee build/cairo.LOG > ../logs/cairo.LOG) || (echo "Build of cairo failed."; exit 1) -fi -if [ "$NetCDF" = "yes" ]; then - echo "Building netcdf without Fortran support" - (./netcdf.sh $s 2>&1 | tee build/netcdf.LOG > ../logs/netcdf.LOG ) || (echo "Build of netcdf without fortran failed."; exit 1) 
-fi -if [ "$hdf" = "yes" ]; then - echo "Building HDF" - echo "... Building JPEG library required by HDF" - (./jpeg.sh $s 2>&1 | tee build/jpeg.LOG > ../logs/jpeg.LOG ) || (echo "Build of jpeg."; exit 1) - echo "... Building HDF4 library" - (./hdf.sh $s 2>&1 | tee build/hdf.LOG > ../logs/hdf.LOG ) || (echo "Build of hdf."; exit 1) -fi -if [ "$ncfortran" = "yes" ]; then - echo "Building netcdf with Fortran support" - (./netcdf_fortran.sh $s 2>&1 | tee build/netcdf.LOG > ../logs/netcdf.LOG ) || (echo "Build of netcdf with fortran failed."; exit 1) -fi -if [ "$blas" = "yes" ]; then - echo "Building blas" - (./blas.sh $s 2>&1 | tee build/blas.LOG > ../logs/blas.LOG ) || (echo "Build of blas failed."; exit 1) -fi -if [ "$lapack" = "yes" ]; then - echo "Building lapack" - (./lapack.sh $s 2>&1 | tee build/lapack.LOG > ../logs/lapack.LOG ) || (echo "Build of lapack failed."; exit 1) -fi -if [ "$lapack95" = "yes" ]; then - echo "Building lapack95" - (./lapack95.sh $s 2>&1 | tee build/lapack95.LOG > ../logs/lapack95.LOG ) || (echo "Build of lapack95 failed."; exit 1) -fi -if [ "$numpy" = "yes" ]; then - if [ "$BLAS" = "" ]; then - BLAS=SETBLAS - export BLAS - fi - if [ "$LAPACK" = "" ]; then - LAPACK=SETLAPACK - export LAPACK - fi - echo "Building numpy" ${BLAS} ${LAPACK} - (./numpy.sh $s 2>&1 | tee build/numpy.LOG > ../logs/numpy.LOG ) || (echo "Build of numpy failed."; exit 1) -fi -if [ "$scipy" = "yes" ]; then - if [ "$BLAS" = "" ]; then - BLAS=SETBLAS - export BLAS - fi - if [ "$LAPACK" = "" ]; then - LAPACK=SETLAPACK - export LAPACK - fi - echo "Building scipy" - (./scipy.sh $s 2>&1 | tee build/scipy.LOG > ../logs/scipy.LOG ) || (echo "Build of scipy failed."; exit 1) -fi -if [ "$ipython" = "yes" ]; then - echo "Building ipython and its dependencies" - echo "... Building setuptools (with zope interface)" - (./setuptools.sh $s 2>&1 | tee build/setuptools.LOG > ../logs/setuptools.LOG ) || (echo "Build of setuptools failed."; exit 1) - echo "... 
Building Twisted (with zope interface)" - (./twisted.sh $s 2>&1 | tee build/twisted.LOG > ../logs/twisted.LOG ) || (echo "Build of Twisted failed."; exit 1) - echo "... Building ipython1" - (./ipython1.sh $s 2>&1 | tee build/ipython1.LOG > ../logs/ipython1.LOG ) || (echo "Build of ipython1 failed."; exit 1) - echo "... Building ipython" - (./ipython.sh $s 2>&1 | tee build/ipython.LOG > ../logs/ipython.LOG ) || (echo "Build of ipython failed."; exit 1) -fi -if [ "$ioapi" = "yes" ]; then - echo "Building IOAPI and its dependencies" - echo "... Building ioapi" - (./ioapi.sh $s 2>&1 | tee build/ioapi.LOG > ../logs/ioapi.LOG ) || (echo "Build of ioapi failed."; exit 1) - echo "... Building proj" - (./proj.sh $s 2>&1 | tee build/proj.LOG > ../logs/proj.LOG ) || (echo "Build of proj failed."; exit 1) - echo "... Building gdal" - (./gdal.sh $s 2>&1 | tee build/gdal.LOG > ../logs/gdal.LOG ) || (echo "Build of gdal failed."; exit 1) -fi -if [ "$XGKS" = "yes" ]; then - echo "Building xgks header files and fonts. (graphics display)" - (./xgks.sh $s 2>&1 | tee build/xgks.LOG > ../logs/xgks.LOG ) || (echo "Build of xgks failed."; exit 1) -fi -if [ "$Numeric" = "yes" ]; then - echo "Building Numeric (numerical operations, masked arrays, etc...)" - (./Numeric.sh $s 2>&1 | tee build/Numeric.LOG > ../logs/Numeric.LOG) || (echo "Build of Numeric failed."; exit 1) -fi -if [ "$dap" = "yes" ]; then - echo "Building opendap (client side only)" - echo "... Building curl required by opendap" - (./curl.sh $s 2>&1 | tee build/curl.LOG > ../logs/curl.LOG) || (echo "Build of curl failed";exit 1) - echo "... Building libxml required by opendap" - (./libxml.sh $s 2>&1 | tee build/libxml.LOG > ../logs/libxml.LOG) || (echo "Build of libxml failed";exit 1) - echo "... Building libdap required by opendap" - (./libdap.sh $s 2>&1 | tee build/libdap.LOG > ../logs/libdap.LOG) || (echo "Build of libdap failed";exit 1) - echo "... 
Building ncdap required by opendap, replaces standard netCDF libraries" - (./libnc-dap.sh $s 2>&1 | tee build/libnc-dap.LOG > ../logs/libnc-dap.LOG) || (echo "Build of libncdap failed";exit 1) -fi -if [ "$Pmw" = "yes" ]; then - echo "Building Pmw (Python Mega Widget, to design GUIs)" - (./Pmw.sh $s 2>&1 | tee build/Pmw.LOG > ../logs/Pmw.LOG) || (echo "Build of Pmw failed."; exit 1) -fi -if [ "$gplot" = "yes" ]; then - echo "Building gplot (postscript output)" - (./gplot.sh $s 2>&1 | tee build/gplot.LOG > ../logs/gplot.LOG) || (echo "Build of gplot failed, try manualy."; exit 1) -fi -if [ "$gifsicle" = "yes" ]; then - echo "Building gifsicle (for animated GIF output)" - (./gifsicle.sh $s 2>&1 | tee build/gifsicle.LOG > ../logs/gifsicle.LOG) || (echo "Build of gifsicle failed."; exit 1) -fi -if [ "$netpbm" = "yes" ]; then - echo "Building netpbm (for GIF output)" - (./netpbm.sh $s 2>&1 | tee build/netpbm.LOG > ../logs/netpbm.LOG) || (echo "Build of netpbm failed."; exit 1) -fi -if [ "$pbmplus" = "yes" ]; then - echo "Building pbmplus (for GIF output)" - (./pbmplus.sh $s 2>&1 | tee build/pbmplus.LOG > ../logs/pbmplus.LOG) || (echo "Build of pbmplus failed."; exit 1) -fi -if [ "$gifmerge" = "yes" ]; then - echo "Building gifmerge (for GIF output)" - (./gifmerge.sh $s 2>&1 | tee build/gifmerge.LOG > ../logs/gifmerge.LOG) || (echo "Build of gifmerge failed."; exit 1) -fi -if [ "$R" = "yes" ]; then - echo "Building R statistical library" - (./R.sh $s 2>&1 | tee build/R.LOG > ../logs/R.LOG) || (echo "Build of R failed";exit 1) -fi -#if [ "$VTK" = "yes" ]; then -# echo "Building cmake (required by VTK)" -# (./cmake.sh $s 2>&1 | tee build/cmake.LOG > ../logs/cmake.LOG) || (echo "Build of cmake failed";exit 1) -# echo "Building VTK" -# (./vtk.sh $s 2>&1 | tee build/VTK.LOG > ../logs/VTK.LOG) || (echo "Build of VTK failed";exit 1) -#fi -echo "Done with building the external software." 
diff --git a/exsrc/ioapi.sh b/exsrc/ioapi.sh deleted file mode 100755 index a2f973a902..0000000000 --- a/exsrc/ioapi.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/sh -BUILD=`pwd` -export BUILD -PACKAGE="ioapi" -. ./prolog.sh -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -(cd ioapi*/ioapi; \ - # build the library - make -f Makefile.nocpl; \ - # go to the object/lib directory - # and run ranlib (only needed for Darwin) - # but doesn't effect the build - cd ../neutral_g77; \ - ranlib libioapi.a; \ - - # copy the library to pyIoapi contrib package - # and the installation directory (prefix) -# echo "Copying IOAPI library to pyIoapi package" ; \ -# cp libioapi.a ../../../../contrib/pyIoapi/Src/lib_external; \ - cp libioapi.a ${prefix}/Externals/lib; -) diff --git a/exsrc/ipython.sh b/exsrc/ipython.sh deleted file mode 100755 index 66166ce8ac..0000000000 --- a/exsrc/ipython.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="ipython" -. ./prolog.sh -# ipython. -(cd ipython-* ; ${prefix}/${version}/bin/python setup.py build ${D} install) diff --git a/exsrc/ipython1.sh b/exsrc/ipython1.sh deleted file mode 100755 index db6b6e84e4..0000000000 --- a/exsrc/ipython1.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -PACKAGE="ipython1" -. ./prolog.sh -# ipython1. -(cd ipython1*; ${prefix}/${version}/bin/python setup.py build ${D} install) - diff --git a/exsrc/jpeg.sh b/exsrc/jpeg.sh deleted file mode 100755 index 206570e3ac..0000000000 --- a/exsrc/jpeg.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="jpeg" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(mkdir ${prefix}/Externals/HDF ; mkdir ${prefix}/Externals/HDF/lib ; mkdir ${prefix}/Externals/HDF/include ; cd jpeg* ; ./configure --prefix=${prefix}/Externals/HDF ; make ; mv libjpeg.a ${prefix}/Externals/HDF/lib ; cp *.h ${prefix}/Externals/HDF/include ) - diff --git a/exsrc/lapack.sh b/exsrc/lapack.sh deleted file mode 100755 index 73df47e3de..0000000000 --- a/exsrc/lapack.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -PACKAGE="lapack-lite" -BUILD=`pwd` -export BUILD -. ./prolog.sh - -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -if ( test $FC = "gfortran") then - CPPFLAGS="-DpgiFortran"; export CPPFLAGS -fi -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then -( CXX=""; export CXX \ -) -fi - -cd lapack-lite*;\ - # Add f77 support -unset PGI; \ - echo $FC ; \ - env LOADER=${FC} FORTRAN=${FC} BLAS=${prefix}/Externals/libblas.a make; cp liblapack.a libtmglib.a ${prefix}/Externals/lib; \ - - diff --git a/exsrc/lapack95.sh b/exsrc/lapack95.sh deleted file mode 100755 index b4344cd6d0..0000000000 --- a/exsrc/lapack95.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -PACKAGE="lapack95" -BUILD=`pwd` -export BUILD -. 
./prolog.sh - -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -if ( test $FC = "gfortran") then - CPPFLAGS="-DpgiFortran"; export CPPFLAGS -fi -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then -( CXX=""; export CXX \ -) -fi - -cd lapack95*/SRC;\ - # Add f77 support -unset PGI; \ - echo $FC ; \ - env LAPACK_PATH=${prefix}/Externals/lib make; cp ../lapack95.a ${prefix}/Externals/lib/liblapack95.a; cp ../lapack95_modules/* ${prefix}/Externals/include \ - - diff --git a/exsrc/libcf.sh b/exsrc/libcf.sh deleted file mode 100755 index 5e0add5c34..0000000000 --- a/exsrc/libcf.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/sh - -PACKAGE="libcf" -BUILD=`pwd` -export BUILD -. ./prolog.sh - -NC4LOC=`grep NC4LOC ../config.log | sed 's/NC4LOC=//' | sed "s/'//"` -HDF5LOC=`grep HDF5LOC ../config.log | sed 's/HDF5LOC=//' | sed "s/'//"` - -echo "prefix is ${prefix}" -echo "using netcdf at $NC4LOC, using hdf5 at $HDF5LOC" - -(cd libcf*; \ - mkdir ${prefix}/Externals/libcf ; \ - mkdir ${prefix}/Externals/NetCDF ; \ - ./configure --prefix=${prefix}/Externals/NetCDF --with-netcdf=$NC4LOC --with-hdf5=$HDF5LOC --enable-shared; \ - make; make install -) - diff --git a/exsrc/libdap.sh b/exsrc/libdap.sh deleted file mode 100755 index d79e566c8c..0000000000 --- a/exsrc/libdap.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -PACKAGE="libdap" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" - export CXX="g++ -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" - export CXX="g++ -fPIC" -fi -(cd libdap* ; env PATH=${prefix}/Externals/OpenDAP/bin\:${PATH} ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install ) - diff --git a/exsrc/libnc-dap.sh b/exsrc/libnc-dap.sh deleted file mode 100755 index de5bb66fc2..0000000000 --- a/exsrc/libnc-dap.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -PACKAGE="libnc-dap" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" - export CXX="g++ -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" - export CXX="g++ -fPIC" -fi -(cd libnc-dap* ; env PATH=${prefix}/Externals/OpenDAP/bin\:${PATH} ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install ; cp -pf ${prefix}/Externals/OpenDAP/bin/* ${prefix}/Externals/bin ) - diff --git a/exsrc/libpixman.sh b/exsrc/libpixman.sh deleted file mode 100755 index 2b8c09e00a..0000000000 --- a/exsrc/libpixman.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="pixman" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd pixman* ; ./configure --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/libpng.sh b/exsrc/libpng.sh deleted file mode 100755 index 2cb505cc3c..0000000000 --- a/exsrc/libpng.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="libpng" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd libpng* ; ./configure --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/libxml.sh b/exsrc/libxml.sh deleted file mode 100755 index de23dc8cb4..0000000000 --- a/exsrc/libxml.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="libxml" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd libxml2* ; ./configure --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/netcdf.sh b/exsrc/netcdf.sh deleted file mode 100755 index 6222460fd5..0000000000 --- a/exsrc/netcdf.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/sh -PACKAGE="netcdf" -. ./prolog.sh -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then - CXX=""; export CXX -fi - -echo "prefix is"${prefix} -# Define compilation flags for itanium based NEC TX-7 (and gcc) -> ia64 -# Also define compilation flags for SGI Altrix (and gcc) -> ia64 -# Same for AMD Opteron based HP Proliant DL585 -> x86_64 -# export CFLAGS="$CFLAGS -fpic -O" -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CFLAGS="$CFLAGS -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CFLAGS="$CFLAGS -fPIC" -fi - -if (test `uname ` = "CYGWIN_NT-5.1") then -(cd netcdf*; \ - FC=''; export FC; \ - F90='';export F90; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF ; \ - ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -elif (test `uname ` = "CYGWIN_NT-6.0") then -(cd netcdf*; \ - FC=''; export FC; \ - F90='';export F90; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF ; \ - ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -else 
-(cd netcdf*; \ - FC=''; export FC; \ - F90='';export F90; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF ; \ - ./configure --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -fi diff --git a/exsrc/netcdf_fortran.sh b/exsrc/netcdf_fortran.sh deleted file mode 100755 index bbf4c98865..0000000000 --- a/exsrc/netcdf_fortran.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -PACKAGE="netcdf" -BUILD=`pwd` -export BUILD -. ./prolog.sh - -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -if ( test $FC = "gfortran") then - CPPFLAGS="-DpgiFortran"; export CPPFLAGS -fi -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then -( CXX=""; export CXX \ -) -fi - -if (test `uname ` = "CYGWIN_NT-5.1") then -(cd netcdf*; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF; \ - ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -elif (test `uname ` = "CYGWIN_NT-6.0") then -(cd netcdf*; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF; \ - ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -else -(cd netcdf*;\ - # Add f77 support - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF; \ - ./configure --prefix=${prefix}/Externals/NetCDF; \ - make; make install; \ -) -fi - diff --git a/exsrc/netpbm.input.conf b/exsrc/netpbm.input.conf deleted file mode 100644 index a7f73f85fe..0000000000 --- a/exsrc/netpbm.input.conf +++ /dev/null @@ -1,19 +0,0 @@ - - - -static - -none -none -none -none - - - - - - - - - - diff --git a/exsrc/netpbm.input.conf.Cygwin b/exsrc/netpbm.input.conf.Cygwin deleted file mode 100644 index 5bd669a10c..0000000000 --- a/exsrc/netpbm.input.conf.Cygwin +++ /dev/null @@ -1,18 +0,0 @@ - -gnu - -static - -none -none -none - - - - - - - - - - diff --git a/exsrc/netpbm.input.conf.Darwin b/exsrc/netpbm.input.conf.Darwin deleted file mode 100644 index 81ee298864..0000000000 --- 
a/exsrc/netpbm.input.conf.Darwin +++ /dev/null @@ -1,19 +0,0 @@ - - - - -static - -none -none -none -none - - - - - - - - - diff --git a/exsrc/netpbm.input.conf.sun b/exsrc/netpbm.input.conf.sun deleted file mode 100644 index ae45aa38cb..0000000000 --- a/exsrc/netpbm.input.conf.sun +++ /dev/null @@ -1,20 +0,0 @@ - - -cc -sun - - -static -none -none -none - - - - - - - - - - diff --git a/exsrc/netpbm.input.inst b/exsrc/netpbm.input.inst deleted file mode 100644 index c9167ec1e6..0000000000 --- a/exsrc/netpbm.input.inst +++ /dev/null @@ -1,9 +0,0 @@ -INST_PREFIX -CDAT_PREFIX - - - - - -N - diff --git a/exsrc/netpbm.sh b/exsrc/netpbm.sh deleted file mode 100755 index 1e5d071806..0000000000 --- a/exsrc/netpbm.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -PACKAGE="netpbm" -OS=`uname` -if ( test "${OS}" = 'Darwin' ) then - echo "Darwin" ; - CONF_FILE=netpbm.input.conf.Darwin; -elif ( test "${OS}" = 'sunOS' ) then - echo "Sun OS"; - CONF_FILE=netpbm.input.conf.sun; -elif ( test "${OS}" = 'Linux' ) then - echo "GNU Linux"; - CONF_FILE=netpbm.input.conf; -elif ( test "${OS}" = 'CYGWIN_NT-5.1' ) then - echo "GNU Build for Cygwin"; - CONF_FILE=netpbm.input.conf.Cygwin; -elif ( test "${OS}" = 'CYGWIN_NT-6.0' ) then - echo "GNU Build for Cygwin"; - CONF_FILE=netpbm.input.conf.Cygwin; -else - echo "Platform not tested, using GNU conf file"; - echo "If hangs or fails try manually or use pbmplus"; -fi -. ./prolog.sh -( - cd netpbm*; \ - BUILD_DIR=`pwd`;\ - sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \ - -e 's@INST_PREFIX@'${BUILD_DIR}'/TMP@g' \ - ../../netpbm.input.inst > netpbm.input.inst.feed ; \ - ./configure < ../../${CONF_FILE} ; \ - make ; \ - make package pkgdir=${BUILD_DIR}/TMP; \ - ./installnetpbm < netpbm.input.inst.feed ; \ - rm -rf ${BUILD_DIR}/TMP -) diff --git a/exsrc/numpy.sh b/exsrc/numpy.sh deleted file mode 100755 index a1560bcbb3..0000000000 --- a/exsrc/numpy.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/sh -PACKAGE="numpy" -. 
./prolog.sh -# Handle x86_64 arch -CDATARCH=`uname -m` -if (test "${CDATARCH}" = "x86_64") then - cd numpy-* - cat >site.cfg <, et al. - * - * This software is licensed as described in the file COPYING, which - * you should have received as part of this distribution. The terms - * are also available at http://curl.haxx.se/docs/copyright.html. - * - * You may opt to use, copy, modify, merge, publish, distribute and/or sell - * copies of the Software, and permit persons to whom the Software is - * furnished to do so, under the terms of the COPYING file. - * - * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY - * KIND, either express or implied. - * - * $Id: multi.c,v 1.2 2007-03-15 19:22:13 andy Exp $ - ***************************************************************************/ - -#include "setup.h" -#include -#include - -#ifdef HAVE_SYS_TYPES_H -#include -#endif -#ifdef HAVE_SYS_SOCKET_H -#include -#endif -#ifdef HAVE_UNISTD_H -#include -#endif - -#include - -#include "urldata.h" -#include "transfer.h" -#include "url.h" -#include "connect.h" -#include "progress.h" -#include "memory.h" -#include "easyif.h" -#include "multiif.h" -#include "sendf.h" -#include "timeval.h" - -/* The last #include file should be: */ -#include "memdebug.h" - -struct Curl_message { - /* the 'CURLMsg' is the part that is visible to the external user */ - struct CURLMsg extmsg; - struct Curl_message *next; -}; - -typedef enum { - CURLM_STATE_INIT, /* start in this state */ - CURLM_STATE_CONNECT, /* resolve/connect has been sent off */ - CURLM_STATE_WAITRESOLVE, /* awaiting the resolve to finalize */ - CURLM_STATE_WAITCONNECT, /* awaiting the connect to finalize */ - CURLM_STATE_PROTOCONNECT, /* completing the protocol-specific connect - phase */ - CURLM_STATE_WAITDO, /* wait for our turn to send the request */ - CURLM_STATE_DO, /* start send off the request (part 1) */ - CURLM_STATE_DOING, /* sending off the request (part 1) */ - CURLM_STATE_DO_MORE, /* send off the 
request (part 2) */ - CURLM_STATE_DO_DONE, /* done sending off request */ - CURLM_STATE_WAITPERFORM, /* wait for our turn to read the response */ - CURLM_STATE_PERFORM, /* transfer data */ - CURLM_STATE_TOOFAST, /* wait because limit-rate exceeded */ - CURLM_STATE_DONE, /* post data transfer operation */ - CURLM_STATE_COMPLETED, /* operation complete */ - CURLM_STATE_CANCELLED, /* cancelled */ - - CURLM_STATE_LAST /* not a true state, never use this */ -} CURLMstate; - -/* we support N sockets per easy handle. Set the corresponding bit to what - action we should wait for */ -#define MAX_SOCKSPEREASYHANDLE 5 -#define GETSOCK_READABLE (0x00ff) -#define GETSOCK_WRITABLE (0xff00) - -struct closure { - struct closure *next; /* a simple one-way list of structs */ - struct SessionHandle *easy_handle; -}; - -struct Curl_one_easy { - /* first, two fields for the linked list of these */ - struct Curl_one_easy *next; - struct Curl_one_easy *prev; - - struct SessionHandle *easy_handle; /* the easy handle for this unit */ - struct connectdata *easy_conn; /* the "unit's" connection */ - - CURLMstate state; /* the handle's state */ - CURLcode result; /* previous result */ - - struct Curl_message *msg; /* A pointer to one single posted message. - Cleanup should be done on this pointer NOT on - the linked list in Curl_multi. This message - will be deleted when this handle is removed - from the multi-handle */ - int msg_num; /* number of messages left in 'msg' to return */ - - /* Array with the plain socket numbers this handle takes care of, in no - particular order. Note that all sockets are added to the sockhash, where - the state etc are also kept. This array is mostly used to detect when a - socket is to be removed from the hash. See singlesocket(). 
*/ - curl_socket_t sockets[MAX_SOCKSPEREASYHANDLE]; - int numsocks; -}; - -#define CURL_MULTI_HANDLE 0x000bab1e - -#define GOOD_MULTI_HANDLE(x) \ - ((x)&&(((struct Curl_multi *)x)->type == CURL_MULTI_HANDLE)) -#define GOOD_EASY_HANDLE(x) \ - (((struct SessionHandle *)x)->magic == CURLEASY_MAGIC_NUMBER) - -/* This is the struct known as CURLM on the outside */ -struct Curl_multi { - /* First a simple identifier to easier detect if a user mix up - this multi handle with an easy handle. Set this to CURL_MULTI_HANDLE. */ - long type; - - /* We have a linked list with easy handles */ - struct Curl_one_easy easy; - - int num_easy; /* amount of entries in the linked list above. */ - int num_msgs; /* amount of messages in the easy handles */ - int num_alive; /* amount of easy handles that are added but have not yet - reached COMPLETE state */ - - /* callback function and user data pointer for the *socket() API */ - curl_socket_callback socket_cb; - void *socket_userp; - - /* Hostname cache */ - struct curl_hash *hostcache; - - /* timetree points to the splay-tree of time nodes to figure out expire - times of all currently set timers */ - struct Curl_tree *timetree; - - /* 'sockhash' is the lookup hash for socket descriptor => easy handles (note - the pluralis form, there can be more than one easy handle waiting on the - same actual socket) */ - struct curl_hash *sockhash; - - /* Whether pipelining is enabled for this multi handle */ - bool pipelining_enabled; - - /* shared connection cache */ - struct conncache *connc; - - /* list of easy handles kept around for doing nice connection closures */ - struct closure *closure; - - /* timer callback and user data pointer for the *socket() API */ - curl_multi_timer_callback timer_cb; - void *timer_userp; - time_t timer_lastcall; /* the fixed time for the timeout for the previous - callback */ -}; - -static bool multi_conn_using(struct Curl_multi *multi, - struct SessionHandle *data); -static void singlesocket(struct Curl_multi 
*multi, - struct Curl_one_easy *easy); -static void add_closure(struct Curl_multi *multi, - struct SessionHandle *data); -static int update_timer(struct Curl_multi *multi); - -#ifdef CURLDEBUG -static const char *statename[]={ - "INIT", - "CONNECT", - "WAITRESOLVE", - "WAITCONNECT", - "PROTOCONNECT", - "WAITDO", - "DO", - "DOING", - "DO_MORE", - "DO_DONE", - "WAITPERFORM", - "PERFORM", - "TOOFAST", - "DONE", - "COMPLETED", - "CANCELLED" -}; - -void curl_multi_dump(CURLM *multi_handle); -#endif - -/* always use this function to change state, to make debugging easier */ -static void multistate(struct Curl_one_easy *easy, CURLMstate state) -{ -#ifdef CURLDEBUG - long index = -1; -#endif - CURLMstate oldstate = easy->state; - - if(oldstate == state) - /* don't bother when the new state is the same as the old state */ - return; - - easy->state = state; - -#ifdef CURLDEBUG - if(easy->state > CURLM_STATE_CONNECT && - easy->state < CURLM_STATE_COMPLETED) - index = easy->easy_conn->connectindex; - - infof(easy->easy_handle, - "STATE: %s => %s handle %p; (connection #%ld) \n", - statename[oldstate], statename[easy->state], - (char *)easy, index); -#endif - if(state == CURLM_STATE_COMPLETED) - /* changing to COMPLETED means there's one less easy handle 'alive' */ - easy->easy_handle->multi->num_alive--; -} - -/* - * We add one of these structs to the sockhash for a particular socket - */ - -struct Curl_sh_entry { - struct SessionHandle *easy; - time_t timestamp; - long inuse; - int action; /* what action READ/WRITE this socket waits for */ - curl_socket_t socket; /* mainly to ease debugging */ - void *socketp; /* settable by users with curl_multi_assign() */ -}; -/* bits for 'action' having no bits means this socket is not expecting any - action */ -#define SH_READ 1 -#define SH_WRITE 2 - -/* make sure this socket is present in the hash for this handle */ -static struct Curl_sh_entry *sh_addentry(struct curl_hash *sh, - curl_socket_t s, - struct SessionHandle *data) -{ - 
struct Curl_sh_entry *there = - Curl_hash_pick(sh, (char *)&s, sizeof(curl_socket_t)); - struct Curl_sh_entry *check; - - if(there) - /* it is present, return fine */ - return there; - - /* not present, add it */ - check = calloc(sizeof(struct Curl_sh_entry), 1); - if(!check) - return NULL; /* major failure */ - check->easy = data; - check->socket = s; - - /* make/add new hash entry */ - if(NULL == Curl_hash_add(sh, (char *)&s, sizeof(curl_socket_t), check)) { - free(check); - return NULL; /* major failure */ - } - - return check; /* things are good in sockhash land */ -} - - -/* delete the given socket + handle from the hash */ -static void sh_delentry(struct curl_hash *sh, curl_socket_t s) -{ - struct Curl_sh_entry *there = - Curl_hash_pick(sh, (char *)&s, sizeof(curl_socket_t)); - - if(there) { - /* this socket is in the hash */ - /* We remove the hash entry. (This'll end up in a call to - sh_freeentry().) */ - Curl_hash_delete(sh, (char *)&s, sizeof(curl_socket_t)); - } -} - -/* - * free a sockhash entry - */ -static void sh_freeentry(void *freethis) -{ - struct Curl_sh_entry *p = (struct Curl_sh_entry *) freethis; - - free(p); -} - -/* - * sh_init() creates a new socket hash and returns the handle for it. - * - * Quote from README.multi_socket: - * - * "Some tests at 7000 and 9000 connections showed that the socket hash lookup - * is somewhat of a bottle neck. Its current implementation may be a bit too - * limiting. It simply has a fixed-size array, and on each entry in the array - * it has a linked list with entries. So the hash only checks which list to - * scan through. The code I had used so for used a list with merely 7 slots - * (as that is what the DNS hash uses) but with 7000 connections that would - * make an average of 1000 nodes in each list to run through. I upped that to - * 97 slots (I believe a prime is suitable) and noticed a significant speed - * increase. 
I need to reconsider the hash implementation or use a rather - * large default value like this. At 9000 connections I was still below 10us - * per call." - * - */ -static struct curl_hash *sh_init(void) -{ - return Curl_hash_alloc(97, sh_freeentry); -} - -CURLM *curl_multi_init(void) -{ - struct Curl_multi *multi = (void *)calloc(sizeof(struct Curl_multi), 1); - - if(!multi) - return NULL; - - multi->type = CURL_MULTI_HANDLE; - - multi->hostcache = Curl_mk_dnscache(); - if(!multi->hostcache) { - /* failure, free mem and bail out */ - free(multi); - return NULL; - } - - multi->sockhash = sh_init(); - if(!multi->sockhash) { - /* failure, free mem and bail out */ - Curl_hash_destroy(multi->hostcache); - free(multi); - return NULL; - } - - multi->connc = Curl_mk_connc(CONNCACHE_MULTI, -1); - if(!multi->connc) { - Curl_hash_destroy(multi->hostcache); - free(multi); - return NULL; - } - - return (CURLM *) multi; -} - -CURLMcode curl_multi_add_handle(CURLM *multi_handle, - CURL *easy_handle) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - struct closure *cl; - struct closure *prev=NULL; - - /* First, make some basic checks that the CURLM handle is a good handle */ - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - /* Verify that we got a somewhat good easy handle too */ - if(!GOOD_EASY_HANDLE(easy_handle)) - return CURLM_BAD_EASY_HANDLE; - - /* Prevent users to add the same handle more than once! 
*/ - if(((struct SessionHandle *)easy_handle)->multi) - /* possibly we should create a new unique error code for this condition */ - return CURLM_BAD_EASY_HANDLE; - - /* Now, time to add an easy handle to the multi stack */ - easy = (struct Curl_one_easy *)calloc(sizeof(struct Curl_one_easy), 1); - if(!easy) - return CURLM_OUT_OF_MEMORY; - - cl = multi->closure; - while(cl) { - struct closure *next = cl->next; - if(cl->easy_handle == (struct SessionHandle *)easy_handle) { - /* remove this handle from the closure list */ - free(cl); - if(prev) - prev->next = next; - else - multi->closure = next; - break; /* no need to continue since this handle can only be present once - in the list */ - } - cl = next; - } - - /* set the easy handle */ - easy->easy_handle = easy_handle; - multistate(easy, CURLM_STATE_INIT); - - /* for multi interface connections, we share DNS cache automatically if the - easy handle's one is currently private. */ - if (easy->easy_handle->dns.hostcache && - (easy->easy_handle->dns.hostcachetype == HCACHE_PRIVATE)) { - Curl_hash_destroy(easy->easy_handle->dns.hostcache); - easy->easy_handle->dns.hostcache = NULL; - easy->easy_handle->dns.hostcachetype = HCACHE_NONE; - } - - if (!easy->easy_handle->dns.hostcache || - (easy->easy_handle->dns.hostcachetype == HCACHE_NONE)) { - easy->easy_handle->dns.hostcache = multi->hostcache; - easy->easy_handle->dns.hostcachetype = HCACHE_MULTI; - } - - if(easy->easy_handle->state.connc) { - if(easy->easy_handle->state.connc->type == CONNCACHE_PRIVATE) { - /* kill old private version */ - Curl_rm_connc(easy->easy_handle->state.connc); - /* point out our shared one instead */ - easy->easy_handle->state.connc = multi->connc; - } - /* else it is already using multi? */ - } - else - /* point out our shared one */ - easy->easy_handle->state.connc = multi->connc; - - /* Make sure the type is setup correctly */ - easy->easy_handle->state.connc->type = CONNCACHE_MULTI; - - /* We add this new entry first in the list. 
We make our 'next' point to the - previous next and our 'prev' point back to the 'first' struct */ - easy->next = multi->easy.next; - easy->prev = &multi->easy; - - /* make 'easy' the first node in the chain */ - multi->easy.next = easy; - - /* if there was a next node, make sure its 'prev' pointer links back to - the new node */ - if(easy->next) - easy->next->prev = easy; - - Curl_easy_addmulti(easy_handle, multi_handle); - - /* make the SessionHandle struct refer back to this struct */ - easy->easy_handle->set.one_easy = easy; - - /* increase the node-counter */ - multi->num_easy++; - - if((multi->num_easy * 4) > multi->connc->num) { - /* We want the connection cache to have plenty room. Before we supported - the shared cache every single easy handle had 5 entries in their cache - by default. */ - CURLcode res = Curl_ch_connc(easy_handle, multi->connc, - multi->connc->num*4); - if(res != CURLE_OK) - /* TODO: we need to do some cleaning up here! */ - return CURLM_OUT_OF_MEMORY; - } - - /* increase the alive-counter */ - multi->num_alive++; - - update_timer(multi); - return CURLM_OK; -} - -#if 0 -/* Debug-function, used like this: - * - * Curl_hash_print(multi->sockhash, debug_print_sock_hash); - * - * Enable the hash print function first by editing hash.c - */ -static void debug_print_sock_hash(void *p) -{ - struct Curl_sh_entry *sh = (struct Curl_sh_entry *)p; - - fprintf(stderr, " [easy %p/magic %x/socket %d]", - (void *)sh->easy, sh->easy->magic, sh->socket); -} -#endif - -CURLMcode curl_multi_remove_handle(CURLM *multi_handle, - CURL *curl_handle) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - - /* First, make some basic checks that the CURLM handle is a good handle */ - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - /* Verify that we got a somewhat good easy handle too */ - if(!GOOD_EASY_HANDLE(curl_handle)) - return CURLM_BAD_EASY_HANDLE; - - /* scan through the list and remove the 'curl_handle' 
*/ - easy = multi->easy.next; - while(easy) { - if(easy->easy_handle == (struct SessionHandle *)curl_handle) - break; - easy=easy->next; - } - - if(easy) { - bool premature = (bool)(easy->state != CURLM_STATE_COMPLETED); - - /* If the 'state' is not INIT or COMPLETED, we might need to do something - nice to put the easy_handle in a good known state when this returns. */ - if(premature) - /* this handle is "alive" so we need to count down the total number of - alive connections when this is removed */ - multi->num_alive--; - - if (easy->easy_handle->state.is_in_pipeline && - easy->state > CURLM_STATE_DO) { - /* If the handle is in a pipeline and has finished sending off its - request, we need to remember the fact that we want to remove this - handle but do the actual removal at a later time */ - easy->easy_handle->state.cancelled = TRUE; - return CURLM_OK; - } - - /* The timer must be shut down before easy->multi is set to NULL, - else the timenode will remain in the splay tree after - curl_easy_cleanup is called. 
*/ - Curl_expire(easy->easy_handle, 0); - - if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) { - /* clear out the usage of the shared DNS cache */ - easy->easy_handle->dns.hostcache = NULL; - easy->easy_handle->dns.hostcachetype = HCACHE_NONE; - } - - /* if we have a connection we must call Curl_done() here so that we - don't leave a half-baked one around */ - if(easy->easy_conn) { - /* Set up the association right */ - easy->easy_conn->data = easy->easy_handle; - - /* Curl_done() clears the conn->data field to lose the association - between the easy handle and the connection */ - Curl_done(&easy->easy_conn, easy->result, premature); - - if(easy->easy_conn) - /* the connection is still alive, set back the association to enable - the check below to trigger TRUE */ - easy->easy_conn->data = easy->easy_handle; - } - - /* If this easy_handle was the last one in charge for one or more - connections a the shared connection cache, we might need to keep this - handle around until either A) the connection is closed and killed - properly, or B) another easy_handle uses the connection. - - The reason why we need to have a easy_handle associated with a live - connection is simply that some connections will need a handle to get - closed down properly. Currently, the only connections that need to keep - a easy_handle handle around are using FTP(S). Such connections have - the PROT_CLOSEACTION bit set. - - Thus, we need to check for all connections in the shared cache that - points to this handle and are using PROT_CLOSEACTION. If there's any, - we need to add this handle to the list of "easy handles kept around for - nice connection closures". - */ - if(multi_conn_using(multi, easy->easy_handle)) { - /* There's at least one connection using this handle so we must keep - this handle around. We also keep the connection cache pointer - pointing to the shared one since that will be used on close as - well. 
*/ - easy->easy_handle->state.shared_conn = multi; - - /* this handle is still being used by a shared connection cache and - thus we leave it around for now */ - add_closure(multi, easy->easy_handle); - } - - if(easy->easy_handle->state.connc->type == CONNCACHE_MULTI) { - /* if this was using the shared connection cache we clear the pointer - to that since we're not part of that handle anymore */ - easy->easy_handle->state.connc = NULL; - - /* and modify the connectindex since this handle can't point to the - connection cache anymore */ - if(easy->easy_conn) - easy->easy_conn->connectindex = -1; - } - - /* change state without using multistate(), only to make singlesocket() do - what we want */ - easy->state = CURLM_STATE_COMPLETED; - singlesocket(multi, easy); /* to let the application know what sockets - that vanish with this handle */ - - Curl_easy_addmulti(easy->easy_handle, NULL); /* clear the association - to this multi handle */ - - /* make the previous node point to our next */ - if(easy->prev) - easy->prev->next = easy->next; - /* make our next point to our previous node */ - if(easy->next) - easy->next->prev = easy->prev; - - easy->easy_handle->set.one_easy = NULL; /* detached */ - - /* NOTE NOTE NOTE - We do not touch the easy handle here! 
*/ - if (easy->msg) - free(easy->msg); - free(easy); - - multi->num_easy--; /* one less to care about now */ - - update_timer(multi); - return CURLM_OK; - } - else - return CURLM_BAD_EASY_HANDLE; /* twasn't found */ -} - -bool Curl_multi_canPipeline(struct Curl_multi* multi) -{ - return multi->pipelining_enabled; -} - -static int waitconnect_getsock(struct connectdata *conn, - curl_socket_t *sock, - int numsocks) -{ - if(!numsocks) - return GETSOCK_BLANK; - - sock[0] = conn->sock[FIRSTSOCKET]; - return GETSOCK_WRITESOCK(0); -} - -static int domore_getsock(struct connectdata *conn, - curl_socket_t *sock, - int numsocks) -{ - if(!numsocks) - return GETSOCK_BLANK; - - /* When in DO_MORE state, we could be either waiting for us - to connect to a remote site, or we could wait for that site - to connect to us. It makes a difference in the way: if we - connect to the site we wait for the socket to become writable, if - the site connects to us we wait for it to become readable */ - sock[0] = conn->sock[SECONDARYSOCKET]; - - return GETSOCK_WRITESOCK(0); -} - -/* returns bitmapped flags for this handle and its sockets */ -static int multi_getsock(struct Curl_one_easy *easy, - curl_socket_t *socks, /* points to numsocks number - of sockets */ - int numsocks) -{ - if (easy->easy_handle->state.pipe_broke) { - return 0; - } - - if (easy->state > CURLM_STATE_CONNECT && - easy->state < CURLM_STATE_COMPLETED) { - /* Set up ownership correctly */ - easy->easy_conn->data = easy->easy_handle; - } - - switch(easy->state) { - case CURLM_STATE_TOOFAST: /* returns 0, so will not select. 
*/ - default: - /* this will get called with CURLM_STATE_COMPLETED when a handle is - removed */ - return 0; - - case CURLM_STATE_WAITRESOLVE: - return Curl_resolv_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_PROTOCONNECT: - return Curl_protocol_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_DOING: - return Curl_doing_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_WAITCONNECT: - return waitconnect_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_DO_MORE: - return domore_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_PERFORM: - case CURLM_STATE_WAITPERFORM: - return Curl_single_getsock(easy->easy_conn, socks, numsocks); - } - -} - -CURLMcode curl_multi_fdset(CURLM *multi_handle, - fd_set *read_fd_set, fd_set *write_fd_set, - fd_set *exc_fd_set, int *max_fd) -{ - /* Scan through all the easy handles to get the file descriptors set. - Some easy handles may not have connected to the remote host yet, - and then we must make sure that is done. 
*/ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - int this_max_fd=-1; - curl_socket_t sockbunch[MAX_SOCKSPEREASYHANDLE]; - int bitmap; - int i; - (void)exc_fd_set; /* not used */ - - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - easy=multi->easy.next; - while(easy) { - bitmap = multi_getsock(easy, sockbunch, MAX_SOCKSPEREASYHANDLE); - - for(i=0; i< MAX_SOCKSPEREASYHANDLE; i++) { - curl_socket_t s = CURL_SOCKET_BAD; - - if(bitmap & GETSOCK_READSOCK(i)) { - FD_SET(sockbunch[i], read_fd_set); - s = sockbunch[i]; - } - if(bitmap & GETSOCK_WRITESOCK(i)) { - FD_SET(sockbunch[i], write_fd_set); - s = sockbunch[i]; - } - if(s == CURL_SOCKET_BAD) - /* this socket is unused, break out of loop */ - break; - else { - if((int)s > this_max_fd) - this_max_fd = (int)s; - } - } - - easy = easy->next; /* check next handle */ - } - - *max_fd = this_max_fd; - - return CURLM_OK; -} - -static CURLMcode multi_runsingle(struct Curl_multi *multi, - struct Curl_one_easy *easy) -{ - struct Curl_message *msg = NULL; - bool connected; - bool async; - bool protocol_connect; - bool dophase_done; - bool done; - CURLMcode result = CURLM_OK; - struct Curl_transfer_keeper *k; - - do { - - if(!GOOD_EASY_HANDLE(easy->easy_handle)) - return CURLM_BAD_EASY_HANDLE; - - if (easy->easy_handle->state.pipe_broke) { - infof(easy->easy_handle, "Pipe broke: handle 0x%x, url = %s\n", - easy, easy->easy_handle->reqdata.path); - if(easy->easy_handle->state.is_in_pipeline) { - /* Head back to the CONNECT state */ - multistate(easy, CURLM_STATE_CONNECT); - result = CURLM_CALL_MULTI_PERFORM; - easy->result = CURLE_OK; - } else { - easy->result = CURLE_COULDNT_CONNECT; - multistate(easy, CURLM_STATE_COMPLETED); - } - - easy->easy_handle->state.pipe_broke = FALSE; - easy->easy_conn = NULL; - break; - } - - if (easy->state > CURLM_STATE_CONNECT && - easy->state < CURLM_STATE_COMPLETED) { - /* Make sure we set the connection's current owner */ - 
easy->easy_conn->data = easy->easy_handle; - } - - if (CURLM_STATE_WAITCONNECT <= easy->state && - easy->state <= CURLM_STATE_DO && - easy->easy_handle->change.url_changed) { - char *gotourl; - Curl_posttransfer(easy->easy_handle); - - easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE); - /* We make sure that the pipe broken flag is reset - because in this case, it isn't an actual break */ - easy->easy_handle->state.pipe_broke = FALSE; - if(CURLE_OK == easy->result) { - gotourl = strdup(easy->easy_handle->change.url); - if(gotourl) { - easy->easy_handle->change.url_changed = FALSE; - easy->result = Curl_follow(easy->easy_handle, gotourl, FALSE); - if(CURLE_OK == easy->result) - multistate(easy, CURLM_STATE_CONNECT); - else - free(gotourl); - } - else { - easy->result = CURLE_OUT_OF_MEMORY; - multistate(easy, CURLM_STATE_COMPLETED); - break; - } - } - } - - easy->easy_handle->change.url_changed = FALSE; - - switch(easy->state) { - case CURLM_STATE_INIT: - /* init this transfer. */ - easy->result=Curl_pretransfer(easy->easy_handle); - - if(CURLE_OK == easy->result) { - /* after init, go CONNECT */ - multistate(easy, CURLM_STATE_CONNECT); - result = CURLM_CALL_MULTI_PERFORM; - - easy->easy_handle->state.used_interface = Curl_if_multi; - } - break; - - case CURLM_STATE_CONNECT: - /* Connect. We get a connection identifier filled in. */ - Curl_pgrsTime(easy->easy_handle, TIMER_STARTSINGLE); - easy->result = Curl_connect(easy->easy_handle, &easy->easy_conn, - &async, &protocol_connect); - - if(CURLE_OK == easy->result) { - /* Add this handle to the send pipeline */ - Curl_addHandleToPipeline(easy->easy_handle, - easy->easy_conn->send_pipe); - - if(async) - /* We're now waiting for an asynchronous name lookup */ - multistate(easy, CURLM_STATE_WAITRESOLVE); - else { - /* after the connect has been sent off, go WAITCONNECT unless the - protocol connect is already done and we can go directly to - WAITDO! 
*/ - result = CURLM_CALL_MULTI_PERFORM; - - if(protocol_connect) { - multistate(easy, CURLM_STATE_WAITDO); - } else { - multistate(easy, CURLM_STATE_WAITCONNECT); - } - } - } - break; - - case CURLM_STATE_WAITRESOLVE: - /* awaiting an asynch name resolve to complete */ - { - struct Curl_dns_entry *dns = NULL; - - /* check if we have the name resolved by now */ - easy->result = Curl_is_resolved(easy->easy_conn, &dns); - - if(dns) { - /* Perform the next step in the connection phase, and then move on - to the WAITCONNECT state */ - easy->result = Curl_async_resolved(easy->easy_conn, - &protocol_connect); - - if(CURLE_OK != easy->result) - /* if Curl_async_resolved() returns failure, the connection struct - is already freed and gone */ - easy->easy_conn = NULL; /* no more connection */ - else { - /* call again please so that we get the next socket setup */ - result = CURLM_CALL_MULTI_PERFORM; - if(protocol_connect) - multistate(easy, CURLM_STATE_DO); - else - multistate(easy, CURLM_STATE_WAITCONNECT); - } - } - - if(CURLE_OK != easy->result) { - /* failure detected */ - Curl_disconnect(easy->easy_conn); /* disconnect properly */ - easy->easy_conn = NULL; /* no more connection */ - break; - } - } - break; - - case CURLM_STATE_WAITCONNECT: - /* awaiting a completion of an asynch connect */ - easy->result = Curl_is_connected(easy->easy_conn, - FIRSTSOCKET, - &connected); - if(connected) - easy->result = Curl_protocol_connect(easy->easy_conn, - &protocol_connect); - - if(CURLE_OK != easy->result) { - /* failure detected */ - Curl_disconnect(easy->easy_conn); /* close the connection */ - easy->easy_conn = NULL; /* no more connection */ - break; - } - - if(connected) { - if(!protocol_connect) { - /* We have a TCP connection, but 'protocol_connect' may be false - and then we continue to 'STATE_PROTOCONNECT'. If protocol - connect is TRUE, we move on to STATE_DO. 
*/ - multistate(easy, CURLM_STATE_PROTOCONNECT); - } - else { - /* after the connect has completed, go WAITDO */ - multistate(easy, CURLM_STATE_WAITDO); - - result = CURLM_CALL_MULTI_PERFORM; - } - } - break; - - case CURLM_STATE_PROTOCONNECT: - /* protocol-specific connect phase */ - easy->result = Curl_protocol_connecting(easy->easy_conn, - &protocol_connect); - if(protocol_connect) { - /* after the connect has completed, go WAITDO */ - multistate(easy, CURLM_STATE_WAITDO); - result = CURLM_CALL_MULTI_PERFORM; - } - else if(easy->result) { - /* failure detected */ - Curl_posttransfer(easy->easy_handle); - Curl_done(&easy->easy_conn, easy->result, FALSE); - Curl_disconnect(easy->easy_conn); /* close the connection */ - easy->easy_conn = NULL; /* no more connection */ - } - break; - - case CURLM_STATE_WAITDO: - /* Wait for our turn to DO when we're pipelining requests */ -#ifdef CURLDEBUG - infof(easy->easy_handle, "Conn %d send pipe %d inuse %d athead %d\n", - easy->easy_conn->connectindex, - easy->easy_conn->send_pipe->size, - easy->easy_conn->writechannel_inuse, - Curl_isHandleAtHead(easy->easy_handle, - easy->easy_conn->send_pipe)); -#endif - if (!easy->easy_conn->writechannel_inuse && - Curl_isHandleAtHead(easy->easy_handle, - easy->easy_conn->send_pipe)) { - /* Grab the channel */ - easy->easy_conn->writechannel_inuse = TRUE; - multistate(easy, CURLM_STATE_DO); - result = CURLM_CALL_MULTI_PERFORM; - } - break; - - case CURLM_STATE_DO: - if(easy->easy_handle->set.connect_only) { - /* keep connection open for application to use the socket */ - easy->easy_conn->bits.close = FALSE; - multistate(easy, CURLM_STATE_DONE); - easy->result = CURLE_OK; - result = CURLM_OK; - } - else { - /* Perform the protocol's DO action */ - easy->result = Curl_do(&easy->easy_conn, - &dophase_done); - - if(CURLE_OK == easy->result) { - - if(!dophase_done) { - /* DO was not completed in one function call, we must continue - DOING... 
*/ - multistate(easy, CURLM_STATE_DOING); - result = CURLM_OK; - } - - /* after DO, go DO_DONE... or DO_MORE */ - else if(easy->easy_conn->bits.do_more) { - /* we're supposed to do more, but we need to sit down, relax - and wait a little while first */ - multistate(easy, CURLM_STATE_DO_MORE); - result = CURLM_OK; - } - else { - /* we're done with the DO, now DO_DONE */ - easy->result = Curl_readwrite_init(easy->easy_conn); - if(CURLE_OK == easy->result) { - multistate(easy, CURLM_STATE_DO_DONE); - result = CURLM_CALL_MULTI_PERFORM; - } - } - } - else { - /* failure detected */ - Curl_posttransfer(easy->easy_handle); - Curl_done(&easy->easy_conn, easy->result, FALSE); - Curl_disconnect(easy->easy_conn); /* close the connection */ - easy->easy_conn = NULL; /* no more connection */ - } - } - break; - - case CURLM_STATE_DOING: - /* we continue DOING until the DO phase is complete */ - easy->result = Curl_protocol_doing(easy->easy_conn, - &dophase_done); - if(CURLE_OK == easy->result) { - if(dophase_done) { - /* after DO, go PERFORM... or DO_MORE */ - if(easy->easy_conn->bits.do_more) { - /* we're supposed to do more, but we need to sit down, relax - and wait a little while first */ - multistate(easy, CURLM_STATE_DO_MORE); - result = CURLM_OK; - } - else { - /* we're done with the DO, now DO_DONE */ - easy->result = Curl_readwrite_init(easy->easy_conn); - if(CURLE_OK == easy->result) { - multistate(easy, CURLM_STATE_DO_DONE); - result = CURLM_CALL_MULTI_PERFORM; - } - } - } /* dophase_done */ - } - else { - /* failure detected */ - Curl_posttransfer(easy->easy_handle); - Curl_done(&easy->easy_conn, easy->result, FALSE); - Curl_disconnect(easy->easy_conn); /* close the connection */ - easy->easy_conn = NULL; /* no more connection */ - } - break; - - case CURLM_STATE_DO_MORE: - /* Ready to do more? 
*/ - easy->result = Curl_is_connected(easy->easy_conn, - SECONDARYSOCKET, - &connected); - if(connected) { - /* - * When we are connected, DO MORE and then go DO_DONE - */ - easy->result = Curl_do_more(easy->easy_conn); - - if(CURLE_OK == easy->result) - easy->result = Curl_readwrite_init(easy->easy_conn); - else - /* Remove ourselves from the send pipeline */ - Curl_removeHandleFromPipeline(easy->easy_handle, - easy->easy_conn->send_pipe); - - if(CURLE_OK == easy->result) { - multistate(easy, CURLM_STATE_DO_DONE); - result = CURLM_CALL_MULTI_PERFORM; - } - } - break; - - case CURLM_STATE_DO_DONE: - /* Remove ourselves from the send pipeline */ - Curl_removeHandleFromPipeline(easy->easy_handle, - easy->easy_conn->send_pipe); - /* Add ourselves to the recv pipeline */ - Curl_addHandleToPipeline(easy->easy_handle, - easy->easy_conn->recv_pipe); - multistate(easy, CURLM_STATE_WAITPERFORM); - result = CURLM_CALL_MULTI_PERFORM; - break; - - case CURLM_STATE_WAITPERFORM: -#ifdef CURLDEBUG - infof(easy->easy_handle, "Conn %d recv pipe %d inuse %d athead %d\n", - easy->easy_conn->connectindex, - easy->easy_conn->recv_pipe->size, - easy->easy_conn->readchannel_inuse, - Curl_isHandleAtHead(easy->easy_handle, - easy->easy_conn->recv_pipe)); -#endif - /* Wait for our turn to PERFORM */ - if (!easy->easy_conn->readchannel_inuse && - Curl_isHandleAtHead(easy->easy_handle, - easy->easy_conn->recv_pipe)) { - /* Grab the channel */ - easy->easy_conn->readchannel_inuse = TRUE; - multistate(easy, CURLM_STATE_PERFORM); - result = CURLM_CALL_MULTI_PERFORM; - } - break; - - case CURLM_STATE_TOOFAST: /* limit-rate exceeded in either direction */ - /* if both rates are within spec, resume transfer */ - Curl_pgrsUpdate(easy->easy_conn); - if ( ( ( easy->easy_handle->set.max_send_speed == 0 ) || - ( easy->easy_handle->progress.ulspeed < - easy->easy_handle->set.max_send_speed ) ) && - ( ( easy->easy_handle->set.max_recv_speed == 0 ) || - ( easy->easy_handle->progress.dlspeed < - 
easy->easy_handle->set.max_recv_speed ) ) - ) - multistate(easy, CURLM_STATE_PERFORM); - break; - - case CURLM_STATE_PERFORM: - /* check if over speed */ - if ( ( ( easy->easy_handle->set.max_send_speed > 0 ) && - ( easy->easy_handle->progress.ulspeed > - easy->easy_handle->set.max_send_speed ) ) || - ( ( easy->easy_handle->set.max_recv_speed > 0 ) && - ( easy->easy_handle->progress.dlspeed > - easy->easy_handle->set.max_recv_speed ) ) - ) { - /* Transfer is over the speed limit. Change state. TODO: Call - * Curl_expire() with the time left until we're targeted to be below - * the speed limit again. */ - multistate(easy, CURLM_STATE_TOOFAST ); - break; - } - - /* read/write data if it is ready to do so */ - easy->result = Curl_readwrite(easy->easy_conn, &done); - - k = &easy->easy_handle->reqdata.keep; - - if (!(k->keepon & KEEP_READ)) { - /* We're done reading */ - easy->easy_conn->readchannel_inuse = FALSE; - } - - if (!(k->keepon & KEEP_WRITE)) { - /* We're done writing */ - easy->easy_conn->writechannel_inuse = FALSE; - } - - if(easy->result) { - /* The transfer phase returned error, we mark the connection to get - * closed to prevent being re-used. This is becasue we can't - * possibly know if the connection is in a good shape or not now. 
*/ - easy->easy_conn->bits.close = TRUE; - - if(CURL_SOCKET_BAD != easy->easy_conn->sock[SECONDARYSOCKET]) { - /* if we failed anywhere, we must clean up the secondary socket if - it was used */ - sclose(easy->easy_conn->sock[SECONDARYSOCKET]); - easy->easy_conn->sock[SECONDARYSOCKET] = CURL_SOCKET_BAD; - } - Curl_posttransfer(easy->easy_handle); - Curl_done(&easy->easy_conn, easy->result, FALSE); - } - else if(TRUE == done) { - char *newurl; - bool retry = Curl_retry_request(easy->easy_conn, &newurl); - - /* call this even if the readwrite function returned error */ - Curl_posttransfer(easy->easy_handle); - - /* When we follow redirects, must to go back to the CONNECT state */ - if(easy->easy_handle->reqdata.newurl || retry) { - Curl_removeHandleFromPipeline(easy->easy_handle, - easy->easy_conn->recv_pipe); - if(!retry) { - /* if the URL is a follow-location and not just a retried request - then figure out the URL here */ - newurl = easy->easy_handle->reqdata.newurl; - easy->easy_handle->reqdata.newurl = NULL; - } - easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE); - if(easy->result == CURLE_OK) - easy->result = Curl_follow(easy->easy_handle, newurl, retry); - if(CURLE_OK == easy->result) { - multistate(easy, CURLM_STATE_CONNECT); - result = CURLM_CALL_MULTI_PERFORM; - } - else - /* Since we "took it", we are in charge of freeing this on - failure */ - free(newurl); - } - else { - /* after the transfer is done, go DONE */ - multistate(easy, CURLM_STATE_DONE); - result = CURLM_CALL_MULTI_PERFORM; - } - } - - break; - - case CURLM_STATE_DONE: - /* Remove ourselves from the receive pipeline */ - Curl_removeHandleFromPipeline(easy->easy_handle, - easy->easy_conn->recv_pipe); - easy->easy_handle->state.is_in_pipeline = FALSE; - - if (easy->easy_conn->bits.stream_was_rewound) { - /* This request read past its response boundary so we quickly - let the other requests consume those bytes since there is no - guarantee that the socket will become active again */ - 
result = CURLM_CALL_MULTI_PERFORM; - } - - if (!easy->easy_handle->state.cancelled) { - /* post-transfer command */ - easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE); - - /* after we have DONE what we're supposed to do, go COMPLETED, and - it doesn't matter what the Curl_done() returned! */ - multistate(easy, CURLM_STATE_COMPLETED); - } - - break; - - case CURLM_STATE_COMPLETED: - if (easy->easy_handle->state.cancelled) - /* Go into the CANCELLED state if we were cancelled */ - multistate(easy, CURLM_STATE_CANCELLED); - - /* this is a completed transfer, it is likely to still be connected */ - - /* This node should be delinked from the list now and we should post - an information message that we are complete. */ - break; - - case CURLM_STATE_CANCELLED: - /* Cancelled transfer, wait to be cleaned up */ - break; - - default: - return CURLM_INTERNAL_ERROR; - } - - if(CURLM_STATE_COMPLETED != easy->state) { - if(CURLE_OK != easy->result) { - /* - * If an error was returned, and we aren't in completed state now, - * then we go to completed and consider this transfer aborted. 
- */ - easy->easy_handle->state.is_in_pipeline = FALSE; - easy->easy_handle->state.pipe_broke = FALSE; - - if(easy->easy_conn) { - /* if this has a connection, unsubscribe from the pipelines */ - easy->easy_conn->writechannel_inuse = FALSE; - easy->easy_conn->readchannel_inuse = FALSE; - } - multistate(easy, CURLM_STATE_COMPLETED); - } - } - - } while (easy->easy_handle->change.url_changed); - - if ((CURLM_STATE_COMPLETED == easy->state) && !easy->msg) { - if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) { - /* clear out the usage of the shared DNS cache */ - easy->easy_handle->dns.hostcache = NULL; - easy->easy_handle->dns.hostcachetype = HCACHE_NONE; - } - - /* now add a node to the Curl_message linked list with this info */ - msg = (struct Curl_message *)malloc(sizeof(struct Curl_message)); - - if(!msg) - return CURLM_OUT_OF_MEMORY; - - msg->extmsg.msg = CURLMSG_DONE; - msg->extmsg.easy_handle = easy->easy_handle; - msg->extmsg.data.result = easy->result; - msg->next = NULL; - - easy->msg = msg; - easy->msg_num = 1; /* there is one unread message here */ - - multi->num_msgs++; /* increase message counter */ - } - - return result; -} - - -CURLMcode curl_multi_perform(CURLM *multi_handle, int *running_handles) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - CURLMcode returncode=CURLM_OK; - struct Curl_tree *t; - - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - easy=multi->easy.next; - while(easy) { - CURLMcode result; - - if (easy->easy_handle->state.cancelled && - easy->state == CURLM_STATE_CANCELLED) { - /* Remove cancelled handles once it's safe to do so */ - Curl_multi_rmeasy(multi_handle, easy->easy_handle); - easy->easy_handle = NULL; - easy = easy->next; - continue; - } - - result = multi_runsingle(multi, easy); - if(result) - returncode = result; - - easy = easy->next; /* operate on next handle */ - } - - /* - * Simply remove all expired timers from the splay since handles are dealt - * 
with unconditionally by this function and curl_multi_timeout() requires - * that already passed/handled expire times are removed from the splay. - */ - do { - struct timeval now = Curl_tvnow(); - int key = now.tv_sec; /* drop the usec part */ - - multi->timetree = Curl_splaygetbest(key, multi->timetree, &t); - if (t) { - struct SessionHandle *d = t->payload; - struct timeval* tv = &d->state.expiretime; - - /* clear the expire times within the handles that we remove from the - splay tree */ - tv->tv_sec = 0; - tv->tv_usec = 0; - } - - } while(t); - - *running_handles = multi->num_alive; - - if ( CURLM_OK == returncode ) - update_timer(multi); - return returncode; -} - -/* This is called when an easy handle is cleanup'ed that is part of a multi - handle */ -void Curl_multi_rmeasy(void *multi_handle, CURL *easy_handle) -{ - curl_multi_remove_handle(multi_handle, easy_handle); -} - - -CURLMcode curl_multi_cleanup(CURLM *multi_handle) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - struct Curl_one_easy *nexteasy; - int i; - struct closure *cl; - struct closure *n; - - if(GOOD_MULTI_HANDLE(multi)) { - multi->type = 0; /* not good anymore */ - Curl_hash_destroy(multi->hostcache); - Curl_hash_destroy(multi->sockhash); - - /* go over all connections that have close actions */ - for(i=0; i< multi->connc->num; i++) { - if(multi->connc->connects[i] && - multi->connc->connects[i]->protocol & PROT_CLOSEACTION) { - Curl_disconnect(multi->connc->connects[i]); - multi->connc->connects[i] = NULL; - } - } - /* now walk through the list of handles we kept around only to be - able to close connections "properly" */ - cl = multi->closure; - while(cl) { - cl->easy_handle->state.shared_conn = NULL; /* no more shared */ - if(cl->easy_handle->state.closed) - /* close handle only if curl_easy_cleanup() already has been called - for this easy handle */ - Curl_close(cl->easy_handle); - n = cl->next; - free(cl); - cl= n; - } - - 
Curl_rm_connc(multi->connc); - - /* remove all easy handles */ - easy = multi->easy.next; - while(easy) { - nexteasy=easy->next; - if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) { - /* clear out the usage of the shared DNS cache */ - easy->easy_handle->dns.hostcache = NULL; - easy->easy_handle->dns.hostcachetype = HCACHE_NONE; - } - - /* Clear the pointer to the connection cache */ - easy->easy_handle->state.connc = NULL; - - Curl_easy_addmulti(easy->easy_handle, NULL); /* clear the association */ - - if (easy->msg) - free(easy->msg); - free(easy); - easy = nexteasy; - } - - free(multi); - - return CURLM_OK; - } - else - return CURLM_BAD_HANDLE; -} - -CURLMsg *curl_multi_info_read(CURLM *multi_handle, int *msgs_in_queue) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - - *msgs_in_queue = 0; /* default to none */ - - if(GOOD_MULTI_HANDLE(multi)) { - struct Curl_one_easy *easy; - - if(!multi->num_msgs) - return NULL; /* no messages left to return */ - - easy=multi->easy.next; - while(easy) { - if(easy->msg_num) { - easy->msg_num--; - break; - } - easy = easy->next; - } - if(!easy) - return NULL; /* this means internal count confusion really */ - - multi->num_msgs--; - *msgs_in_queue = multi->num_msgs; - - return &easy->msg->extmsg; - } - else - return NULL; -} - -/* - * singlesocket() checks what sockets we deal with and their "action state" - * and if we have a different state in any of those sockets from last time we - * call the callback accordingly. 
- */ -static void singlesocket(struct Curl_multi *multi, - struct Curl_one_easy *easy) -{ - curl_socket_t socks[MAX_SOCKSPEREASYHANDLE]; - int i; - struct Curl_sh_entry *entry; - curl_socket_t s; - int num; - unsigned int curraction; - - memset(&socks, 0, sizeof(socks)); - for(i=0; i< MAX_SOCKSPEREASYHANDLE; i++) - socks[i] = CURL_SOCKET_BAD; - - /* Fill in the 'current' struct with the state as it is now: what sockets to - supervise and for what actions */ - curraction = multi_getsock(easy, socks, MAX_SOCKSPEREASYHANDLE); - - /* We have 0 .. N sockets already and we get to know about the 0 .. M - sockets we should have from now on. Detect the differences, remove no - longer supervised ones and add new ones */ - - /* walk over the sockets we got right now */ - for(i=0; (i< MAX_SOCKSPEREASYHANDLE) && - (curraction & (GETSOCK_READSOCK(i) | GETSOCK_WRITESOCK(i))); - i++) { - int action = CURL_POLL_NONE; - - s = socks[i]; - - /* get it from the hash */ - entry = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s)); - - if(curraction & GETSOCK_READSOCK(i)) - action |= CURL_POLL_IN; - if(curraction & GETSOCK_WRITESOCK(i)) - action |= CURL_POLL_OUT; - - if(entry) { - /* yeps, already present so check if it has the same action set */ - if(entry->action == action) - /* same, continue */ - continue; - } - else { - /* this is a socket we didn't have before, add it! */ - entry = sh_addentry(multi->sockhash, s, easy->easy_handle); - if(!entry) - /* fatal */ - return; - } - - multi->socket_cb(easy->easy_handle, - s, - action, - multi->socket_userp, - entry ? 
entry->socketp : NULL); - - entry->action = action; /* store the current action state */ - } - - num = i; /* number of sockets */ - - /* when we've walked over all the sockets we should have right now, we must - make sure to detect sockets that are removed */ - for(i=0; i< easy->numsocks; i++) { - int j; - s = easy->sockets[i]; - for(j=0; jsockhash, (char *)&s, sizeof(s)); - if(entry) { - /* just a precaution, this socket really SHOULD be in the hash already - but in case it isn't, we don't have to tell the app to remove it - either since it never got to know about it */ - multi->socket_cb(easy->easy_handle, - s, - CURL_POLL_REMOVE, - multi->socket_userp, - entry ? entry->socketp : NULL); - - sh_delentry(multi->sockhash, s); - } - } - } - - memcpy(easy->sockets, socks, num*sizeof(curl_socket_t)); - easy->numsocks = num; -} - -static CURLMcode multi_socket(struct Curl_multi *multi, - bool checkall, - curl_socket_t s, - int *running_handles) -{ - CURLMcode result = CURLM_OK; - struct SessionHandle *data = NULL; - struct Curl_tree *t; - - if(checkall) { - struct Curl_one_easy *easyp; - /* *perform() deals with running_handles on its own */ - result = curl_multi_perform(multi, running_handles); - - /* walk through each easy handle and do the socket state change magic - and callbacks */ - easyp=multi->easy.next; - while(easyp) { - singlesocket(multi, easyp); - easyp = easyp->next; - } - - /* or should we fall-through and do the timer-based stuff? */ - return result; - } - else if (s != CURL_SOCKET_TIMEOUT) { - - struct Curl_sh_entry *entry = - Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s)); - - if(!entry) - /* unmatched socket, major problemo! */ - return CURLM_BAD_SOCKET; /* better return code? 
*/ - - data = entry->easy; - - if(data->magic != CURLEASY_MAGIC_NUMBER) - /* bad bad bad bad bad bad bad */ - return CURLM_INTERNAL_ERROR; - - result = multi_runsingle(multi, data->set.one_easy); - - if(result == CURLM_OK) - /* get the socket(s) and check if the state has been changed since - last */ - singlesocket(multi, data->set.one_easy); - - /* Now we fall-through and do the timer-based stuff, since we don't want - to force the user to have to deal with timeouts as long as at least one - connection in fact has traffic. */ - - data = NULL; /* set data to NULL again to avoid calling multi_runsingle() - in case there's no need to */ - } - - /* - * The loop following here will go on as long as there are expire-times left - * to process in the splay and 'data' will be re-assigned for every expired - * handle we deal with. - */ - do { - int key; - struct timeval now; - - /* the first loop lap 'data' can be NULL */ - if(data) { - result = multi_runsingle(multi, data->set.one_easy); - - if(result == CURLM_OK) - /* get the socket(s) and check if the state has been changed since - last */ - singlesocket(multi, data->set.one_easy); - } - - /* Check if there's one (more) expired timer to deal with! This function - extracts a matching node if there is one */ - - now = Curl_tvnow(); - key = now.tv_sec; /* drop the usec part */ - - multi->timetree = Curl_splaygetbest(key, multi->timetree, &t); - if(t) { - /* assign 'data' to be the easy handle we just removed from the splay - tree */ - data = t->payload; - /* clear the expire time within the handle we removed from the - splay tree */ - data->state.expiretime.tv_sec = 0; - data->state.expiretime.tv_usec = 0; - } - - } while(t); - - *running_handles = multi->num_alive; - return result; -} - -CURLMcode curl_multi_setopt(CURLM *multi_handle, - CURLMoption option, ...) 
-{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - CURLMcode res = CURLM_OK; - va_list param; - - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - va_start(param, option); - - switch(option) { - case CURLMOPT_SOCKETFUNCTION: - multi->socket_cb = va_arg(param, curl_socket_callback); - break; - case CURLMOPT_SOCKETDATA: - multi->socket_userp = va_arg(param, void *); - break; - case CURLMOPT_PIPELINING: - multi->pipelining_enabled = (bool)(0 != va_arg(param, long)); - break; - case CURLMOPT_TIMERFUNCTION: - multi->timer_cb = va_arg(param, curl_multi_timer_callback); - break; - case CURLMOPT_TIMERDATA: - multi->timer_userp = va_arg(param, void *); - break; - default: - res = CURLM_UNKNOWN_OPTION; - break; - } - va_end(param); - return res; -} - - -CURLMcode curl_multi_socket_all(CURLM *multi_handle, int *running_handles) - -{ - CURLMcode result = multi_socket((struct Curl_multi *)multi_handle, - TRUE, CURL_SOCKET_BAD, running_handles); - if (CURLM_OK == result) - update_timer((struct Curl_multi *)multi_handle); - return result; -} - -static CURLMcode multi_timeout(struct Curl_multi *multi, - long *timeout_ms) -{ - if(multi->timetree) { - /* we have a tree of expire times */ - struct timeval now = Curl_tvnow(); - - /* splay the lowest to the bottom */ - multi->timetree = Curl_splay(0, multi->timetree); - - /* At least currently, the splay key is a time_t for the expire time */ - *timeout_ms = (multi->timetree->key - now.tv_sec) * 1000 - - now.tv_usec/1000; - if(*timeout_ms < 0) - /* 0 means immediately */ - *timeout_ms = 0; - } - else - *timeout_ms = -1; - - return CURLM_OK; -} - -CURLMcode curl_multi_timeout(CURLM *multi_handle, - long *timeout_ms) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - - /* First, make some basic checks that the CURLM handle is a good handle */ - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - return multi_timeout(multi, timeout_ms); -} - -/* - * Tell the application it should update 
its timers, if it subscribes to the - * update timer callback. - */ -static int update_timer(struct Curl_multi *multi) -{ - long timeout_ms; - if (!multi->timer_cb) - return 0; - if ( multi_timeout(multi, &timeout_ms) != CURLM_OK ) - return -1; - if ( timeout_ms < 0 ) - return 0; - - /* When multi_timeout() is done, multi->timetree points to the node with the - * timeout we got the (relative) time-out time for. We can thus easily check - * if this is the same (fixed) time as we got in a previous call and then - * avoid calling the callback again. */ - if(multi->timetree->key == multi->timer_lastcall) - return 0; - - multi->timer_lastcall = multi->timetree->key; - - return multi->timer_cb((CURLM*)multi, timeout_ms, multi->timer_userp); -} - -/* given a number of milliseconds from now to use to set the 'act before - this'-time for the transfer, to be extracted by curl_multi_timeout() */ -void Curl_expire(struct SessionHandle *data, long milli) -{ - struct Curl_multi *multi = data->multi; - struct timeval *nowp = &data->state.expiretime; - int rc; - - /* this is only interesting for multi-interface using libcurl, and only - while there is still a multi interface struct remaining! */ - if(!multi) - return; - - if(!milli) { - /* No timeout, clear the time data. 
*/ - if(nowp->tv_sec) { - /* Since this is an cleared time, we must remove the previous entry from - the splay tree */ - rc = Curl_splayremovebyaddr(multi->timetree, - &data->state.timenode, - &multi->timetree); - if(rc) - infof(data, "Internal error clearing splay node = %d\n", rc); - infof(data, "Expire cleared\n"); - nowp->tv_sec = 0; - nowp->tv_usec = 0; - } - } - else { - struct timeval set; - int rest; - - set = Curl_tvnow(); - set.tv_sec += milli/1000; - set.tv_usec += (milli%1000)*1000; - - rest = (int)(set.tv_usec - 1000000); - if(rest > 0) { - /* bigger than a full microsec */ - set.tv_sec++; - set.tv_usec -= 1000000; - } - - if(nowp->tv_sec) { - /* This means that the struct is added as a node in the splay tree. - Compare if the new time is earlier, and only remove-old/add-new if it - is. */ - long diff = curlx_tvdiff(set, *nowp); - if(diff > 0) - /* the new expire time was later so we don't change this */ - return; - - /* Since this is an updated time, we must remove the previous entry from - the splay tree first and then re-add the new value */ - rc = Curl_splayremovebyaddr(multi->timetree, - &data->state.timenode, - &multi->timetree); - if(rc) - infof(data, "Internal error removing splay node = %d\n", rc); - } - - *nowp = set; -#if 0 - infof(data, "Expire at %ld / %ld (%ldms)\n", - (long)nowp->tv_sec, (long)nowp->tv_usec, milli); -#endif - data->state.timenode.payload = data; - multi->timetree = Curl_splayinsert((int)nowp->tv_sec, - multi->timetree, - &data->state.timenode); - } -#if 0 - Curl_splayprint(multi->timetree, 0, TRUE); -#endif -} - -CURLMcode curl_multi_assign(CURLM *multi_handle, - curl_socket_t s, void *hashp) -{ - struct Curl_sh_entry *there = NULL; - struct Curl_multi *multi = (struct Curl_multi *)multi_handle; - - if(s != CURL_SOCKET_BAD) - there = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(curl_socket_t)); - - if(!there) - return CURLM_BAD_SOCKET; - - there->socketp = hashp; - - return CURLM_OK; -} - -static bool 
multi_conn_using(struct Curl_multi *multi, - struct SessionHandle *data) -{ - /* any live CLOSEACTION-connections pointing to the give 'data' ? */ - int i; - - for(i=0; i< multi->connc->num; i++) { - if(multi->connc->connects[i] && - (multi->connc->connects[i]->data == data) && - multi->connc->connects[i]->protocol & PROT_CLOSEACTION) - return TRUE; - } - - return FALSE; -} - -/* Add the given data pointer to the list of 'closure handles' that are kept - around only to be able to close some connections nicely - just make sure - that this handle isn't already added, like for the cases when an easy - handle is removed, added and removed again... */ -static void add_closure(struct Curl_multi *multi, - struct SessionHandle *data) -{ - int i; - struct closure *cl = (struct closure *)calloc(sizeof(struct closure), 1); - struct closure *p=NULL; - struct closure *n; - if(cl) { - cl->easy_handle = data; - cl->next = multi->closure; - multi->closure = cl; - } - - p = multi->closure; - cl = p->next; /* start immediately on the second since the first is the one - we just added and it is _very_ likely to actually exist - used in the cache since that's the whole purpose of adding - it to this list! */ - - /* When adding, scan through all the other currently kept handles and see if - there are any connections still referring to them and kill them if not. 
*/ - while(cl) { - bool inuse = FALSE; - for(i=0; i< multi->connc->num; i++) { - if(multi->connc->connects[i] && - (multi->connc->connects[i]->data == cl->easy_handle)) { - inuse = TRUE; - break; - } - } - - n = cl->next; - - if(!inuse) { - /* cl->easy_handle is now killable */ - infof(data, "Delayed kill of easy handle %p\n", cl->easy_handle); - /* unmark it as not having a connection around that uses it anymore */ - cl->easy_handle->state.shared_conn= NULL; - Curl_close(cl->easy_handle); - if(p) - p->next = n; - else - multi->closure = n; - free(cl); - } - else - p = cl; - - cl = n; - } - -} - -#ifdef CURLDEBUG -void curl_multi_dump(CURLM *multi_handle) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - int i; - fprintf(stderr, "* Multi status: %d handles, %d alive\n", - multi->num_easy, multi->num_alive); - for(easy=multi->easy.next; easy; easy = easy->next) { - if(easy->state != CURLM_STATE_COMPLETED) { - /* only display handles that are not completed */ - fprintf(stderr, "handle %p, state %s, %d sockets\n", - (void *)easy->easy_handle, - statename[easy->state], easy->numsocks); - for(i=0; i < easy->numsocks; i++) { - curl_socket_t s = easy->sockets[i]; - struct Curl_sh_entry *entry = - Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s)); - - fprintf(stderr, "%d ", (int)s); - if(!entry) { - fprintf(stderr, "INTERNAL CONFUSION\n"); - continue; - } - fprintf(stderr, "[%s %s] ", - entry->action&CURL_POLL_IN?"RECVING":"", - entry->action&CURL_POLL_OUT?"SENDING":""); - } - if(easy->numsocks) - fprintf(stderr, "\n"); - } - } -} -#endif diff --git a/exsrc/src/h5diff_correct_ansi.c b/exsrc/src/h5diff_correct_ansi.c deleted file mode 100644 index a15e3ff278..0000000000 --- a/exsrc/src/h5diff_correct_ansi.c +++ /dev/null @@ -1,2222 +0,0 @@ -/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * - * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. 
* - * All rights reserved. * - * * - * This file is part of HDF5. The full HDF5 copyright notice, including * - * terms governing use, modification, and redistribution, is contained in * - * the files COPYING and Copyright.html. COPYING can be found at the root * - * of the source code distribution tree; Copyright.html can be found at the * - * root level of an installed copy of the electronic HDF5 document set and * - * is linked from the top-level documents page. It can also be found at * - * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * - * access to either file, you may request a copy from help@hdfgroup.org. * - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ - -#include - -#include "H5private.h" -#include "h5tools.h" -#include "h5tools_utils.h" -#include "h5diff.h" -#include "ph5diff.h" - -/* - * Debug printf macros. The prefix allows output filtering by test scripts. - */ -#ifdef H5DIFF_DEBUG -#define h5diffdebug(x) fprintf(stderr, "h5diff debug: " x) -#define h5diffdebug2(x1, x2) fprintf(stderr, "h5diff debug: " x1, x2) -#define h5diffdebug3(x1, x2, x3) fprintf(stderr, "h5diff debug: " x1, x2, x3) -#define h5diffdebug4(x1, x2, x3, x4) fprintf(stderr, "h5diff debug: " x1, x2, x3, x4) -#define h5diffdebug5(x1, x2, x3, x4, x5) fprintf(stderr, "h5diff debug: " x1, x2, x3, x4, x5) -#else -#define h5diffdebug(x) -#define h5diffdebug2(x1, x2) -#define h5diffdebug3(x1, x2, x3) -#define h5diffdebug4(x1, x2, x3, x4) -#define h5diffdebug5(x1, x2, x3, x4, x5) -#endif - - -/*------------------------------------------------------------------------- - * Function: print_objname - * - * Purpose: check if object name is to be printed, only when: - * 1) verbose mode - * 2) when diff was found (normal mode) - *------------------------------------------------------------------------- - */ -int print_objname (diff_opt_t * options, hsize_t nfound) -{ - return ((options->m_verbose || nfound) && !options->m_quiet) ? 
1 : 0; -} - -/*------------------------------------------------------------------------- - * Function: do_print_objname - * - * Purpose: print object name - * - *------------------------------------------------------------------------- - */ -void do_print_objname (const char *OBJ, const char *path1, const char *path2, diff_opt_t * opts) -{ - /* if verbose level is higher than 0, put space line before - * displaying any object or symbolic links. This improves - * readability of the output. - */ - if (opts->m_verbose_level >= 1) - parallel_print("\n"); - parallel_print("%-7s: <%s> and <%s>\n", OBJ, path1, path2); -} - -/*------------------------------------------------------------------------- - * Function: do_print_attrname - * - * Purpose: print attribute name - * - *------------------------------------------------------------------------- - */ -void -do_print_attrname (const char *attr, const char *path1, const char *path2) -{ - parallel_print("%-7s: <%s> and <%s>\n", attr, path1, path2); -} - -/*------------------------------------------------------------------------- - * Function: print_warn - * - * Purpose: check print warning condition. - * Return: - * 1 if verbose mode - * 0 if not verbos mode - * Programmer: Jonathan Kim - * Date: Feb 4, 2010 - *------------------------------------------------------------------------- - */ -static int print_warn(diff_opt_t *options) -{ - return ((options->m_verbose))?1:0; -} - - -#ifdef H5_HAVE_PARALLEL -/*------------------------------------------------------------------------- - * Function: phdiff_dismiss_workers - * - * Purpose: tell all workers to end. 
- * - * Return: none - * - * Programmer: Albert Cheng - * - * Date: Feb 6, 2005 - * - *------------------------------------------------------------------------- - */ -void phdiff_dismiss_workers(void) -{ - int i; - for(i=1; i0) && g_Parallel) - { - printf("%s", outBuff); - - if(overflow_file) - { - int tmp; - rewind(overflow_file); - while((tmp = getc(overflow_file)) >= 0) - putchar(tmp); - fclose(overflow_file); - overflow_file = NULL; - } - - fflush(stdout); - memset(outBuff, 0, OUTBUFF_SIZE); - outBuffOffset = 0; - } - else if( (outBuffOffset>0) && !g_Parallel) - { - fprintf(stderr, "h5diff error: outBuffOffset>0, but we're not in parallel!\n"); - } -} - -/*------------------------------------------------------------------------- - * Function: print_incoming_data - * - * Purpose: special function that prints any output that has been sent to the manager - * and is currently sitting in the incoming message queue - * - * Return: none - * - * Programmer: Leon Arber - * - * Date: March 7, 2005 - * - *------------------------------------------------------------------------- - */ - -static void print_incoming_data(void) -{ - char data[PRINT_DATA_MAX_SIZE+1]; - int incomingMessage; - MPI_Status Status; - - do - { - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &incomingMessage, &Status); - if(incomingMessage) - { - memset(data, 0, PRINT_DATA_MAX_SIZE+1); - MPI_Recv(data, PRINT_DATA_MAX_SIZE, MPI_CHAR, Status.MPI_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &Status); - - printf("%s", data); - } - } while(incomingMessage); -} -#endif - -/*------------------------------------------------------------------------- - * Function: is_valid_options - * - * Purpose: check if options are valid - * - * Return: - * 1 : Valid - * 0 : Not valid - * - * Programmer: Jonathan Kim - * - * Date: Feb 17, 2010 - * - *------------------------------------------------------------------------*/ -static int is_valid_options(diff_opt_t *options) -{ - int ret=1; /* init to valid */ 
- - /*----------------------------------------------- - * no -q(quiet) with -v (verbose) or -r (report) */ - if(options->m_quiet && (options->m_verbose || options->m_report)) - { - parallel_print("Error: -q (quiet mode) cannot be added to verbose or report modes\n"); - options->err_stat=1; - ret = 0; - goto out; - } - - /* ------------------------------------------------------- - * only allow --no-dangling-links along with --follow-symlinks */ - if(options->no_dangle_links && !options->follow_links) - { - parallel_print("Error: --no-dangling-links must be used along with --follow-symlinks option.\n"); - options->err_stat=1; - ret = 0; - goto out; - } - -out: - - return ret; -} - -/*------------------------------------------------------------------------- - * Function: is_exclude_path - * - * Purpose: check if 'paths' are part of exclude path list - * - * Return: - * 1 - excluded path - * 0 - not excluded path - * - * Programmer: Jonathan Kim - * Date: Aug 23, 2010 - *------------------------------------------------------------------------*/ -static int is_exclude_path (char * path, h5trav_type_t type, diff_opt_t *options) -{ - struct exclude_path_list * exclude_path_ptr; - int ret_cmp; - int ret = 0; - int len_grp; - - /* check if exclude path option is given */ - if (!options->exclude_path) - goto out; - - /* assign to local exclude list pointer */ - exclude_path_ptr = options->exclude; - - /* search objects in exclude list */ - while (NULL != exclude_path_ptr) - { - /* if given object is group, exclude its members as well */ - if (exclude_path_ptr->obj_type == H5TRAV_TYPE_GROUP) - { - ret_cmp = HDstrncmp(exclude_path_ptr->obj_path, path, - strlen(exclude_path_ptr->obj_path)); - if (ret_cmp == 0) - { - /* check if given path belong to an excluding group, if so - * exclude it as well. - * This verifies if “/grp1/dset1” is only under “/grp1”, but - * not under “/grp1xxx/” group. 
- */ - len_grp = HDstrlen(exclude_path_ptr->obj_path); - if (path[len_grp] == '/') - { - /* belong to excluded group! */ - ret = 1; - break; /* while */ - } - } - } - /* exclude target is not group, just exclude the object */ - else - { - ret_cmp = HDstrcmp(exclude_path_ptr->obj_path, path); - if (ret_cmp == 0) - { - /* excluded non-group object */ - ret = 1; - /* assign type as scan progress, which is sufficient to - * determine type for excluding groups from the above if. */ - exclude_path_ptr->obj_type = type; - break; /* while */ - } - } - exclude_path_ptr = exclude_path_ptr->next; - } - -out: - return ret; -} - - -/*------------------------------------------------------------------------- - * Function: free_exclude_path_list - * - * Purpose: free exclud object list from diff options - * - * Programmer: Jonathan Kim - * Date: Aug 23, 2010 - *------------------------------------------------------------------------*/ -static void free_exclude_path_list(diff_opt_t *options) -{ - struct exclude_path_list * curr = options->exclude; - struct exclude_path_list * next; - - while (NULL != curr) - { - next = curr->next; - HDfree(curr); - curr = next; - } -} - -/*------------------------------------------------------------------------- - * Function: build_match_list - * - * Purpose: get list of matching path_name from info1 and info2 - * - * Note: - * Find common objects; the algorithm used for this search is the - * cosequential match algorithm and is described in - * Folk, Michael; Zoellick, Bill. (1992). File Structures. Addison-Wesley. - * Moved out from diff_match() to make code more flexible. 
- * - * Parameter: - * table_out [OUT] : return the list - * - * Programmer: Jonathan Kim - * - * Date: Aug 18, 2010 - *------------------------------------------------------------------------*/ -static void build_match_list (const char *objname1, trav_info_t *info1, const char *objname2, trav_info_t *info2, trav_table_t ** table_out, diff_opt_t *options) -{ - unsigned i; - size_t curr1 = 0; - size_t curr2 = 0; - unsigned infile[2]; - char * path1_lp; - char * path2_lp; - h5trav_type_t type1_l; - h5trav_type_t type2_l; - int path1_offset = 0; - int path2_offset = 0; - int cmp; - trav_table_t *table; - size_t idx; - - /* init */ - trav_table_init( &table ); - - /* - * This is necessary for the case that given objects are group and - * have different names (ex: obj1 is /grp1 and obj2 is /grp5). - * All the objects belong to given groups are the cadidates. - * So prepare to compare paths without the group names. - */ - /* if obj1 is not root */ - if (HDstrcmp (objname1,"/") != 0) - path1_offset = HDstrlen(objname1); - /* if obj2 is not root */ - if (HDstrcmp (objname2,"/") != 0) - path2_offset = HDstrlen(objname2); - - /*-------------------------------------------------- - * build the list - */ - while(curr1 < info1->nused && curr2 < info2->nused) - { - - path1_lp = (info1->paths[curr1].path) + path1_offset; - path2_lp = (info2->paths[curr2].path) + path2_offset; - type1_l = info1->paths[curr1].type; - type2_l = info2->paths[curr2].type; - - /* criteria is string compare */ - cmp = HDstrcmp(path1_lp, path2_lp); - - if(cmp == 0) { - if(!is_exclude_path(path1_lp, type1_l, options)) - { - infile[0] = 1; - infile[1] = 1; - trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table); - /* if the two point to the same target object, - * mark that in table */ - if (info1->paths[curr1].fileno == info2->paths[curr2].fileno && - info1->paths[curr1].objno == info2->paths[curr2].objno ) - { - idx = table->nobjs - 1; - table->objs[idx].is_same_trgobj = 1; - } - } - 
curr1++; - curr2++; - } /* end if */ - else if(cmp < 0) - { - if(!is_exclude_path(path1_lp, type1_l, options)) - { - infile[0] = 1; - infile[1] = 0; - trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table); - } - curr1++; - } /* end else-if */ - else - { - if (!is_exclude_path(path2_lp, type2_l, options)) - { - infile[0] = 0; - infile[1] = 1; - trav_table_addflags(infile, path2_lp, info2->paths[curr2].type, table); - } - curr2++; - } /* end else */ - } /* end while */ - - /* list1 did not end */ - infile[0] = 1; - infile[1] = 0; - while(curr1 < info1->nused) - { - if(!is_exclude_path(path1_lp, type1_l, options)) - { - path1_lp = (info1->paths[curr1].path) + path1_offset; - trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table); - } - curr1++; - } /* end while */ - - /* list2 did not end */ - infile[0] = 0; - infile[1] = 1; - while(curr2 < info2->nused) - { - if (!is_exclude_path(path2_lp, type2_l, options)) - { - path2_lp = (info2->paths[curr2].path) + path2_offset; - trav_table_addflags(infile, path2_lp, info2->paths[curr2].type, table); - } - curr2++; - } /* end while */ - - free_exclude_path_list (options); - /*------------------------------------------------------ - * print the list - */ - if(options->m_verbose) - { - parallel_print("\n"); - /* if given objects is group under root */ - if (HDstrcmp (objname1,"/") || HDstrcmp (objname2,"/")) - parallel_print("group1 group2\n"); - else - parallel_print("file1 file2\n"); - parallel_print("---------------------------------------\n"); - for(i = 0; i < table->nobjs; i++) - { - char c1, c2; - c1 = (table->objs[i].flags[0]) ? 'x' : ' '; - c2 = (table->objs[i].flags[1]) ? 
'x' : ' '; - parallel_print("%5c %6c %-15s\n", c1, c2, table->objs[i].name); - } /* end for */ - parallel_print ("\n"); - } /* end if */ - - *table_out = table; -} - - -/*------------------------------------------------------------------------- - * Function: trav_grp_objs - * - * Purpose: - * Call back function from h5trav_visit(). - * - * Programmer: Jonathan Kim - * - * Date: Aug 16, 2010 - *------------------------------------------------------------------------*/ -static herr_t trav_grp_objs(const char *path, const H5O_info_t *oinfo, - const char *already_visited, void *udata) -{ - trav_info_visit_obj(path, oinfo, already_visited, udata); - - return 0; -} - -/*------------------------------------------------------------------------- - * Function: trav_grp_symlinks - * - * Purpose: - * Call back function from h5trav_visit(). - * Track and extra checkings while visiting all symbolic-links. - * - * Programmer: Jonathan Kim - * - * Date: Aug 16, 2010 - *------------------------------------------------------------------------*/ -static herr_t trav_grp_symlinks(const char *path, const H5L_info_t *linfo, - void *udata) -{ - trav_info_t *tinfo = (trav_info_t *)udata; - diff_opt_t *opts = (diff_opt_t *)tinfo->opts; - int ret; - h5tool_link_info_t lnk_info; - const char *ext_fname; - const char *ext_path; - - /* init linkinfo struct */ - memset(&lnk_info, 0, sizeof(h5tool_link_info_t)); - - if (!opts->follow_links) - { - trav_info_visit_lnk(path, linfo, tinfo); - goto done; - } - - switch(linfo->type) - { - case H5L_TYPE_SOFT: - ret = H5tools_get_symlink_info(tinfo->fid, path, &lnk_info, opts->follow_links); - /* error */ - if (ret < 0) - goto done; - /* no dangling link option given and detect dangling link */ - else if (ret == 0) - { - tinfo->symlink_visited.dangle_link = TRUE; - trav_info_visit_lnk(path, linfo, tinfo); - if (opts->no_dangle_links) - opts->err_stat = 1; /* make dgangling link is error */ - goto done; - } - - /* check if already visit the target object 
*/ - if(symlink_is_visited( &(tinfo->symlink_visited), linfo->type, NULL, lnk_info.trg_path)) - goto done; - - /* add this link as visited link */ - if(symlink_visit_add( &(tinfo->symlink_visited), linfo->type, NULL, lnk_info.trg_path) < 0) - goto done; - - if(h5trav_visit(tinfo->fid, path, TRUE, TRUE, - trav_grp_objs,trav_grp_symlinks, tinfo) < 0) - { - parallel_print("Error: Could not get file contents\n"); - opts->err_stat = 1; - goto done; - } - break; - - case H5L_TYPE_EXTERNAL: - ret = H5tools_get_symlink_info(tinfo->fid, path, &lnk_info, opts->follow_links); - /* error */ - if (ret < 0) - goto done; - /* no dangling link option given and detect dangling link */ - else if (ret == 0) - { - tinfo->symlink_visited.dangle_link = TRUE; - trav_info_visit_lnk(path, linfo, tinfo); - if (opts->no_dangle_links) - opts->err_stat = 1; /* make dgangling link is error */ - goto done; - } - - if(H5Lunpack_elink_val(lnk_info.trg_path, linfo->u.val_size, NULL, &ext_fname, &ext_path) < 0) - goto done; - - /* check if already visit the target object */ - if(symlink_is_visited( &(tinfo->symlink_visited), linfo->type, ext_fname, ext_path)) - goto done; - - /* add this link as visited link */ - if(symlink_visit_add( &(tinfo->symlink_visited), linfo->type, ext_fname, ext_path) < 0) - goto done; - - if(h5trav_visit(tinfo->fid, path, TRUE, TRUE, - trav_grp_objs,trav_grp_symlinks, tinfo) < 0) - { - parallel_print("Error: Could not get file contents\n"); - opts->err_stat = 1; - goto done; - } - break; - default: - ; - break; - } /* end of switch */ - -done: - if (lnk_info.trg_path) - HDfree(lnk_info.trg_path); - return 0; -} - - -/*------------------------------------------------------------------------- - * Function: h5diff - * - * Purpose: public function, can be called in an application program. - * return differences between 2 HDF5 files - * - * Return: Number of differences found. 
- * - * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu - * - * Date: October 22, 2003 - * - *------------------------------------------------------------------------- - */ -hsize_t h5diff(const char *fname1, - const char *fname2, - const char *objname1, - const char *objname2, - diff_opt_t *options) -{ - hid_t file1_id = (-1); - hid_t file2_id = (-1); - char filenames[2][MAX_FILENAME]; - hsize_t nfound = 0; - int i; - int l_ret; - const char * obj1fullname = NULL; - const char * obj2fullname = NULL; - /* init to group type */ - h5trav_type_t obj1type = H5TRAV_TYPE_GROUP; - h5trav_type_t obj2type = H5TRAV_TYPE_GROUP; - /* for single object */ - H5O_info_t oinfo1, oinfo2; /* object info */ - trav_info_t *info1_obj = NULL; - trav_info_t *info2_obj = NULL; - /* for group object */ - trav_info_t *info1_grp = NULL; - trav_info_t *info2_grp = NULL; - /* local pointer */ - trav_info_t *info1_lp; - trav_info_t *info2_lp; - /* link info from specified object */ - H5L_info_t src_linfo1; - H5L_info_t src_linfo2; - /* link info from member object */ - h5tool_link_info_t trg_linfo1; - h5tool_link_info_t trg_linfo2; - /* list for common objects */ - trav_table_t *match_list = NULL; - - /* init filenames */ - HDmemset(filenames, 0, MAX_FILENAME * 2); - /* init link info struct */ - HDmemset(&trg_linfo1, 0, sizeof(h5tool_link_info_t)); - HDmemset(&trg_linfo2, 0, sizeof(h5tool_link_info_t)); - - /*------------------------------------------------------------------------- - * check invalid combination of options - *-----------------------------------------------------------------------*/ - if(!is_valid_options(options)) - goto out; - - options->cmn_objs = 1; /* eliminate warning */ - - /*------------------------------------------------------------------------- - * open the files first; if they are not valid, no point in continuing - *------------------------------------------------------------------------- - */ - - /* disable error reporting */ - H5E_BEGIN_TRY - { - /* open file 1 */ - 
if((file1_id = h5tools_fopen(fname1, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) - { - parallel_print("h5diff: <%s>: unable to open file\n", fname1); - options->err_stat = 1; - goto out; - } /* end if */ - - - /* open file 2 */ - if((file2_id = h5tools_fopen(fname2, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) - { - parallel_print("h5diff: <%s>: unable to open file\n", fname2); - options->err_stat = 1; - goto out; - } /* end if */ - /* enable error reporting */ - } H5E_END_TRY; - - /*------------------------------------------------------------------------- - * Initialize the info structs - *------------------------------------------------------------------------- - */ - trav_info_init(fname1, file1_id, &info1_obj); - trav_info_init(fname2, file2_id, &info2_obj); - - /* if any object is specified */ - if (objname1) - { - /* malloc 2 more for "/" and end-of-line */ - obj1fullname = (char*)HDcalloc(HDstrlen(objname1) + 2, sizeof(char)); - obj2fullname = (char*)HDcalloc(HDstrlen(objname2) + 2, sizeof(char)); - - /* make the given object1 fullpath, start with "/" */ - if (HDstrncmp(objname1, "/", 1)) - { - HDstrcpy(obj1fullname, "/"); - HDstrcat(obj1fullname, objname1); - } - else - HDstrcpy(obj1fullname, objname1); - - /* make the given object2 fullpath, start with "/" */ - if (HDstrncmp(objname2, "/", 1)) - { - HDstrcpy(obj2fullname, "/"); - HDstrcat(obj2fullname, objname2); - } - else - HDstrcpy(obj2fullname, objname2); - - /*---------------------------------------------------------- - * check if obj1 is root, group, single object or symlink - */ - if(!HDstrcmp(obj1fullname, "/")) - { - obj1type = H5TRAV_TYPE_GROUP; - } - else - { - /* check if link itself exist */ - if(H5Lexists(file1_id, obj1fullname, H5P_DEFAULT) <= 0) - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj1fullname, fname1); - options->err_stat = 1; - goto out; - } - /* get info from link */ - if(H5Lget_info(file1_id, obj1fullname, &src_linfo1, 
H5P_DEFAULT) < 0) - { - parallel_print("Unable to get link info from <%s>\n", obj1fullname); - goto out; - } - - info1_lp = info1_obj; - - /* - * check the type of specified path for hard and symbolic links - */ - if(src_linfo1.type == H5L_TYPE_HARD) - { - /* optional data pass */ - info1_obj->opts = (diff_opt_t*)options; - - if(H5Oget_info_by_name(file1_id, obj1fullname, &oinfo1, H5P_DEFAULT) < 0) - { - parallel_print("Error: Could not get file contents\n"); - options->err_stat = 1; - goto out; - } - obj1type = oinfo1.type; - trav_info_add(info1_obj, obj1fullname, obj1type); - } - else if (src_linfo1.type == H5L_TYPE_SOFT) - { - obj1type = H5TRAV_TYPE_LINK; - trav_info_add(info1_obj, obj1fullname, obj1type); - } - else if (src_linfo1.type == H5L_TYPE_EXTERNAL) - { - obj1type = H5TRAV_TYPE_UDLINK; - trav_info_add(info1_obj, obj1fullname, obj1type); - } - } - - /*---------------------------------------------------------- - * check if obj2 is root, group, single object or symlink - */ - if(!HDstrcmp(obj2fullname, "/")) - { - obj2type = H5TRAV_TYPE_GROUP; - } - else - { - /* check if link itself exist */ - if(H5Lexists(file2_id, obj2fullname, H5P_DEFAULT) <= 0) - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj2fullname, fname2); - options->err_stat = 1; - goto out; - } - /* get info from link */ - if(H5Lget_info(file2_id, obj2fullname, &src_linfo2, H5P_DEFAULT) < 0) - { - parallel_print("Unable to get link info from <%s>\n", obj2fullname); - goto out; - } - - info2_lp = info2_obj; - - /* - * check the type of specified path for hard and symbolic links - */ - if(src_linfo2.type == H5L_TYPE_HARD) - { - /* optional data pass */ - info2_obj->opts = (diff_opt_t*)options; - - if(H5Oget_info_by_name(file2_id, obj2fullname, &oinfo2, H5P_DEFAULT) < 0) - { - parallel_print("Error: Could not get file contents\n"); - options->err_stat = 1; - goto out; - } - obj2type = oinfo2.type; - trav_info_add(info2_obj, obj2fullname, obj2type); - } - else if 
(src_linfo2.type == H5L_TYPE_SOFT) - { - obj2type = H5TRAV_TYPE_LINK; - trav_info_add(info2_obj, obj2fullname, obj2type); - } - else if (src_linfo2.type == H5L_TYPE_EXTERNAL) - { - obj2type = H5TRAV_TYPE_UDLINK; - trav_info_add(info2_obj, obj2fullname, obj2type); - } - } - } - /* if no object specified */ - else - { - /* set root group */ - obj1fullname = (char*)HDcalloc(2, sizeof(char)); - HDstrcat(obj1fullname, "/"); - obj2fullname = (char*)HDcalloc(2, sizeof(char)); - HDstrcat(obj2fullname, "/"); - } - - /* - * If verbose options is used, need to traverse thorugh the list of objects - * in the group to print out objects information. - * Use h5tools_is_obj_same() to improve performance by skipping - * comparing details of same objects. - */ - if(!(options->m_verbose || options->m_report)) - { - if (h5tools_is_obj_same(file1_id,obj1fullname,file2_id,obj2fullname)!=0) - goto out; - } - - /*--------------------------------------------- - * check for following symlinks - */ - if (options->follow_links) - { - /* pass how to handle printing warning to linkinfo option */ - if(print_warn(options)) - trg_linfo1.opt.msg_mode = trg_linfo2.opt.msg_mode = 1; - - /*------------------------------- - * check symbolic link (object1) - */ - l_ret = H5tools_get_symlink_info(file1_id, obj1fullname, &trg_linfo1, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj1fullname); - options->err_stat = 1; - goto out; - } - else - { - if(options->m_verbose) - parallel_print("obj1 <%s> is a dangling link.\n", obj1fullname); - nfound++; - print_found(nfound); - goto out; - } - } - else if(l_ret < 0) /* fail */ - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj1fullname, fname1); - options->err_stat = 1; - goto out; - } - else if(l_ret != 2) /* symbolic link */ - obj1type = trg_linfo1.trg_type; - - 
/*------------------------------- - * check symbolic link (object2) - */ - l_ret = H5tools_get_symlink_info(file2_id, obj2fullname, &trg_linfo2, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj2fullname); - options->err_stat = 1; - goto out; - } - else - { - if(options->m_verbose) - parallel_print("obj2 <%s> is a dangling link.\n", obj2fullname); - nfound++; - print_found(nfound); - goto out; - } - } - else if(l_ret < 0) /* fail */ - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj2fullname, fname2); - options->err_stat = 1; - goto out; - } - else if(l_ret != 2) /* symbolic link */ - obj2type = trg_linfo2.trg_type; - } /* end of if follow symlinks */ - - - /* if both obj1 and obj2 are group */ - if (obj1type == H5TRAV_TYPE_GROUP && obj2type == H5TRAV_TYPE_GROUP) - { - - /* - * traverse group1 - */ - trav_info_init(fname1, file1_id, &info1_grp); - /* optional data pass */ - info1_grp->opts = (diff_opt_t*)options; - - if(h5trav_visit(file1_id,obj1fullname,TRUE,TRUE, - trav_grp_objs,trav_grp_symlinks, info1_grp) < 0) - { - parallel_print("Error: Could not get file contents\n"); - options->err_stat = 1; - goto out; - } - info1_lp = info1_grp; - - /* - * traverse group2 - */ - trav_info_init(fname2, file2_id, &info2_grp); - /* optional data pass */ - info2_grp->opts = (diff_opt_t*)options; - - if(h5trav_visit(file2_id,obj2fullname,TRUE,TRUE, - trav_grp_objs,trav_grp_symlinks, info2_grp) < 0) - { - parallel_print("Error: Could not get file contents\n"); - options->err_stat = 1; - goto out; - } /* end if */ - info2_lp = info2_grp; - - -#ifdef H5_HAVE_PARALLEL - if(g_Parallel) - { - if((HDstrlen(fname1) > MAX_FILENAME) || - (HDstrlen(fname2) > MAX_FILENAME)) - { - fprintf(stderr, "The parallel diff only supports path names up to %d characters\n", MAX_FILENAME); - MPI_Abort(MPI_COMM_WORLD, 0); - } /* end 
if */ - - HDstrcpy(filenames[0], fname1); - HDstrcpy(filenames[1], fname2); - - /* Alert the worker tasks that there's going to be work. */ - for(i = 1; i < g_nTasks; i++) - MPI_Send(filenames, (MAX_FILENAME * 2), MPI_CHAR, i, MPI_TAG_PARALLEL, MPI_COMM_WORLD); - } /* end if */ -#endif - build_match_list (obj1fullname, info1_lp, obj2fullname, info2_lp, - &match_list, options); - nfound = diff_match(file1_id, obj1fullname, info1_lp, - file2_id, obj2fullname, info2_lp, - match_list, options); - } - else - { -#ifdef H5_HAVE_PARALLEL - if(g_Parallel) - /* Only single object diff, parallel workers won't be needed */ - phdiff_dismiss_workers(); -#endif - - nfound = diff_compare(file1_id, fname1, obj1fullname, info1_lp, - file2_id, fname2, obj2fullname, info2_lp, - options); - } - -out: -#ifdef H5_HAVE_PARALLEL - if(g_Parallel) - /* All done at this point, let tasks know that they won't be needed */ - phdiff_dismiss_workers(); -#endif - /* free buffers in trav_info structures */ - if (info1_obj) - trav_info_free(info1_obj); - if (info2_obj) - trav_info_free(info2_obj); - - if (info1_grp) - trav_info_free(info1_grp); - if (info2_grp) - trav_info_free(info2_grp); - - /* free buffers */ - if (obj1fullname) - HDfree(obj1fullname); - if (obj2fullname) - HDfree(obj2fullname); - - /* free link info buffer */ - if (trg_linfo1.trg_path) - HDfree(trg_linfo1.trg_path); - if (trg_linfo2.trg_path) - HDfree(trg_linfo2.trg_path); - - /* close */ - H5E_BEGIN_TRY - { - H5Fclose(file1_id); - H5Fclose(file2_id); - } H5E_END_TRY; - - return nfound; -} - - - -/*------------------------------------------------------------------------- - * Function: diff_match - * - * Purpose: - * Compare common objects in given groups according to table structure. - * The table structure has flags which can be used to find common objects - * and will be compared. - * Common object means same name (absolute path) objects in both location. 
- * - * Return: Number of differences found - * - * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu - * - * Date: May 9, 2003 - * - * Modifications: Jan 2005 Leon Arber, larber@uiuc.edu - * Added support for parallel diffing - * - * Pedro Vicente, pvn@hdfgroup.org, Nov 4, 2008 - * Compare the graph and make h5diff return 1 for difference if - * 1) the number of objects in file1 is not the same as in file2 - * 2) the graph does not match, i.e same names (absolute path) - * 3) objects with the same name are not of the same type - *------------------------------------------------------------------------- - */ -hsize_t diff_match(hid_t file1_id, const char *grp1, trav_info_t *info1, - hid_t file2_id, const char *grp2, trav_info_t *info2, - trav_table_t *table, diff_opt_t *options) -{ - hsize_t nfound = 0; - unsigned i; - - char * grp1_path = ""; - char * grp2_path = ""; - char * obj1_fullpath = NULL; - char * obj2_fullpath = NULL; - h5trav_type_t objtype; - diff_args_t argdata; - - - /* - * if not root, prepare object name to be pre-appended to group path to - * make full path - */ - if (HDstrcmp (grp1, "/")) - grp1_path = grp1; - if (HDstrcmp (grp2, "/")) - grp2_path = grp2; - - /*------------------------------------------------------------------------- - * regarding the return value of h5diff (0, no difference in files, 1 difference ) - * 1) the number of objects in file1 must be the same as in file2 - * 2) the graph must match, i.e same names (absolute path) - * 3) objects with the same name must be of the same type - *------------------------------------------------------------------------- - */ - - /* not valid compare nused when --exclude-path option is used */ - if (!options->exclude_path) - { - /* number of different objects */ - if ( info1->nused != info2->nused ) - { - options->contents = 0; - } - } - - /* objects in one file and not the other */ - for( i = 0; i < table->nobjs; i++) - { - if( table->objs[i].flags[0] != table->objs[i].flags[1] ) - { - 
options->contents = 0; - break; - } - } - - /* objects with the same name but different HDF5 types */ - for( i = 0; i < table->nobjs; i++) - { - if ( table->objs[i].flags[0] && table->objs[i].flags[1] ) - { - if ( table->objs[i].type != table->objs[i].type ) - { - options->contents = 0; - } - } - } - - /*------------------------------------------------------------------------- - * do the diff for common objects - *------------------------------------------------------------------------- - */ -#ifdef H5_HAVE_PARALLEL - { - char *workerTasks = (char*)HDmalloc((g_nTasks - 1) * sizeof(char)); - int n; - int busyTasks = 0; - struct diffs_found nFoundbyWorker; - struct diff_mpi_args args; - int havePrintToken = 1; - MPI_Status Status; - - /*set all tasks as free */ - HDmemset(workerTasks, 1, (g_nTasks - 1)); -#endif - - for(i = 0; i < table->nobjs; i++) - { - if( table->objs[i].flags[0] && table->objs[i].flags[1]) - { - objtype = table->objs[i].type; - /* make full path for obj1 */ - obj1_fullpath = (char*)HDcalloc (strlen(grp1_path) + strlen (table->objs[i].name) + 1, sizeof (char)); - HDstrcpy(obj1_fullpath, grp1_path); - HDstrcat(obj1_fullpath, table->objs[i].name); - - /* make full path for obj2 */ - obj2_fullpath = (char*)HDcalloc (strlen(grp2_path) + strlen (table->objs[i].name) + 1, sizeof (char)); - HDstrcpy(obj2_fullpath, grp2_path); - HDstrcat(obj2_fullpath, table->objs[i].name); - - /* Set argdata to pass other args into diff() */ - argdata.type = objtype; - argdata.is_same_trgobj = table->objs[i].is_same_trgobj; - - options->cmn_objs = 1; - if(!g_Parallel) - { - nfound += diff(file1_id, obj1_fullpath, - file2_id, obj2_fullpath, - options, &argdata); - } /* end if */ -#ifdef H5_HAVE_PARALLEL - else - { - int workerFound = 0; - - h5diffdebug("beginning of big else block\n"); - /* We're in parallel mode */ - /* Since the data type of diff value is hsize_t which can - * be arbitary large such that there is no MPI type that - * matches it, the value is passed 
between processes as - * an array of bytes in order to be portable. But this - * may not work in non-homogeneous MPI environments. - */ - - /*Set up args to pass to worker task. */ - if(HDstrlen(obj1_fullpath) > 255 || - HDstrlen(obj2_fullpath) > 255) - { - printf("The parallel diff only supports object names up to 255 characters\n"); - MPI_Abort(MPI_COMM_WORLD, 0); - } /* end if */ - - /* set args struct to pass */ - HDstrcpy(args.name1, obj1_fullpath); - HDstrcpy(args.name2, obj2_fullpath); - args.options = *options; - args.argdata.type = objtype; - args.argdata.is_same_trgobj = table->objs[i].is_same_trgobj; - - h5diffdebug2("busyTasks=%d\n", busyTasks); - /* if there are any outstanding print requests, let's handle one. */ - if(busyTasks > 0) - { - int incomingMessage; - - /* check if any tasks freed up, and didn't need to print. */ - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &incomingMessage, &Status); - - /* first block*/ - if(incomingMessage) - { - workerTasks[Status.MPI_SOURCE - 1] = 1; - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - } /* end if */ - - /* check to see if the print token was returned. */ - if(!havePrintToken) - { - /* If we don't have the token, someone is probably sending us output */ - print_incoming_data(); - - /* check incoming queue for token */ - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - - /* incoming token implies free task. 
*/ - if(incomingMessage) { - workerTasks[Status.MPI_SOURCE - 1] = 1; - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - havePrintToken = 1; - } /* end if */ - } /* end if */ - - /* check to see if anyone needs the print token. */ - if(havePrintToken) - { - /* check incoming queue for print token requests */ - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &incomingMessage, &Status); - if(incomingMessage) - { - MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status); - MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD); - havePrintToken = 0; - } /* end if */ - } /* end if */ - } /* end if */ - - /* check array of tasks to see which ones are free. - * Manager task never does work, so freeTasks[0] is really - * worker task 0. */ - for(n = 1; (n < g_nTasks) && !workerFound; n++) - { - if(workerTasks[n-1]) - { - /* send file id's and names to first free worker */ - MPI_Send(&args, sizeof(args), MPI_BYTE, n, MPI_TAG_ARGS, MPI_COMM_WORLD); - - /* increment counter for total number of prints. */ - busyTasks++; - - /* mark worker as busy */ - workerTasks[n - 1] = 0; - workerFound = 1; - } /* end if */ - } /* end for */ - - h5diffdebug2("workerfound is %d \n", workerFound); - if(!workerFound) - { - /* if they were all busy, we've got to wait for one free up - * before we can move on. If we don't have the token, some - * task is currently printing so we'll wait for that task to - * return it. 
- */ - - if(!havePrintToken) - { - while(!havePrintToken) - { - int incomingMessage; - - print_incoming_data(); - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - if(incomingMessage) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - havePrintToken = 1; - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - /* send this task the work unit. */ - MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD); - } /* end if */ - } /* end while */ - } /* end if */ - /* if we do have the token, check for task to free up, or wait for a task to request it */ - else - { - /* But first print all the data in our incoming queue */ - print_incoming_data(); - MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &Status); - if(Status.MPI_TAG == MPI_TAG_DONE) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD); - } /* end if */ - else if(Status.MPI_TAG == MPI_TAG_TOK_REQUEST) - { - int incomingMessage; - - MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status); - MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD); - - do - { - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - - print_incoming_data(); - } while(!incomingMessage); - - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, 
MPI_TAG_ARGS, MPI_COMM_WORLD); - } /* end else-if */ - else - { - printf("ERROR: Invalid tag (%d) received \n", Status.MPI_TAG); - MPI_Abort(MPI_COMM_WORLD, 0); - MPI_Finalize(); - } /* end else */ - } /* end else */ - } /* end if */ - } /* end else */ -#endif /* H5_HAVE_PARALLEL */ - if (obj1_fullpath) - HDfree (obj1_fullpath); - if (obj2_fullpath) - HDfree (obj2_fullpath); - } /* end if */ - } /* end for */ - h5diffdebug("done with for loop\n"); - -#ifdef H5_HAVE_PARALLEL - if(g_Parallel) - { - /* make sure all tasks are done */ - while(busyTasks > 0) - { - MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &Status); - if(Status.MPI_TAG == MPI_TAG_DONE) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - } /* end if */ - else if(Status.MPI_TAG == MPI_TAG_TOK_RETURN) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - havePrintToken = 1; - } /* end else-if */ - else if(Status.MPI_TAG == MPI_TAG_TOK_REQUEST) - { - MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status); - if(havePrintToken) - { - int incomingMessage; - - MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD); - - do { - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - - print_incoming_data(); - } while(!incomingMessage); - - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - } /* end if */ - /* someone else must have it...wait for 
them to return it, then give it to the task that just asked for it. */ - else - { - int source = Status.MPI_SOURCE; - int incomingMessage; - - do - { - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - - print_incoming_data(); - } while(!incomingMessage); - - - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - MPI_Send(NULL, 0, MPI_BYTE, source, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD); - } /* end else */ - } /* end else-if */ - else if(Status.MPI_TAG == MPI_TAG_TOK_RETURN) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - havePrintToken = 1; - } /* end else-if */ - else if(Status.MPI_TAG == MPI_TAG_PRINT_DATA) - { - char data[PRINT_DATA_MAX_SIZE + 1]; - HDmemset(data, 0, PRINT_DATA_MAX_SIZE + 1); - - MPI_Recv(data, PRINT_DATA_MAX_SIZE, MPI_CHAR, Status.MPI_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &Status); - - printf("%s", data); - } /* end else-if */ - else - { - printf("ph5diff-manager: ERROR!! 
Invalid tag (%d) received \n", Status.MPI_TAG); - MPI_Abort(MPI_COMM_WORLD, 0); - } /* end else */ - } /* end while */ - - for(i = 1; i < g_nTasks; i++) - MPI_Send(NULL, 0, MPI_BYTE, i, MPI_TAG_END, MPI_COMM_WORLD); - - /* Print any final data waiting in our queue */ - print_incoming_data(); - } /* end if */ - h5diffdebug("done with if block\n"); - - free(workerTasks); - } -#endif /* H5_HAVE_PARALLEL */ - - /* free table */ - if (table) - trav_table_free(table); - - return nfound; -} - - -/*------------------------------------------------------------------------- - * Function: diff_compare - * - * Purpose: get objects from list, and check for the same type - * - * Return: Number of differences found - * - * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu - * Date: May 9, 2003 - * - * Programmer: Jonathan Kim - * - add following links feature (Feb 11,2010) - *------------------------------------------------------------------------- - */ - -hsize_t diff_compare(hid_t file1_id, - const char *file1_name, - const char *obj1_name, - trav_info_t *info1, - hid_t file2_id, - const char *file2_name, - const char *obj2_name, - trav_info_t *info2, - diff_opt_t *options) -{ - int f1 = 0; - int f2 = 0; - hsize_t nfound = 0; - ssize_t i,j; - int l_ret; - int is_dangle_link1 = 0; - int is_dangle_link2 = 0; - const char *obj1name = obj1_name; - const char *obj2name = obj2_name; - diff_args_t argdata; - - /* local variables for diff() */ - h5trav_type_t obj1type, obj2type; - - /* to get link info */ - h5tool_link_info_t linkinfo1; - h5tool_link_info_t linkinfo2; - - /* init link info struct */ - HDmemset(&linkinfo1, 0, sizeof(h5tool_link_info_t)); - HDmemset(&linkinfo2, 0, sizeof(h5tool_link_info_t)); - - i = h5trav_getindex (info1, obj1name); - j = h5trav_getindex (info2, obj2name); - - if (i == -1) - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj1name, - file1_name); - f1 = 1; - } - if (j == -1) - { - parallel_print ("Object <%s> could not be found in <%s>\n", 
obj2name, - file2_name); - f2 = 1; - } - if (f1 || f2) - { - options->err_stat = 1; - return 0; - } - /* use the name with "/" first, as obtained by iterator function */ - obj1name = info1->paths[i].path; - obj2name = info2->paths[j].path; - - obj1type = info1->paths[i].type; - obj2type = info2->paths[j].type; - - /*----------------------------------------------------------------- - * follow link option, compare with target object - */ - if (options->follow_links) - { - /* pass how to handle printing warning to linkinfo option */ - if(print_warn(options)) - linkinfo1.opt.msg_mode = linkinfo2.opt.msg_mode = 1; - - /*------------------------------------------------------------ - * Soft links - *------------------------------------------------------------*/ - - /*-------------------------- - * if object1 soft link */ - if (obj1type == H5TRAV_TYPE_LINK) - { - /* get type of target object */ - l_ret = H5tools_get_symlink_info(file1_id, obj1name, &linkinfo1, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj1name); - options->err_stat = 1; - goto out; - } - else - is_dangle_link1 = 1; - } - /* fail */ - else if(l_ret < 0) - { - options->err_stat = 1; - goto out; - } - else /* OK */ - { - /* target type for diff() */ - obj1type = linkinfo1.trg_type; - } - } - - /*----------------------------- - * if object2 is soft link */ - if (obj2type == H5TRAV_TYPE_LINK) - { - /* get type target object */ - l_ret = H5tools_get_symlink_info(file2_id, obj2name, &linkinfo2, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj2name); - options->err_stat = 1; - goto out; - } - else - is_dangle_link2=1; - } - /* fail */ - else if(l_ret < 0) - { - options->err_stat = 1; - goto out; - } - else /* 
OK */ - { - /* target type for diff() */ - obj2type = linkinfo2.trg_type; - } - } - - /*------------------------------------------------------------ - * External links - *------------------------------------------------------------*/ - - /*-------------------------------- - * if object1 is external link */ - if (obj1type == H5TRAV_TYPE_UDLINK) - { - /* get type and name of target object */ - l_ret = H5tools_get_symlink_info(file1_id, obj1name, &linkinfo1, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj1name); - options->err_stat = 1; - goto out; - } - else - is_dangle_link1 = 1; - } - /* fail */ - else if(l_ret < 0) - { - options->err_stat = 1; - goto out; - } - else /* OK */ - { - /* for external link */ - if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL) - obj1type = linkinfo1.trg_type; - } - } - - /*-------------------------------- - * if object2 is external link */ - if (obj2type == H5TRAV_TYPE_UDLINK) - { - /* get type and name of target object */ - l_ret = H5tools_get_symlink_info(file2_id, obj2name, &linkinfo2, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj2name); - options->err_stat = 1; - goto out; - } - else - is_dangle_link2 = 1; - } - /* fail */ - else if(l_ret < 0) - { - options->err_stat = 1; - goto out; - } - else /* OK */ - { - /* for external link */ - if(linkinfo2.linfo.type == H5L_TYPE_EXTERNAL) - obj2type = linkinfo2.trg_type; - } - } - /* found dangling link */ - if (is_dangle_link1 || is_dangle_link2) - goto out; - } /* end of follow_links */ - - /* objects are not the same type */ - if (obj1type != obj2type) - { - if (options->m_verbose||options->m_list_not_cmp) - { - parallel_print("<%s> is of type %s and <%s> is of type %s\n", - obj1name, 
get_type(obj1type), - obj2name, get_type(obj2type)); - } - options->not_cmp=1; - goto out; - } - - /* Set argdata to pass other args into diff() */ - argdata.type = obj1type; - argdata.is_same_trgobj = 0; - - nfound = diff(file1_id, obj1name, - file2_id, obj2name, - options, &argdata); - -out: - /*------------------------------- - * handle dangling link(s) */ - /* both obj1 and obj2 are dangling links */ - if(is_dangle_link1 && is_dangle_link2) - { - if(print_objname(options, nfound)) - { - do_print_objname("dangling link", obj1name, obj2name, options); - print_found(nfound); - } - } - /* obj1 is dangling link */ - else if (is_dangle_link1) - { - if(options->m_verbose) - parallel_print("obj1 <%s> is a dangling link.\n", obj1name); - nfound++; - if(print_objname(options, nfound)) - print_found(nfound); - } - /* obj2 is dangling link */ - else if (is_dangle_link2) - { - if(options->m_verbose) - parallel_print("obj2 <%s> is a dangling link.\n", obj2name); - nfound++; - if(print_objname(options, nfound)) - print_found(nfound); - } - - /* free link info buffer */ - if (linkinfo1.trg_path) - HDfree(linkinfo1.trg_path); - if (linkinfo2.trg_path) - HDfree(linkinfo2.trg_path); - - return nfound; -} - - -/*------------------------------------------------------------------------- - * Function: diff - * - * Purpose: switch between types and choose the diff function - * TYPE is either - * H5G_GROUP Object is a group - * H5G_DATASET Object is a dataset - * H5G_TYPE Object is a named data type - * H5G_LINK Object is a symbolic link - * - * Return: Number of differences found - * - * Programmer: Jonathan Kim - * - add following links feature (Feb 11,2010) - * - Change to use diff_args_t to pass the rest of args. - * Passing through it instead of individual args provides smoother - * extensibility through its members along with MPI code update for ph5diff - * as it doesn't require interface change. 
- * (May 6,2011) - * - * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu - * Date: May 9, 2003 - *------------------------------------------------------------------------- - */ - -hsize_t diff(hid_t file1_id, - const char *path1, - hid_t file2_id, - const char *path2, - diff_opt_t * options, - diff_args_t *argdata) -{ - hid_t type1_id = (-1); - hid_t type2_id = (-1); - hid_t grp1_id = (-1); - hid_t grp2_id = (-1); - int ret; - int is_dangle_link1 = 0; - int is_dangle_link2 = 0; - int is_hard_link = 0; - hsize_t nfound = 0; - - - /* to get link info */ - h5tool_link_info_t linkinfo1; - h5tool_link_info_t linkinfo2; - - /*init link info struct */ - HDmemset(&linkinfo1,0,sizeof(h5tool_link_info_t)); - HDmemset(&linkinfo2,0,sizeof(h5tool_link_info_t)); - - /* pass how to handle printing warnings to linkinfo option */ - if(print_warn(options)) - linkinfo1.opt.msg_mode = linkinfo2.opt.msg_mode = 1; - - /* - * Get target object info for obj1 and obj2 and check dangling links. - * (for hard-linked-objects, because diff() only get the obj1's type, - * so obj2's type should be check here when diff() is called from - * diff_match() for same-named objects with dangling link only one side.) 
- */ - - /* target object1 - get type and name */ - ret = H5tools_get_symlink_info(file1_id, path1, &linkinfo1, TRUE); - /* dangling link */ - if (ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", path1); - goto out; - } - else - is_dangle_link1 = 1; - } - else if (ret < 0) - goto out; - - /* target object2 - get type and name */ - ret = H5tools_get_symlink_info(file2_id, path2, &linkinfo2, TRUE); - /* dangling link */ - if (ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", path2); - goto out; - } - else - is_dangle_link2 = 1; - } - else if (ret < 0) - goto out; - - /* found dangling link */ - if (is_dangle_link1 || is_dangle_link2) - goto out2; - - /* - * If both points to the same target object, skip comparing details inside - * of the objects to improve performance. - * Always check for the hard links, otherwise if follow symlink option is - * specified. - * - * Perform this to match the outputs as bypassing. 
- */ - is_hard_link = (argdata->type == H5TRAV_TYPE_DATASET || - argdata->type == H5TRAV_TYPE_NAMED_DATATYPE || - argdata->type == H5TRAV_TYPE_GROUP); - if (options->follow_links || is_hard_link) - { - if (argdata->is_same_trgobj) - { - /* print information is only verbose option is used */ - if(options->m_verbose || options->m_report) - { - switch(argdata->type) - { - case H5TRAV_TYPE_DATASET: - do_print_objname("dataset", path1, path2, options); - break; - case H5TRAV_TYPE_NAMED_DATATYPE: - do_print_objname("datatype", path1, path2, options); - break; - case H5TRAV_TYPE_GROUP: - do_print_objname("group", path1, path2, options); - break; - case H5TRAV_TYPE_LINK: - do_print_objname("link", path1, path2, options); - break; - case H5TRAV_TYPE_UDLINK: - if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL && linkinfo2.linfo.type == H5L_TYPE_EXTERNAL) - do_print_objname("external link", path1, path2, options); - else - do_print_objname ("user defined link", path1, path2, options); - break; - default: - parallel_print("Comparison not supported: <%s> and <%s> are of type %s\n", - path1, path2, get_type(argdata->type) ); - options->not_cmp = 1; - break; - } /* switch(type)*/ - - print_found(nfound); - } /* if(options->m_verbose || options->m_report) */ - - goto out2; - } - } - - switch(argdata->type) - { - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_DATASET - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_DATASET: - /* verbose (-v) and report (-r) mode */ - if(options->m_verbose || options->m_report) - { - do_print_objname("dataset", path1, path2, options); - nfound = diff_dataset(file1_id, file2_id, path1, path2, options); - print_found(nfound); - } - /* quiet mode (-q), just count differences */ - else if(options->m_quiet) - { - nfound = diff_dataset(file1_id, file2_id, path1, path2, options); - } - /* the rest (-c, none, ...) 
*/ - else - { - nfound = diff_dataset(file1_id, file2_id, path1, path2, options); - /* print info if difference found */ - if (nfound) - { - do_print_objname("dataset", path1, path2, options); - print_found(nfound); - } - } - break; - - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_NAMED_DATATYPE - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_NAMED_DATATYPE: - if((type1_id = H5Topen2(file1_id, path1, H5P_DEFAULT)) < 0) - goto out; - if((type2_id = H5Topen2(file2_id, path2, H5P_DEFAULT)) < 0) - goto out; - - if((ret = H5Tequal(type1_id, type2_id)) < 0) - goto out; - - /* if H5Tequal is > 0 then the datatypes refer to the same datatype */ - nfound = (ret > 0) ? 0 : 1; - - if(print_objname(options,nfound)) - do_print_objname("datatype", path1, path2, options); - - /* always print the number of differences found in verbose mode */ - if(options->m_verbose) - print_found(nfound); - - /*----------------------------------------------------------------- - * compare attributes - * the if condition refers to cases when the dataset is a - * referenced object - *----------------------------------------------------------------- - */ - if(path1) - nfound += diff_attr(type1_id, type2_id, path1, path2, options); - - if(H5Tclose(type1_id) < 0) - goto out; - if(H5Tclose(type2_id) < 0) - goto out; - break; - - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_GROUP - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_GROUP: - if(print_objname(options, nfound)) - do_print_objname("group", path1, path2, options); - - /* always print the number of differences found in verbose mode */ - if(options->m_verbose) - print_found(nfound); - - if((grp1_id = H5Gopen2(file1_id, path1, H5P_DEFAULT)) < 0) - goto out; - if((grp2_id = H5Gopen2(file2_id, path2, H5P_DEFAULT)) < 0) - goto out; - - 
/*----------------------------------------------------------------- - * compare attributes - * the if condition refers to cases when the dataset is a - * referenced object - *----------------------------------------------------------------- - */ - if(path1) - nfound += diff_attr(grp1_id, grp2_id, path1, path2, options); - - if(H5Gclose(grp1_id) < 0) - goto out; - if(H5Gclose(grp2_id) < 0) - goto out; - break; - - - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_LINK - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_LINK: - { - ret = HDstrcmp(linkinfo1.trg_path, linkinfo2.trg_path); - - /* if the target link name is not same then the links are "different" */ - nfound = (ret != 0) ? 1 : 0; - - if(print_objname(options, nfound)) - do_print_objname("link", path1, path2, options); - - if (options->follow_links) - { - /* objects are not the same type */ - if (linkinfo1.trg_type != linkinfo2.trg_type) - { - if (options->m_verbose||options->m_list_not_cmp) - { - parallel_print("<%s> is of type %s and <%s> is of type %s\n", path1, get_type(linkinfo1.trg_type), path2, get_type(linkinfo2.trg_type)); - } - options->not_cmp=1; - goto out; - } - - /* Renew type in argdata to pass into diff(). 
- * For recursive call, argdata.is_same_trgobj is already - * set from initial call, so don't reset here */ - argdata->type = linkinfo1.trg_type; - - /* call self to compare target object */ - nfound += diff(file1_id, path1, - file2_id, path2, - options, argdata); - } - - /* always print the number of differences found in verbose mode */ - if(options->m_verbose) - print_found(nfound); - - } - break; - - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_UDLINK - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_UDLINK: - { - /* Only external links will have a query function registered */ - if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL && linkinfo2.linfo.type == H5L_TYPE_EXTERNAL) - { - /* If the buffers are the same size, compare them */ - if(linkinfo1.linfo.u.val_size == linkinfo2.linfo.u.val_size) - { - ret = HDmemcmp(linkinfo1.trg_path, linkinfo2.trg_path, linkinfo1.linfo.u.val_size); - } - else - ret = 1; - - /* if "linkinfo1.trg_path" != "linkinfo2.trg_path" then the links - * are "different" extlinkinfo#.path is combination string of - * file_name and obj_name - */ - nfound = (ret != 0) ? 1 : 0; - - if(print_objname(options, nfound)) - do_print_objname("external link", path1, path2, options); - - if (options->follow_links) - { - /* objects are not the same type */ - if (linkinfo1.trg_type != linkinfo2.trg_type) - { - if (options->m_verbose||options->m_list_not_cmp) - { - parallel_print("<%s> is of type %s and <%s> is of type %s\n", path1, get_type(linkinfo1.trg_type), path2, get_type(linkinfo2.trg_type)); - } - options->not_cmp=1; - goto out; - } - - /* Renew type in argdata to pass into diff(). 
- * For recursive call, argdata.is_same_trgobj is already - * set from initial call, so don't reset here */ - argdata->type = linkinfo1.trg_type; - - nfound = diff(file1_id, path1, - file2_id, path2, - options, argdata); - } - } /* end if */ - else - { - /* If one or both of these links isn't an external link, we can only - * compare information from H5Lget_info since we don't have a query - * function registered for them. - * - * If the link classes or the buffer length are not the - * same, the links are "different" - */ - if((linkinfo1.linfo.type != linkinfo2.linfo.type) || - (linkinfo1.linfo.u.val_size != linkinfo2.linfo.u.val_size)) - nfound = 1; - else - nfound = 0; - - if (print_objname (options, nfound)) - do_print_objname ("user defined link", path1, path2, options); - } /* end else */ - - /* always print the number of differences found in verbose mode */ - if(options->m_verbose) - print_found(nfound); - } - break; - - default: - if(options->m_verbose) - parallel_print("Comparison not supported: <%s> and <%s> are of type %s\n", - path1, path2, get_type(argdata->type) ); - options->not_cmp = 1; - break; - } - - /* free link info buffer */ - if (linkinfo1.trg_path) - HDfree(linkinfo1.trg_path); - if (linkinfo2.trg_path) - HDfree(linkinfo2.trg_path); - - return nfound; - -out: - options->err_stat = 1; - -out2: - /*----------------------------------- - * handle dangling link(s) - */ - /* both path1 and path2 are dangling links */ - if(is_dangle_link1 && is_dangle_link2) - { - if(print_objname(options, nfound)) - { - do_print_objname("dangling link", path1, path2, options); - print_found(nfound); - } - } - /* path1 is dangling link */ - else if (is_dangle_link1) - { - if(options->m_verbose) - parallel_print("obj1 <%s> is a dangling link.\n", path1); - nfound++; - if(print_objname(options, nfound)) - print_found(nfound); - } - /* path2 is dangling link */ - else if (is_dangle_link2) - { - if(options->m_verbose) - parallel_print("obj2 <%s> is a dangling link.\n", 
path2); - nfound++; - if(print_objname(options, nfound)) - print_found(nfound); - } - - /* free link info buffer */ - if (linkinfo1.trg_path) - HDfree(linkinfo1.trg_path); - if (linkinfo2.trg_path) - HDfree(linkinfo2.trg_path); - - /* close */ - /* disable error reporting */ - H5E_BEGIN_TRY { - H5Tclose(type1_id); - H5Tclose(type2_id); - H5Gclose(grp1_id); - H5Tclose(grp2_id); - /* enable error reporting */ - } H5E_END_TRY; - - return nfound; -} - diff --git a/exsrc/src/o.txt b/exsrc/src/o.txt deleted file mode 100644 index 47eb655e17..0000000000 --- a/exsrc/src/o.txt +++ /dev/null @@ -1,3 +0,0 @@ -o -yes - diff --git a/exsrc/src/pbmplus/Makefile.in b/exsrc/src/pbmplus/Makefile.in deleted file mode 100644 index da35176c81..0000000000 --- a/exsrc/src/pbmplus/Makefile.in +++ /dev/null @@ -1,134 +0,0 @@ -# Makefile for pbmplus tools. -# -# Copyright (C) 1989, 1991 by Jef Poskanzer. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose and without fee is hereby granted, provided -# that the above copyright notice appear in all copies and that both that -# copyright notice and this permission notice appear in supporting -# documentation. This software is provided "as is" without express or -# implied warranty. - -# CONFIGURE: gcc makes things go faster on some machines, but not everyone -# has it. Warning: do not use gcc's -finline-functions or -fstrength-reduce -# flags, they can produce incorrect code. (This is with gcc versions 1.35, -# 1.36, and 1.37, later versions may fix these bugs.) Also, on some systems -# gcc can't compile pnmconvol - dunno why. And on some systems you can't -# use the -ansi flag, it gives compilation errors in . -CC = cc -#CC = gcc -#CC = gcc -fcombine-regs -fpcc-struct-return -#CC = gcc -ansi -pedantic -fcombine-regs -fpcc-struct-return - -# CONFIGURE: cc flags go here. -CFLAGS = -O -w -#CFLAGS = -g -#CFLAGS = -g -O - -# CONFIGURE: ld flags go here. 
Eunice users may want to use -noshare so that -# the binaries can run standalone. -LDFLAGS = -s -#LDFLAGS = -#LDFLAGS = -noshare - -# CONFIGURE: If you have an X11-style rgb color names file, define its -# path here. This is used by PPM to parse color names into rgb values. -# If you don't have such a file, comment this out and use the alternative -# hex and decimal forms to specify colors (see ppm/pgmtoppm.1 for details). -RGBDEF = -DRGB_DB=\"/usr/lib/X11/rgb\" - -# CONFIGURE: PBMPLUS's support for TIFF files depends on the library from -# Sam Leffler's TIFF Software package - see the OTHER.SYSTEMS file for a -# full description and access information. To configure PBMPLUS to use the -# library: first, if necessary, fetch the TIFF Software, unpack it in a -# scratch directory somewhere, and move the libtiff subdirectory right here -# into the PBMPLUS top-level directory. Configure and "make" in the -# libtiff directory. Yes, you do have to do the TIFF make by hand, the -# general PBMPLUS make will *not* make libtiff. Finally, uncomment the -# following five definitions. -# -# Libtiff is pretty good about portability, but there are some machines -# it has problems on. If you run into problems, you may wish to contact -# Sam directly, at the address listed in the OTHER.SYSTEMS file. -# -# By the way, you must have at least version 2.4 of libtiff. Earlier -# versions will not work. -TIFFDEF = -DLIBTIFF -TIFFINC = -I@EXTERNALS@/include -TIFFLIB = @EXTERNALS@/lib/libtiff.a -#TIFFBINARIES = tifftopnm pnmtotiff -#TIFFOBJECTS = tifftopnm.o pnmtotiff.o - -# CONFIGURE: Define the directory that you want the binaries copied to. -# If you need scripts and binaries to be in different directories, you -# can set that up too. -INSTALLBINARIES = @EXTERNALS@/bin -INSTALLSCRIPTS = $(INSTALLBINARIES) - -# CONFIGURE: Define the directories that you want the manual sources copied to, -# plus the suffix you want them to have. 
-INSTALLMANUALS1 = @EXTERNALS@/man/mann -SUFFIXMANUALS1 = n -INSTALLMANUALS3 = @EXTERNALS@/man/mann -SUFFIXMANUALS3 = n -INSTALLMANUALS5 = @EXTERNALS@/man/mann -SUFFIXMANUALS5 = n - -# CONFIGURE: Normally the man pages are installed using "cp". By changing -# this define you can use something else, for example a script that calls -# compress or pack. -MANCP = cp - -# CONFIGURE: Normally the Makefiles build and install separate binaries for -# each program. However, on some systems (especially those without shared -# libraries) this can mean a lot of space. In this case you might try -# building a "merge" instead. The idea here is to link all the binaries -# together into one huge executable, with a tiny dispatch program as the -# main. Then the merged binary is installed with file-system links for -# each program it includes. The dispatch routine can tell which program -# to run by looking at argv[0]. On a Sun3 under SunOS 3.5 the space for -# executables went from 2.9 meg to .36 meg. -# -# Note that if you make a "merge", the executables don't get created -# until you do the install. -all: binaries -install: install.bin install.man -#all: merge -#install: install.merge install.man - -# End of configurable definitions. 
- -SHELL = /bin/sh -MAKE = make -SUBDIRS = pbm pgm ppm pnm - -binaries: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' binaries ); \ - done - -merge: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' merge ); \ - done - -install.bin: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' 'INSTALLBINARIES=$(INSTALLBINARIES)' 'INSTALLSCRIPTS=$(INSTALLSCRIPTS)' install.bin ); \ - done - -install.merge: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' 'INSTALLBINARIES=$(INSTALLBINARIES)' 'INSTALLSCRIPTS=$(INSTALLSCRIPTS)' install.merge ); \ - done - -install.man: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'TIFFBINARIES=$(TIFFBINARIES)' 'INSTALLMANUALS1=$(INSTALLMANUALS1)' 'SUFFIXMANUALS1=$(SUFFIXMANUALS1)' 'INSTALLMANUALS3=$(INSTALLMANUALS3)' 'SUFFIXMANUALS3=$(SUFFIXMANUALS3)' 'INSTALLMANUALS5=$(INSTALLMANUALS5)' 'SUFFIXMANUALS5=$(SUFFIXMANUALS5)' 'MANCP=$(MANCP)' install.man ); \ - done - -clean: - -rm -f *.shar *.shar? 
art.* - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) clean ); \ - done diff --git a/exsrc/src/pbmplus/libpbm1.c b/exsrc/src/pbmplus/libpbm1.c deleted file mode 100644 index 00f3e31b2e..0000000000 --- a/exsrc/src/pbmplus/libpbm1.c +++ /dev/null @@ -1,674 +0,0 @@ -/* libpbm1.c - pbm utility library part 1 -** -** Copyright (C) 1988 by Jef Poskanzer. -** -** Permission to use, copy, modify, and distribute this software and its -** documentation for any purpose and without fee is hereby granted, provided -** that the above copyright notice appear in all copies and that both that -** copyright notice and this permission notice appear in supporting -** documentation. This software is provided "as is" without express or -** implied warranty. -*/ - -#include "pbm.h" -#include "version.h" -#include "libpbm.h" -#if __STDC__ -#include -#else /*__STDC__*/ -#include -#endif /*__STDC__*/ - - -/* Forward routines. */ - -#if defined(NEED_VFPRINTF1) || defined(NEED_VFPRINTF2) -int vfprintf ARGS(( FILE* stream, char* format, va_list args )); -#endif /*NEED_VFPRINTF*/ - - -/* Variable-sized arrays. 
*/ - -char* -pm_allocrow( cols, size ) - int cols; - int size; - { - register char* itrow; - - itrow = (char*) malloc( cols * size ); - if ( itrow == (char*) 0 ) - pm_error( "out of memory allocating a row" ); - return itrow; - } - -void -pm_freerow( itrow ) - char* itrow; - { - free( itrow ); - } - - -char** -pm_allocarray( cols, rows, size ) - int cols, rows; - int size; - { - char** its; - int i; - - its = (char**) malloc( rows * sizeof(char*) ); - if ( its == (char**) 0 ) - pm_error( "out of memory allocating an array" ); - its[0] = (char*) malloc( rows * cols * size ); - if ( its[0] == (char*) 0 ) - pm_error( "out of memory allocating an array" ); - for ( i = 1; i < rows; ++i ) - its[i] = &(its[0][i * cols * size]); - return its; - } - -void -pm_freearray( its, rows ) - char** its; - int rows; - { - free( its[0] ); - free( its ); - } - - -/* Case-insensitive keyword matcher. */ - -int -pm_keymatch( str, keyword, minchars ) - char* str; - char* keyword; - int minchars; - { - register int len; - - len = strlen( str ); - if ( len < minchars ) - return 0; - while ( --len >= 0 ) - { - register char c1, c2; - - c1 = *str++; - c2 = *keyword++; - if ( c2 == '\0' ) - return 0; - if ( isupper( c1 ) ) - c1 = tolower( c1 ); - if ( isupper( c2 ) ) - c1 = tolower( c2 ); - if ( c1 != c2 ) - return 0; - } - return 1; - } - - -/* Log base two hacks. 
*/ - -int -pm_maxvaltobits( maxval ) - int maxval; - { - if ( maxval <= 1 ) - return 1; - else if ( maxval <= 3 ) - return 2; - else if ( maxval <= 7 ) - return 3; - else if ( maxval <= 15 ) - return 4; - else if ( maxval <= 31 ) - return 5; - else if ( maxval <= 63 ) - return 6; - else if ( maxval <= 127 ) - return 7; - else if ( maxval <= 255 ) - return 8; - else if ( maxval <= 511 ) - return 9; - else if ( maxval <= 1023 ) - return 10; - else if ( maxval <= 2047 ) - return 11; - else if ( maxval <= 4095 ) - return 12; - else if ( maxval <= 8191 ) - return 13; - else if ( maxval <= 16383 ) - return 14; - else if ( maxval <= 32767 ) - return 15; - else if ( (long) maxval <= 65535L ) - return 16; - else - pm_error( "maxval of %d is too large!", maxval ); - } - -int -pm_bitstomaxval( bits ) - int bits; - { - return ( 1 << bits ) - 1; - } - - -/* Initialization. */ - -static char* progname; -static int showmessages; - -void -pm_init( argcP, argv ) - int* argcP; - char* argv[]; - { - int argn, i; - - /* Extract program name. */ - progname = rindex( argv[0], '/'); - if ( progname == NULL ) - progname = argv[0]; - else - ++progname; - - /* Check for any global args. 
*/ - showmessages = 1; - for ( argn = 1; argn < *argcP; ++argn ) - { - if ( pm_keymatch( argv[argn], "-quiet", 6 ) ) - { - showmessages = 0; - } - else if ( pm_keymatch( argv[argn], "-version", 7 ) ) - { - pm_message( "Version of %s", PBMPLUS_VERSION ); -#ifdef BSD - pm_message( "BSD defined" ); -#endif /*BSD*/ -#ifdef SYSV - pm_message( "SYSV defined" ); -#endif /*SYSV*/ -#ifdef MSDOS - pm_message( "MSDOS defined" ); -#endif /*MSDOS*/ -#ifdef PBMPLUS_RAWBITS - pm_message( "PBMPLUS_RAWBITS defined" ); -#endif /*PBMPLUS_RAWBITS*/ -#ifdef PBMPLUS_BROKENPUTC1 - pm_message( "PBMPLUS_BROKENPUTC1 defined" ); -#endif /*PBMPLUS_BROKENPUTC1*/ -#ifdef PBMPLUS_BROKENPUTC2 - pm_message( "PBMPLUS_BROKENPUTC2 defined" ); -#endif /*PBMPLUS_BROKENPUTC2*/ -#ifdef PGM_BIGGRAYS - pm_message( "PGM_BIGGRAYS defined" ); -#endif /*PGM_BIGGRAYS*/ -#ifdef PPM_PACKCOLORS - pm_message( "PPM_PACKCOLORS defined" ); -#endif /*PPM_PACKCOLORS*/ -#ifdef DEBUG - pm_message( "DEBUG defined" ); -#endif /*DEBUG*/ -#ifdef NEED_VFPRINTF1 - pm_message( "NEED_VFPRINTF1 defined" ); -#endif /*NEED_VFPRINTF1*/ -#ifdef NEED_VFPRINTF2 - pm_message( "NEED_VFPRINTF2 defined" ); -#endif /*NEED_VFPRINTF2*/ -#ifdef RGB_DB - pm_message( "RGB_DB=\"%s\"", RGB_DB ); -#endif /*RGB_DB*/ -#ifdef LIBTIFF - pm_message( "LIBTIFF defined" ); -#endif /*LIBTIFF*/ - exit( 0 ); - } - else - continue; - for ( i = argn + 1; i <= *argcP; ++i ) - argv[i - 1] = argv[i]; - --(*argcP); - } - } - -void -pbm_init( argcP, argv ) - int* argcP; - char* argv[]; - { - pm_init( argcP, argv ); - } - - -/* Error handling. */ - -void -pm_usage( usage ) - char* usage; - { - fprintf( stderr, "usage: %s %s\n", progname, usage ); - exit( 1 ); - } - -void -pm_perror( reason ) - char* reason; - { - extern int errno; - char* e; - - e = sys_errlist[errno]; - - if ( reason != 0 && reason[0] != '\0' ) - pm_error( "%s - %s", reason, e ); - else - pm_error( "%s", e ); - } - -#if __STDC__ -void -pm_message( char* format, ... 
) - { - va_list args; - - va_start( args, format ); -#else /*__STDC__*/ -/*VARARGS1*/ -void -pm_message( va_alist ) - va_dcl - { /*}*/ - va_list args; - char* format; - - va_start( args ); - format = va_arg( args, char* ); -#endif /*__STDC__*/ - - if ( showmessages ) - { - fprintf( stderr, "%s: ", progname ); - (void) vfprintf( stderr, format, args ); - fputc( '\n', stderr ); - } - va_end( args ); - } - -#if __STDC__ -void -pm_error( char* format, ... ) - { - va_list args; - - va_start( args, format ); -#else /*__STDC__*/ -/*VARARGS1*/ -void -pm_error( va_alist ) - va_dcl - { /*}*/ - va_list args; - char* format; - - va_start( args ); - format = va_arg( args, char* ); -#endif /*__STDC__*/ - - fprintf( stderr, "%s: ", progname ); - (void) vfprintf( stderr, format, args ); - fputc( '\n', stderr ); - va_end( args ); - exit( 1 ); - } - -#ifdef NEED_VFPRINTF1 - -/* Micro-vfprintf, for systems that don't have vfprintf but do have _doprnt. -*/ - -int -vfprintf( stream, format, args ) - FILE* stream; - char* format; - va_list args; - { - return _doprnt( format, args, stream ); - } -#endif /*NEED_VFPRINTF1*/ - -#ifdef NEED_VFPRINTF2 - -/* Portable mini-vfprintf, for systems that don't have either vfprintf or -** _doprnt. This depends only on fprintf. If you don't have fprintf, -** you might consider getting a new stdio library. -*/ - -int -vfprintf( stream, format, args ) - FILE* stream; - char* format; - va_list args; - { - int n; - char* ep; - char fchar; - char tformat[512]; - int do_long; - int i; - long l; - unsigned u; - unsigned long ul; - char* s; - double d; - - n = 0; - while ( *format != '\0' ) - { - if ( *format != '%' ) - { /* Not special, just write out the char. */ - (void) putc( *format, stream ); - ++n; - ++format; - } - else - { - do_long = 0; - ep = format + 1; - - /* Skip over all the field width and precision junk. */ - if ( *ep == '-' ) - ++ep; - if ( *ep == '0' ) - ++ep; - while ( isdigit( *ep ) ) - ++ep; - if ( *ep == '.' 
) - { - ++ep; - while ( isdigit( *ep ) ) - ++ep; - } - if ( *ep == '#' ) - ++ep; - if ( *ep == 'l' ) - { - do_long = 1; - ++ep; - } - - /* Here's the field type. Extract it, and copy this format - ** specifier to a temp string so we can add an end-of-string. - */ - fchar = *ep; - (void) strncpy( tformat, format, ep - format + 1 ); - tformat[ep - format + 1] = '\0'; - - /* Now do a one-argument fprintf with the format string we have - ** isolated. - */ - switch ( fchar ) - { - case 'd': - if ( do_long ) - { - l = va_arg( args, long ); - n += fprintf( stream, tformat, l ); - } - else - { - i = va_arg( args, int ); - n += fprintf( stream, tformat, i ); - } - break; - - case 'o': - case 'x': - case 'X': - case 'u': - if ( do_long ) - { - ul = va_arg( args, unsigned long ); - n += fprintf( stream, tformat, ul ); - } - else - { - u = va_arg( args, unsigned ); - n += fprintf( stream, tformat, u ); - } - break; - - case 'c': - i = (char) va_arg( args, int ); - n += fprintf( stream, tformat, i ); - break; - - case 's': - s = va_arg( args, char* ); - n += fprintf( stream, tformat, s ); - break; - - case 'e': - case 'E': - case 'f': - case 'g': - case 'G': - d = va_arg( args, double ); - n += fprintf( stream, tformat, d ); - break; - - case '%': - (void) putc( '%', stream ); - ++n; - break; - - default: - return -1; - } - - /* Resume formatting on the next character. */ - format = ep + 1; - } - } - return nc; - } -#endif /*NEED_VFPRINTF2*/ - - -/* File open/close that handles "-" as stdin and checks errors. 
*/ - -FILE* -pm_openr( name ) - char* name; - { - FILE* f; - - if ( strcmp( name, "-" ) == 0 ) - f = stdin; - else - { -#ifdef MSDOS - f = fopen( name, "rb" ); -#else /*MSDOS*/ - f = fopen( name, "r" ); -#endif /*MSDOS*/ - if ( f == NULL ) - { - pm_perror( name ); - exit( 1 ); - } - } - return f; - } - -FILE* -pm_openw( name ) - char* name; - { - FILE* f; - -#ifdef MSDOS - f = fopen( name, "wb" ); -#else /*MSDOS*/ - f = fopen( name, "w" ); -#endif /*MSDOS*/ - if ( f == NULL ) - { - pm_perror( name ); - exit( 1 ); - } - return f; - } - -void -pm_close( f ) - FILE* f; - { - fflush( f ); - if ( ferror( f ) ) - pm_message( "a file read or write error occurred at some point" ); - if ( f != stdin ) - if ( fclose( f ) != 0 ) - pm_perror( "fclose" ); - } - -/* Endian I/O. -*/ - -int -pm_readbigshort( in, sP ) - FILE* in; - short* sP; - { - int c; - - if ( (c = getc( in )) == EOF ) - return -1; - *sP = ( c & 0xff ) << 8; - if ( (c = getc( in )) == EOF ) - return -1; - *sP |= c & 0xff; - return 0; - } - -#if __STDC__ -int -pm_writebigshort( FILE* out, short s ) -#else /*__STDC__*/ -int -pm_writebigshort( out, s ) - FILE* out; - short s; -#endif /*__STDC__*/ - { - (void) putc( ( s >> 8 ) & 0xff, out ); - (void) putc( s & 0xff, out ); - return 0; - } - -int -pm_readbiglong( in, lP ) - FILE* in; - long* lP; - { - int c; - - if ( (c = getc( in )) == EOF ) - return -1; - *lP = ( c & 0xff ) << 24; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 16; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 8; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= c & 0xff; - return 0; - } - -int -pm_writebiglong( out, l ) - FILE* out; - long l; - { - (void) putc( ( l >> 24 ) & 0xff, out ); - (void) putc( ( l >> 16 ) & 0xff, out ); - (void) putc( ( l >> 8 ) & 0xff, out ); - (void) putc( l & 0xff, out ); - return 0; - } - -int -pm_readlittleshort( in, sP ) - FILE* in; - short* sP; - { - int c; - - if ( (c = getc( in )) == EOF ) - return -1; - *sP = 
c & 0xff; - if ( (c = getc( in )) == EOF ) - return -1; - *sP |= ( c & 0xff ) << 8; - return 0; - } - -#if __STDC__ -int -pm_writelittleshort( FILE* out, short s ) -#else /*__STDC__*/ -int -pm_writelittleshort( out, s ) - FILE* out; - short s; -#endif /*__STDC__*/ - { - (void) putc( s & 0xff, out ); - (void) putc( ( s >> 8 ) & 0xff, out ); - return 0; - } - -int -pm_readlittlelong( in, lP ) - FILE* in; - long* lP; - { - int c; - - if ( (c = getc( in )) == EOF ) - return -1; - *lP = c & 0xff; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 8; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 16; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 24; - return 0; - } - -int -pm_writelittlelong( out, l ) - FILE* out; - long l; - { - (void) putc( l & 0xff, out ); - (void) putc( ( l >> 8 ) & 0xff, out ); - (void) putc( ( l >> 16 ) & 0xff, out ); - (void) putc( ( l >> 24 ) & 0xff, out ); - return 0; - } diff --git a/exsrc/src/pbmplus/pbmplus.h b/exsrc/src/pbmplus/pbmplus.h deleted file mode 100644 index 7f868c83c8..0000000000 --- a/exsrc/src/pbmplus/pbmplus.h +++ /dev/null @@ -1,192 +0,0 @@ -/* pbmplus.h - header file for PBM, PGM, PPM, and PNM -** -** Copyright (C) 1988, 1989, 1991 by Jef Poskanzer. -** -** Permission to use, copy, modify, and distribute this software and its -** documentation for any purpose and without fee is hereby granted, provided -** that the above copyright notice appear in all copies and that both that -** copyright notice and this permission notice appear in supporting -** documentation. This software is provided "as is" without express or -** implied warranty. -*/ - -#ifndef _PBMPLUS_H_ -#define _PBMPLUS_H_ - -#include -#include -#include - -#if defined(USG) || defined(SVR4) -#define SYSV -#endif -#if ! 
( defined(BSD) || defined(SYSV) || defined(MSDOS) ) -/* CONFIGURE: If your system is >= 4.2BSD, set the BSD option; if you're a -** System V site, set the SYSV option; and if you're IBM-compatible, set -** MSDOS. If your compiler is ANSI C, you're probably better off setting -** SYSV - all it affects is string handling. -*/ -#define BSD -/* #define SYSV */ -/* #define MSDOS */ -#endif - -/* CONFIGURE: If you want to enable writing "raw" files, set this option. -** "Raw" files are smaller, and much faster to read and write, but you -** must have a filesystem that allows all 256 ASCII characters to be read -** and written. You will no longer be able to mail P?M files without -** using uuencode or the equivalent, or running the files through pnmnoraw. -** Note that reading "raw" files works whether writing is enabled or not. -*/ -#define PBMPLUS_RAWBITS - -/* CONFIGURE: PGM can store gray values as either bytes or shorts. For most -** applications, bytes will be big enough, and the memory savings can be -** substantial. However, if you need more than 8 bits of grayscale resolution, -** then define this symbol. -*/ -/* #define PGM_BIGGRAYS */ - -/* CONFIGURE: Normally, PPM handles a pixel as a struct of three grays. -** If grays are stored in bytes, that's 24 bits per color pixel; if -** grays are stored as shorts, that's 48 bits per color pixel. PPM -** can also be configured to pack the three grays into a single longword, -** 10 bits each, 30 bits per pixel. -** -** If you have configured PGM with the PGM_BIGGRAYS option, AND you don't -** need more than 10 bits for each color component, AND you care more about -** memory use than speed, then this option might be a win. Under these -** circumstances it will make some of the programs use 1.5 times less space, -** but all of the programs will run about 1.4 times slower. -** -** If you are not using PGM_BIGGRAYS, then this option is useless -- it -** doesn't save any space, but it still slows things down. 
-*/ -/* #define PPM_PACKCOLORS */ - -/* CONFIGURE: uncomment this to enable debugging checks. */ -/* #define DEBUG */ - -#ifdef SYSV - -#include -#define index(s,c) strchr(s,c) -#define rindex(s,c) strrchr(s,c) -#define srandom(s) srand(s) -#define random rand -#define bzero(dst,len) memset(dst,0,len) -#define bcopy(src,dst,len) memcpy(dst,src,len) -#define bcmp memcmp -extern void srand(); -extern int rand(); - -#else /*SYSV*/ - -#include -extern void srandom(); -extern long random(); - -#endif /*SYSV*/ - -extern int atoi(); -extern void exit(); -extern long time(); -extern int write(); - -/* CONFIGURE: On some systems, malloc.h doesn't declare these, so we have -** to do it. On other systems, for example HP/UX, it declares them -** incompatibly. And some systems, for example Dynix, don't have a -** malloc.h at all. A sad situation. If you have compilation problems -** that point here, feel free to tweak or remove these declarations. -*/ -#include -//extern char* malloc(); -//extern char* realloc(); -//extern char* calloc(); - -/* CONFIGURE: Some systems don't have vfprintf(), which we need for the -** error-reporting routines. If you compile and get a link error about -** this routine, uncomment the first define, which gives you a vfprintf -** that uses the theoretically non-portable but fairly common routine -** _doprnt(). If you then get a link error about _doprnt, or -** message-printing doesn't look like it's working, try the second -** define instead. -*/ -/* #define NEED_VFPRINTF1 */ -/* #define NEED_VFPRINTF2 */ - -/* End of configurable definitions. */ - - -#undef max -#define max(a,b) ((a) > (b) ? (a) : (b)) -#undef min -#define min(a,b) ((a) < (b) ? (a) : (b)) -#undef abs -#define abs(a) ((a) >= 0 ? (a) : -(a)) -#undef odd -#define odd(n) ((n) & 1) - - -/* Definitions to make PBMPLUS work with either ANSI C or C Classic. 
*/ - -#if __STDC__ -#define ARGS(alist) alist -#else /*__STDC__*/ -#define ARGS(alist) () -#define const -#endif /*__STDC__*/ - - -/* Initialization. */ - -void pm_init ARGS(( int* argcP, char* argv[] )); - - -/* Variable-sized arrays definitions. */ - -char** pm_allocarray ARGS(( int cols, int rows, int size )); -char* pm_allocrow ARGS(( int cols, int size )); -void pm_freearray ARGS(( char** its, int rows )); -void pm_freerow ARGS(( char* itrow )); - - -/* Case-insensitive keyword matcher. */ - -int pm_keymatch ARGS(( char* str, char* keyword, int minchars )); - - -/* Log base two hacks. */ - -int pm_maxvaltobits ARGS(( int maxval )); -int pm_bitstomaxval ARGS(( int bits )); - - -/* Error handling definitions. */ - -void pm_message ARGS(( char*, ... )); -void pm_error ARGS(( char*, ... )); /* doesn't return */ -void pm_perror ARGS(( char* reason )); /* doesn't return */ -void pm_usage ARGS(( char* usage )); /* doesn't return */ - - -/* File open/close that handles "-" as stdin and checks errors. */ - -FILE* pm_openr ARGS(( char* name )); -FILE* pm_openw ARGS(( char* name )); -void pm_close ARGS(( FILE* f )); - - -/* Endian I/O. */ - -int pm_readbigshort ARGS(( FILE* in, short* sP )); -int pm_writebigshort ARGS(( FILE* out, short s )); -int pm_readbiglong ARGS(( FILE* in, long* lP )); -int pm_writebiglong ARGS(( FILE* out, long l )); -int pm_readlittleshort ARGS(( FILE* in, short* sP )); -int pm_writelittleshort ARGS(( FILE* out, short s )); -int pm_readlittlelong ARGS(( FILE* in, long* lP )); -int pm_writelittlelong ARGS(( FILE* out, long l )); - - -#endif /*_PBMPLUS_H_*/ diff --git a/exsrc/src/pbmplus/pnm/Makefile.in b/exsrc/src/pbmplus/pnm/Makefile.in deleted file mode 100644 index e14ff6d7b9..0000000000 --- a/exsrc/src/pbmplus/pnm/Makefile.in +++ /dev/null @@ -1,188 +0,0 @@ -# Makefile for pnm tools. -# -# Copyright (C) 1989, 1991 by Jef Poskanzer. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose and without fee is hereby granted, provided -# that the above copyright notice appear in all copies and that both that -# copyright notice and this permission notice appear in supporting -# documentation. This software is provided "as is" without express or -# implied warranty. - -# Default values, usually overridden by top-level Makefile. -#CC = cc -CC = gcc -ansi -pedantic -fcombine-regs -fpcc-struct-return -#CFLAGS = -O -CFLAGS = -g -w -#CFLAGS = -g -O -TIFFDEF = -DLIBTIFF -TIFFINC = -I@EXTERNALS@/include -TIFFLIB = @EXTERNALS@/lib/libtiff.a -TIFFBINARIES = tifftopnm pnmtotiff -TIFFOBJECTS = tifftopnm.o pnmtotiff.o -#LDFLAGS = -s -LDFLAGS = -INSTALLBINARIES = @EXTERNALS@/bin -INSTALLSCRIPTS = $(INSTALLBINARIES) -INSTALLMANUALS1 = @EXTERNALS@/man/mann -SUFFIXMANUALS1 = 1 -INSTALLMANUALS3 = @EXTERNALS@/man/mann -SUFFIXMANUALS3 = 3 -INSTALLMANUALS5 = @EXTERNALS@/man/mann -SUFFIXMANUALS5 = 5 -MANCP = cp - -PPMDIR = ../ppm -INCLUDEPPM = -I$(PPMDIR) -LIBPPM = $(PPMDIR)/libppm.a -DEFPPM = $(PPMDIR)/ppm.h -DEFLIBPPM = $(PPMDIR)/libppm.h - -PGMDIR = ../pgm -INCLUDEPGM = -I$(PGMDIR) -LIBPGM = $(PGMDIR)/libpgm.a -DEFPGM = $(PGMDIR)/pgm.h -DEFLIBPGM = $(PGMDIR)/libpgm.h - -PBMDIR = ../pbm -INCLUDEPBM = -I$(PBMDIR) -LIBPBM = $(PBMDIR)/libpbm.a -DEFPBM = $(PBMDIR)/pbm.h ../pbmplus.h -DEFLIBPBM = $(PBMDIR)/libpbm.h - -SHELL = /bin/sh -INCLUDE = -I.. 
$(INCLUDEPPM) $(INCLUDEPGM) $(INCLUDEPBM) -ALLCFLAGS = $(CFLAGS) $(INCLUDE) $(TIFFDEF) $(TIFFINC) -LIBPNM = libpnm.a - -PORTBINARIES = pnmarith pnmcat pnmconvol pnmcrop pnmcut \ - pnmdepth pnmenlarge pnmfile pnmflip pnminvert \ - pnmnoraw pnmpaste pnmscale pnmtile pnmtops \ - pnmtorast pnmtoxwd rasttopnm xwdtopnm -MATHBINARIES = pnmgamma pnmrotate pnmshear -BINARIES = $(PORTBINARIES) $(MATHBINARIES) $(TIFFBINARIES) -SCRIPTS = anytopnm pnmindex pnmmargin pnmsmooth - -PORTOBJECTS = pnmarith.o pnmcat.o pnmconvol.o pnmcrop.o pnmcut.o \ - pnmdepth.o pnmenlarge.o pnmfile.o pnmflip.o pnminvert.o \ - pnmnoraw.o pnmpaste.o pnmscale.o pnmtile.o pnmtops.o \ - pnmtorast.o pnmtoxwd.o rasttopnm.o xwdtopnm.o \ - pnmgamma.o pnmrotate.o pnmshear.o -OBJECTS = $(PORTOBJECTS) $(TIFFOBJECTS) - -MANUALS1 = $(BINARIES) $(SCRIPTS) -MANUALS3 = libpnm -MANUALS5 = pnm - - -#all: binaries -all: merge -#install: install.bin -install: install.merge - - -binaries: $(BINARIES) - -install.bin: binaries $(SCRIPTS) - cd $(INSTALLBINARIES) ; rm -f $(BINARIES) - cp $(BINARIES) $(INSTALLBINARIES) - cd $(INSTALLSCRIPTS) ; rm -f $(SCRIPTS) - cp $(SCRIPTS) $(INSTALLSCRIPTS) - cd $(INSTALLSCRIPTS) ; chmod +x $(SCRIPTS) - - -merge: pnmmerge -pnmmerge: pnmmerge.c $(OBJECTS) $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) - $(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c $(OBJECTS) -lm $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB) - -install.merge: install.pnmmerge $(SCRIPTS) -install.pnmmerge: pnmmerge - cd $(INSTALLBINARIES) ; rm -f $(BINARIES) - cp pnmmerge $(INSTALLBINARIES) - cd $(INSTALLBINARIES) ; for i in $(BINARIES) ; do ln pnmmerge $$i ; done - rm $(INSTALLBINARIES)/pnmmerge - cd $(INSTALLSCRIPTS) ; rm -f $(SCRIPTS) - cp $(SCRIPTS) $(INSTALLSCRIPTS) - cd $(INSTALLSCRIPTS) ; chmod +x $(SCRIPTS) - - -install.man: - for i in $(MANUALS1) ; do \ - rm -f $(INSTALLMANUALS1)/$$i.$(SUFFIXMANUALS1) ; \ - $(MANCP) $$i.1 $(INSTALLMANUALS1)/$$i.$(SUFFIXMANUALS1) ; \ - done - for i in $(MANUALS3) ; do \ - rm -f 
$(INSTALLMANUALS3)/$$i.$(SUFFIXMANUALS3) ; \ - $(MANCP) $$i.3 $(INSTALLMANUALS3)/$$i.$(SUFFIXMANUALS3) ; \ - done - for i in $(MANUALS5) ; do \ - rm -f $(INSTALLMANUALS5)/$$i.$(SUFFIXMANUALS5) ; \ - $(MANCP) $$i.5 $(INSTALLMANUALS5)/$$i.$(SUFFIXMANUALS5) ; \ - done - - -# Rules for plain programs. -$(PORTBINARIES) $(TIFFBINARIES): pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB) - $(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB) - -# Rule for math-dependent programs. -$(MATHBINARIES): pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) $(LIBPNM) \ - $(LIBPPM) $(LIBPGM) $(LIBPBM) - $(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c -lm $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) - -# Rule for objects. -$(OBJECTS): pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) - $(CC) $(ALLCFLAGS) "-Dmain=$*_main" -c $*.c - -# And libraries. -$(LIBPBM): - cd $(PBMDIR) ; make lib -$(LIBPGM) FOO: - cd $(PGMDIR) ; make lib -$(LIBPPM) BAR: - cd $(PPMDIR) ; make lib -lib: $(LIBPNM) -$(LIBPNM): libpnm1.o libpnm2.o libpnm3.o libpnm4.o - -rm -f $(LIBPNM) - ar rc $(LIBPNM) libpnm1.o libpnm2.o libpnm3.o libpnm4.o - -ranlib $(LIBPNM) - -libpnm1.o: pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm1.c - $(CC) $(ALLCFLAGS) -c libpnm1.c -libpnm2.o: pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm2.c $(DEFLIBPPM) \ - $(DEFLIBPGM) $(DEFLIBPBM) - $(CC) $(ALLCFLAGS) -c libpnm2.c -libpnm3.o: pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm3.c $(DEFLIBPPM) \ - $(DEFLIBPGM) $(DEFLIBPBM) - $(CC) $(ALLCFLAGS) -c libpnm3.c -libpnm4.o: pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) rast.h libpnm4.c - $(CC) $(ALLCFLAGS) -c libpnm4.c - -# Other dependencies. 
-pnmarith pnmarith.o: pnmarith.c -pnmcat pnmcat.o: pnmcat.c -pnmconvol pnmconvol.o: pnmconvol.c -pnmcrop pnmcrop.o: pnmcrop.c -pnmcut pnmcut.o: pnmcut.c -pnmdepth pnmdepth.o: pnmdepth.c -pnmenlarge pnmenlarge.o: pnmenlarge.c -pnmfile pnmfile.o: pnmfile.c -pnmflip pnmflip.o: pnmflip.c -pnmgamma pnmgamma.o: pnmgamma.c -pnminvert pnminvert.o: pnminvert.c -pnmnoraw pnmnoraw.o: pnmnoraw.c -pnmpaste pnmpaste.o: pnmpaste.c -pnmrotate pnmrotate.o: pnmrotate.c -pnmscale pnmscale.o: pnmscale.c -pnmshear pnmshear.o: pnmshear.c -pnmtile pnmtile.o: pnmtile.c -pnmtops pnmtops.o: pnmtops.c -pnmtorast pnmtorast.o: pnmtorast.c rast.h -pnmtotiff pnmtotiff.o: pnmtotiff.c -pnmtoxwd pnmtoxwd.o: pnmtoxwd.c x11wd.h -rasttopnm rasttopnm.o: rasttopnm.c rast.h -tifftopnm tifftopnm.o: tifftopnm.c -xwdtopnm xwdtopnm.o: xwdtopnm.c x10wd.h x11wd.h - -clean: - -rm -f *.o *.a *.cat core $(BINARIES) pnmmerge diff --git a/exsrc/src/png/pngconf.h b/exsrc/src/png/pngconf.h deleted file mode 100644 index e185438ca4..0000000000 --- a/exsrc/src/png/pngconf.h +++ /dev/null @@ -1,632 +0,0 @@ - -/* pngconf.h - machine configurable file for libpng - * - * libpng version 1.5.1 - February 3, 2011 - * - * Copyright (c) 1998-2011 Glenn Randers-Pehrson - * (Version 0.96 Copyright (c) 1996, 1997 Andreas Dilger) - * (Version 0.88 Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.) - * - * This code is released under the libpng license. - * For conditions of distribution and use, see the disclaimer - * and license in png.h - * - */ - -/* Any machine specific code is near the front of this file, so if you - * are configuring libpng for a machine, you may want to read the section - * starting here down to where it starts to typedef png_color, png_text, - * and png_info. 
- */ - -#ifndef PNGCONF_H -#define PNGCONF_H - -/* PNG_NO_LIMITS_H may be used to turn off the use of the standard C - * definition file for machine specific limits, this may impact the - * correctness of the definitons below (see uses of INT_MAX). - */ -#ifndef PNG_NO_LIMITS_H -# include -#endif - -/* For the memory copy APIs (i.e. the standard definitions of these), - * because this file defines png_memcpy and so on the base APIs must - * be defined here. - */ -#ifdef BSD -# include -#else -# include -#endif - -/* For png_FILE_p - this provides the standard definition of a - * FILE - */ -#ifdef PNG_STDIO_SUPPORTED -# include -#endif - -/* This controls optimization of the reading of 16 and 32 bit values - * from PNG files. It can be set on a per-app-file basis - it - * just changes whether a macro is used to the function is called. - * The library builder sets the default, if read functions are not - * built into the library the macro implementation is forced on. - */ -#ifndef PNG_READ_INT_FUNCTIONS_SUPPORTED -# define PNG_USE_READ_MACROS -#endif -#if !defined(PNG_NO_USE_READ_MACROS) && !defined(PNG_USE_READ_MACROS) -# if PNG_DEFAULT_READ_MACROS -# define PNG_USE_READ_MACROS -# endif -#endif - -/* COMPILER SPECIFIC OPTIONS. - * - * These options are provided so that a variety of difficult compilers - * can be used. Some are fixed at build time (e.g. PNG_API_RULE - * below) but still have compiler specific implementations, others - * may be changed on a per-file basis when compiling against libpng. - */ - -/* The PNGARG macro protects us against machines that don't have function - * prototypes (ie K&R style headers). If your compiler does not handle - * function prototypes, define this macro and use the included ansi2knr. - * I've always been able to use _NO_PROTO as the indicator, but you may - * need to drag the empty declaration out in front of here, or change the - * ifdef to suit your own needs. 
- */ -#ifndef PNGARG - -# ifdef OF /* zlib prototype munger */ -# define PNGARG(arglist) OF(arglist) -# else - -# ifdef _NO_PROTO -# define PNGARG(arglist) () -# else -# define PNGARG(arglist) arglist -# endif /* _NO_PROTO */ - -# endif /* OF */ - -#endif /* PNGARG */ - -/* Function calling conventions. - * ============================= - * Normally it is not necessary to specify to the compiler how to call - * a function - it just does it - however on x86 systems derived from - * Microsoft and Borland C compilers ('IBM PC', 'DOS', 'Windows' systems - * and some others) there are multiple ways to call a function and the - * default can be changed on the compiler command line. For this reason - * libpng specifies the calling convention of every exported function and - * every function called via a user supplied function pointer. This is - * done in this file by defining the following macros: - * - * PNGAPI Calling convention for exported functions. - * PNGCBAPI Calling convention for user provided (callback) functions. - * PNGCAPI Calling convention used by the ANSI-C library (required - * for longjmp callbacks and sometimes used internally to - * specify the calling convention for zlib). - * - * These macros should never be overridden. If it is necessary to - * change calling convention in a private build this can be done - * by setting PNG_API_RULE (which defaults to 0) to one of the values - * below to select the correct 'API' variants. - * - * PNG_API_RULE=0 Use PNGCAPI - the 'C' calling convention - throughout. - * This is correct in every known environment. - * PNG_API_RULE=1 Use the operating system convention for PNGAPI and - * the 'C' calling convention (from PNGCAPI) for - * callbacks (PNGCBAPI). This is no longer required - * in any known environment - if it has to be used - * please post an explanation of the problem to the - * libpng mailing list. 
- * - * These cases only differ if the operating system does not use the C - * calling convention, at present this just means the above cases - * (x86 DOS/Windows sytems) and, even then, this does not apply to - * Cygwin running on those systems. - * - * Note that the value must be defined in pnglibconf.h so that what - * the application uses to call the library matches the conventions - * set when building the library. - */ - -/* Symbol export - * ============= - * When building a shared library it is almost always necessary to tell - * the compiler which symbols to export. The png.h macro 'PNG_EXPORT' - * is used to mark the symbols. On some systems these symbols can be - * extracted at link time and need no special processing by the compiler, - * on other systems the symbols are flagged by the compiler and just - * the declaration requires a special tag applied (unfortunately) in a - * compiler dependent way. Some systems can do either. - * - * A small number of older systems also require a symbol from a DLL to - * be flagged to the program that calls it. This is a problem because - * we do not know in the header file included by application code that - * the symbol will come from a shared library, as opposed to a statically - * linked one. For this reason the application must tell us by setting - * the magic flag PNG_USE_DLL to turn on the special processing before - * it includes png.h. - * - * Four additional macros are used to make this happen: - * - * PNG_IMPEXP The magic (if any) to cause a symbol to be exported from - * the build or imported if PNG_USE_DLL is set - compiler - * and system specific. - * - * PNG_EXPORT_TYPE(type) A macro that pre or appends PNG_IMPEXP to - * 'type', compiler specific. - * - * PNG_DLL_EXPORT Set to the magic to use during a libpng build to - * make a symbol exported from the DLL. - * - * PNG_DLL_IMPORT Set to the magic to force the libpng symbols to come - * from a DLL - used to define PNG_IMPEXP when - * PNG_USE_DLL is set. 
- */ - -/* System specific discovery. - * ========================== - * This code is used at build time to find PNG_IMPEXP, the API settings - * and PNG_EXPORT_TYPE(), it may also set a macro to indicate the DLL - * import processing is possible. On Windows/x86 systems it also sets - * compiler-specific macros to the values required to change the calling - * conventions of the various functions. - */ -#if ( defined(_Windows) || defined(_WINDOWS) || defined(WIN32) ||\ - defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__) ) &&\ - ( defined(_X86_) || defined(_X64_) || defined(_M_IX86) ||\ - defined(_M_X64) || defined(_M_IA64) ) - /* Windows system (DOS doesn't support DLLs) running on x86/x64. Includes - * builds under Cygwin or MinGW. Also includes Watcom builds but these need - * special treatment because they are not compatible with GCC or Visual C - * because of different calling conventions. - */ -# if PNG_API_RULE == 2 - /* If this line results in an error, either because __watcall is not - * understood or because of a redefine just below you cannot use *this* - * build of the library with the compiler you are using. *This* build was - * build using Watcom and applications must also be built using Watcom! - */ -# define PNGCAPI __watcall -# endif - -# if defined(__GNUC__) || (defined (_MSC_VER) && (_MSC_VER >= 800)) -# define PNGCAPI __cdecl -# if PNG_API_RULE == 1 -# define PNGAPI __stdcall -# endif -# else - /* An older compiler, or one not detected (erroneously) above, - * if necessary override on the command line to get the correct - * variants for the compiler. - */ -# ifndef PNGCAPI -# define PNGCAPI _cdecl -# endif -# if PNG_API_RULE == 1 && !defined(PNGAPI) -# define PNGAPI _stdcall -# endif -# endif /* compiler/api */ - /* NOTE: PNGCBAPI always defaults to PNGCAPI. 
*/ - -# if defined(PNGAPI) && !defined(PNG_USER_PRIVATEBUILD) - ERROR: PNG_USER_PRIVATEBUILD must be defined if PNGAPI is changed -# endif - -# if (defined(_MSC_VER) && _MSC_VER < 800) ||\ - (defined(__BORLANDC__) && __BORLANDC__ < 0x500) - /* older Borland and MSC - * compilers used '__export' and required this to be after - * the type. - */ -# ifndef PNG_EXPORT_TYPE -# define PNG_EXPORT_TYPE(type) type PNG_IMPEXP -# endif -# define PNG_DLL_EXPORT __export -# else /* newer compiler */ -# define PNG_DLL_EXPORT __declspec(dllexport) -# ifndef PNG_DLL_IMPORT -# define PNG_DLL_IMPORT __declspec(dllimport) -# endif -# endif /* compiler */ - -#else /* !Windows/x86 */ -# if (defined(__IBMC__) || defined(__IBMCPP__)) && defined(__OS2__) -# define PNGAPI _System -# else /* !Windows/x86 && !OS/2 */ - /* Use the defaults, or define PNG*API on the command line (but - * this will have to be done for every compile!) - */ -# endif /* other system, !OS/2 */ -#endif /* !Windows/x86 */ - -/* Now do all the defaulting . */ -#ifndef PNGCAPI -# define PNGCAPI -#endif -#ifndef PNGCBAPI -# define PNGCBAPI PNGCAPI -#endif -#ifndef PNGAPI -# define PNGAPI PNGCAPI -#endif - -/* The default for PNG_IMPEXP depends on whether the library is - * being built or used. - */ -#ifndef PNG_IMPEXP -# ifdef PNGLIB_BUILD - /* Building the library */ -# if (defined(DLL_EXPORT)/*from libtool*/ ||\ - defined(_WINDLL) || defined(_DLL) || defined(__DLL__) ||\ - defined(_USRDLL) ||\ - defined(PNG_BUILD_DLL)) && defined(PNG_DLL_EXPORT) - /* Building a DLL. */ -# define PNG_IMPEXP PNG_DLL_EXPORT -# endif /* DLL */ -# else - /* Using the library */ -# if defined(PNG_USE_DLL) && defined(PNG_DLL_IMPORT) - /* This forces use of a DLL, disallowing static linking */ -# define PNG_IMPEXP PNG_DLL_IMPORT -# endif -# endif - -# ifndef PNG_IMPEXP -# define PNG_IMPEXP -# endif -#endif - -/* THe following complexity is concerned with getting the 'attributes' of the - * declared function in the correct place. 
This potentially requires a separate - * PNG_EXPORT function for every compiler. - */ -#ifndef PNG_FUNCTION -# if defined (__GNUC__) && !defined(__clang__) -# define PNG_FUNCTION(type, name, args, attributes)\ - attributes type name args -# else /* !GNUC */ -# ifdef _MSC_VER -# define PNG_FUNCTION(type, name, args, attributes)\ - attributes type name args -# else /* !MSC */ -# define PNG_FUNCTION(type, name, args, attributes)\ - type name args -# endif -# endif -#endif - -#ifndef PNG_EXPORT_TYPE -# define PNG_EXPORT_TYPE(type) PNG_IMPEXP type -#endif - - /* The ordinal value is only relevant when preprocessing png.h for symbol - * table entries, so we discard it here. See the .dfn files in the - * scripts directory. - */ -#ifndef PNG_EXPORTA -# define PNG_EXPORTA(ordinal, type, name, args, attributes)\ - extern PNG_FUNCTION(PNG_EXPORT_TYPE(type),(PNGAPI name),PNGARG(args),\ - attributes) -#endif - -#define PNG_EXPORT(ordinal, type, name, args)\ - PNG_EXPORTA(ordinal, type, name, args, ) - -/* Use PNG_REMOVED to comment out a removed interface. */ -#ifndef PNG_REMOVED -# define PNG_REMOVED(ordinal, type, name, args, attributes) -#endif - -#ifndef PNG_CALLBACK -# define PNG_CALLBACK(type, name, args, attributes)\ - type (PNGCBAPI name) PNGARG(args) attributes -#endif - -/* Support for compiler specific function attributes. These are used - * so that where compiler support is available incorrect use of API - * functions in png.h will generate compiler warnings. - * - * Added at libpng-1.2.41. - */ - -#ifndef PNG_NO_PEDANTIC_WARNINGS -# ifndef PNG_PEDANTIC_WARNINGS_SUPPORTED -# define PNG_PEDANTIC_WARNINGS_SUPPORTED -# endif -#endif - -#ifdef PNG_PEDANTIC_WARNINGS_SUPPORTED - /* Support for compiler specific function attributes. These are used - * so that where compiler support is available incorrect use of API - * functions in png.h will generate compiler warnings. Added at libpng - * version 1.2.41. 
- */ -# if defined (__GNUC__) && !defined(__clang__) -# ifndef PNG_USE_RESULT -# define PNG_USE_RESULT __attribute__((__warn_unused_result__)) -# endif -# ifndef PNG_NORETURN -# define PNG_NORETURN __attribute__((__noreturn__)) -# endif -# ifndef PNG_PTR_NORETURN -# define PNG_PTR_NORETURN __attribute__((__noreturn__)) -# endif -# ifndef PNG_ALLOCATED -# define PNG_ALLOCATED __attribute__((__malloc__)) -# endif - - /* This specifically protects structure members that should only be - * accessed from within the library, therefore should be empty during - * a library build. - */ -# ifndef PNGLIB_BUILD -# ifndef PNG_DEPRECATED -# define PNG_DEPRECATED __attribute__((__deprecated__)) -# endif -# ifndef PNG_DEPSTRUCT -# define PNG_DEPSTRUCT __attribute__((__deprecated__)) -# endif -# ifndef PNG_PRIVATE -# if 0 /* Doesn't work so we use deprecated instead*/ -# define PNG_PRIVATE \ - __attribute__((warning("This function is not exported by libpng."))) -# else -# define PNG_PRIVATE \ - __attribute__((__deprecated__)) -# endif -# endif /* PNG_PRIVATE */ -# endif /* PNGLIB_BUILD */ -# endif /* __GNUC__ */ -# ifdef _MSC_VER /* may need to check value */ -# ifndef PNG_USE_RESULT -# define PNG_USE_RESULT /*not supported*/ -# endif -# ifndef PNG_NORETURN -# define PNG_NORETURN __declspec(noreturn) -# endif -# ifndef PNG_PTR_NORETURN -# define PNG_PTR_NORETURN /*not supported*/ -# endif -# ifndef PNG_ALLOCATED -# define PNG_ALLOCATED __declspec(restrict) -# endif - - /* This specifically protects structure members that should only be - * accessed from within the library, therefore should be empty during - * a library build. 
- */ -# ifndef PNGLIB_BUILD -# ifndef PNG_DEPRECATED -# define PNG_DEPRECATED __declspec(deprecated) -# endif -# ifndef PNG_DEPSTRUCT -# define PNG_DEPSTRUCT __declspec(deprecated) -# endif -# ifndef PNG_PRIVATE -# define PNG_PRIVATE __declspec(deprecated) -# endif /* PNG_PRIVATE */ -# endif /* PNGLIB_BUILD */ -# endif /* __GNUC__ */ -#endif /* PNG_PEDANTIC_WARNINGS */ - -#ifndef PNG_DEPRECATED -# define PNG_DEPRECATED /* Use of this function is deprecated */ -#endif -#ifndef PNG_USE_RESULT -# define PNG_USE_RESULT /* The result of this function must be checked */ -#endif -#ifndef PNG_NORETURN -# define PNG_NORETURN /* This function does not return */ -#endif -#ifndef PNG_ALLOCATED -# define PNG_ALLOCATED /* The result of the function is new memory */ -#endif -#ifndef PNG_DEPSTRUCT -# define PNG_DEPSTRUCT /* Access to this struct member is deprecated */ -#endif -#ifndef PNG_PRIVATE -# define PNG_PRIVATE /* This is a private libpng function */ -#endif -#ifndef PNG_FP_EXPORT /* A floating point API. */ -# ifdef PNG_FLOATING_POINT_SUPPORTED -# define PNG_FP_EXPORT(ordinal, type, name, args)\ - PNG_EXPORT(ordinal, type, name, args) -# else /* No floating point APIs */ -# define PNG_FP_EXPORT(ordinal, type, name, args) -# endif -#endif -#ifndef PNG_FIXED_EXPORT /* A fixed point API. */ -# ifdef PNG_FIXED_POINT_SUPPORTED -# define PNG_FIXED_EXPORT(ordinal, type, name, args)\ - PNG_EXPORT(ordinal, type, name, args) -# else /* No fixed point APIs */ -# define PNG_FIXED_EXPORT(ordinal, type, name, args) -# endif -#endif - -/* The following uses const char * instead of char * for error - * and warning message functions, so some compilers won't complain. - * If you do not want to use const, define PNG_NO_CONST here. - * - * This should not change how the APIs are called, so it can be done - * on a per-file basis in the application. 
- */ -#ifndef PNG_CONST -# ifndef PNG_NO_CONST -# define PNG_CONST const -# else -# define PNG_CONST -# endif -#endif - -/* Some typedefs to get us started. These should be safe on most of the - * common platforms. The typedefs should be at least as large as the - * numbers suggest (a png_uint_32 must be at least 32 bits long), but they - * don't have to be exactly that size. Some compilers dislike passing - * unsigned shorts as function parameters, so you may be better off using - * unsigned int for png_uint_16. - */ - -#if defined(INT_MAX) && (INT_MAX > 0x7ffffffeL) -typedef unsigned int png_uint_32; -typedef int png_int_32; -#else -typedef unsigned long png_uint_32; -typedef long png_int_32; -#endif -typedef unsigned short png_uint_16; -typedef short png_int_16; -typedef unsigned char png_byte; - -#ifdef PNG_NO_SIZE_T -typedef unsigned int png_size_t; -#else -typedef size_t png_size_t; -#endif -#define png_sizeof(x) (sizeof (x)) - -/* The following is needed for medium model support. It cannot be in the - * pngpriv.h header. Needs modification for other compilers besides - * MSC. Model independent support declares all arrays and pointers to be - * large using the far keyword. The zlib version used must also support - * model independent data. As of version zlib 1.0.4, the necessary changes - * have been made in zlib. The USE_FAR_KEYWORD define triggers other - * changes that are needed. (Tim Wegner) - */ - -/* Separate compiler dependencies (problem here is that zlib.h always - * defines FAR. (SJT) - */ -#ifdef __BORLANDC__ -# if defined(__LARGE__) || defined(__HUGE__) || defined(__COMPACT__) -# define LDATA 1 -# else -# define LDATA 0 -# endif - /* GRR: why is Cygwin in here? Cygwin is not Borland C... 
*/ -# if !defined(__WIN32__) && !defined(__FLAT__) && !defined(__CYGWIN__) -# define PNG_MAX_MALLOC_64K /* only used in build */ -# if (LDATA != 1) -# ifndef FAR -# define FAR __far -# endif -# define USE_FAR_KEYWORD -# endif /* LDATA != 1 */ - /* Possibly useful for moving data out of default segment. - * Uncomment it if you want. Could also define FARDATA as - * const if your compiler supports it. (SJT) -# define FARDATA FAR - */ -# endif /* __WIN32__, __FLAT__, __CYGWIN__ */ -#endif /* __BORLANDC__ */ - - -/* Suggest testing for specific compiler first before testing for - * FAR. The Watcom compiler defines both __MEDIUM__ and M_I86MM, - * making reliance oncertain keywords suspect. (SJT) - */ - -/* MSC Medium model */ -#ifdef FAR -# ifdef M_I86MM -# define USE_FAR_KEYWORD -# define FARDATA FAR -# include -# endif -#endif - -/* SJT: default case */ -#ifndef FAR -# define FAR -#endif - -/* At this point FAR is always defined */ -#ifndef FARDATA -# define FARDATA -#endif - -/* Typedef for floating-point numbers that are converted - * to fixed-point with a multiple of 100,000, e.g., gamma - */ -typedef png_int_32 png_fixed_point; - -/* Add typedefs for pointers */ -typedef void FAR * png_voidp; -typedef PNG_CONST void FAR * png_const_voidp; -typedef png_byte FAR * png_bytep; -typedef PNG_CONST png_byte FAR * png_const_bytep; -typedef png_uint_32 FAR * png_uint_32p; -typedef PNG_CONST png_uint_32 FAR * png_const_uint_32p; -typedef png_int_32 FAR * png_int_32p; -typedef PNG_CONST png_int_32 FAR * png_const_int_32p; -typedef png_uint_16 FAR * png_uint_16p; -typedef PNG_CONST png_uint_16 FAR * png_const_uint_16p; -typedef png_int_16 FAR * png_int_16p; -typedef PNG_CONST png_int_16 FAR * png_const_int_16p; -typedef char FAR * png_charp; -typedef PNG_CONST char FAR * png_const_charp; -typedef png_fixed_point FAR * png_fixed_point_p; -typedef PNG_CONST png_fixed_point FAR * png_const_fixed_point_p; -typedef png_size_t FAR * png_size_tp; -typedef PNG_CONST png_size_t FAR * 
png_const_size_tp; - -#ifdef PNG_STDIO_SUPPORTED -typedef FILE * png_FILE_p; -#endif - -#ifdef PNG_FLOATING_POINT_SUPPORTED -typedef double FAR * png_doublep; -typedef PNG_CONST double FAR * png_const_doublep; -#endif - -/* Pointers to pointers; i.e. arrays */ -typedef png_byte FAR * FAR * png_bytepp; -typedef png_uint_32 FAR * FAR * png_uint_32pp; -typedef png_int_32 FAR * FAR * png_int_32pp; -typedef png_uint_16 FAR * FAR * png_uint_16pp; -typedef png_int_16 FAR * FAR * png_int_16pp; -typedef PNG_CONST char FAR * FAR * png_const_charpp; -typedef char FAR * FAR * png_charpp; -typedef png_fixed_point FAR * FAR * png_fixed_point_pp; -#ifdef PNG_FLOATING_POINT_SUPPORTED -typedef double FAR * FAR * png_doublepp; -#endif - -/* Pointers to pointers to pointers; i.e., pointer to array */ -typedef char FAR * FAR * FAR * png_charppp; - -/* png_alloc_size_t is guaranteed to be no smaller than png_size_t, - * and no smaller than png_uint_32. Casts from png_size_t or png_uint_32 - * to png_alloc_size_t are not necessary; in fact, it is recommended - * not to use them at all so that the compiler can complain when something - * turns out to be problematic. - * Casts in the other direction (from png_alloc_size_t to png_size_t or - * png_uint_32) should be explicitly applied; however, we do not expect - * to encounter practical situations that require such conversions. - */ -#if defined(__TURBOC__) && !defined(__FLAT__) - typedef unsigned long png_alloc_size_t; -#else -# if defined(_MSC_VER) && defined(MAXSEG_64K) - typedef unsigned long png_alloc_size_t; -# else - /* This is an attempt to detect an old Windows system where (int) is - * actually 16 bits, in that case png_malloc must have an argument with a - * bigger size to accomodate the requirements of the library. 
- */ -# if (defined(_Windows) || defined(_WINDOWS) || defined(_WINDOWS_)) && \ - (!defined(INT_MAX) || INT_MAX <= 0x7ffffffeL) - typedef DWORD png_alloc_size_t; -# else - typedef png_size_t png_alloc_size_t; -# endif -# endif -#endif - -#endif /* PNGCONF_H */ diff --git a/exsrc/src/readline/shobj-conf b/exsrc/src/readline/shobj-conf deleted file mode 100644 index 663869a819..0000000000 --- a/exsrc/src/readline/shobj-conf +++ /dev/null @@ -1,579 +0,0 @@ -#! /bin/sh -# -# shobj-conf -- output a series of variable assignments to be substituted -# into a Makefile by configure which specify system-dependent -# information for creating shared objects that may be loaded -# into bash with `enable -f' -# -# usage: shobj-conf [-C compiler] -c host_cpu -o host_os -v host_vendor -# -# Chet Ramey -# chet@po.cwru.edu - -# Copyright (C) 1996-2009 Free Software Foundation, Inc. -# -# This file is part of GNU Bash, the Bourne Again SHell. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# - -# -# defaults -# -SHOBJ_STATUS=supported -SHLIB_STATUS=supported - -SHOBJ_CC=cc -SHOBJ_CFLAGS= -SHOBJ_LD= -SHOBJ_LDFLAGS= -SHOBJ_XLDFLAGS= -SHOBJ_LIBS= - -SHLIB_XLDFLAGS= -SHLIB_LIBS='-ltermcap' - -SHLIB_DOT='.' 
-SHLIB_LIBPREF='lib' -SHLIB_LIBSUFF='so' - -SHLIB_LIBVERSION='$(SHLIB_LIBSUFF)' -SHLIB_DLLVERSION='$(SHLIB_MAJOR)' - -PROGNAME=`basename $0` -USAGE="$PROGNAME [-C compiler] -c host_cpu -o host_os -v host_vendor" - -while [ $# -gt 0 ]; do - case "$1" in - -C) shift; SHOBJ_CC="$1"; shift ;; - -c) shift; host_cpu="$1"; shift ;; - -o) shift; host_os="$1"; shift ;; - -v) shift; host_vendor="$1"; shift ;; - *) echo "$USAGE" >&2 ; exit 2;; - esac -done - -case "${host_os}-${SHOBJ_CC}-${host_vendor}" in -sunos4*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD=/usr/bin/ld - SHOBJ_LDFLAGS='-assert pure-text' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -sunos4*) - SHOBJ_CFLAGS=-pic - SHOBJ_LD=/usr/bin/ld - SHOBJ_LDFLAGS='-assert pure-text' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -sunos5*-*gcc*|solaris2*-*gcc*) - SHOBJ_LD='${CC}' - ld_used=`gcc -print-prog-name=ld` - if ${ld_used} -V 2>&1 | grep GNU >/dev/null 2>&1; then - # This line works for the GNU ld - SHOBJ_LDFLAGS='-shared -Wl,-h,$@' - # http://sourceware.org/ml/binutils/2001-08/msg00361.html - SHOBJ_CFLAGS=-fPIC - else - # This line works for the Solaris linker in /usr/ccs/bin/ld - SHOBJ_LDFLAGS='-shared -Wl,-i -Wl,-h,$@' - SHOBJ_CFLAGS=-fpic - fi - -# SHLIB_XLDFLAGS='-R $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sunos5*|solaris2*) - SHOBJ_CFLAGS='-K pic' - SHOBJ_LD=/usr/ccs/bin/ld - SHOBJ_LDFLAGS='-G -dy -z text -i -h $@' - -# SHLIB_XLDFLAGS='-R $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -# All versions of Linux (including Gentoo/FreeBSD) or the semi-mythical GNU Hurd. 
-linux*-*|gnu*-*|k*bsd*-gnu-*|freebsd*-gentoo) - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir) -Wl,-soname,`basename $@ $(SHLIB_MINOR)`' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -freebsd2*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-x -Bshareable' - - SHLIB_XLDFLAGS='-R$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -# FreeBSD-3.x ELF -freebsd3*|freebsdaout*) - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - - if [ -x /usr/bin/objformat ] && [ "`/usr/bin/objformat`" = "elf" ]; then - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - else - SHOBJ_LDFLAGS='-shared' - - SHLIB_XLDFLAGS='-R$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - fi - ;; - -# FreeBSD-4.x and later have only ELF -freebsd[4-9]*|freebsdelf*|dragonfly*) - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -# Darwin/MacOS X -darwin[89]*|darwin10*) - SHOBJ_STATUS=supported - SHLIB_STATUS=supported - - SHOBJ_CFLAGS='-fno-common' - - SHOBJ_LD='MACOSX_DEPLOYMENT_TARGET=10.3 ${CC}' - - SHLIB_LIBVERSION='$(SHLIB_MAJOR)$(SHLIB_MINOR).$(SHLIB_LIBSUFF)' - SHLIB_LIBSUFF='dylib' - - SHOBJ_LDFLAGS='-dynamiclib -dynamic -undefined dynamic_lookup -arch_only `/usr/bin/arch`' - SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v' - - SHLIB_LIBS='-lncurses' # see if -lcurses works on MacOS X 10.1 - ;; - -darwin*|macosx*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=supported - - SHOBJ_CFLAGS='-fno-common' - - SHOBJ_LD='${CC}' - - SHLIB_LIBVERSION='$(SHLIB_MAJOR)$(SHLIB_MINOR).$(SHLIB_LIBSUFF)' - SHLIB_LIBSUFF='dylib' 
- - case "${host_os}" in - darwin[789]*|darwin10*) SHOBJ_LDFLAGS='' - SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v' - ;; - *) SHOBJ_LDFLAGS='-dynamic' - SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v' - ;; - esac - - SHLIB_LIBS='-lncurses' # see if -lcurses works on MacOS X 10.1 - ;; - -openbsd*|netbsd*) - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_XLDFLAGS='-R$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -bsdi2*) - SHOBJ_CC=shlicc2 - SHOBJ_CFLAGS= - SHOBJ_LD=ld - SHOBJ_LDFLAGS=-r - SHOBJ_LIBS=-lc_s.2.1.0 - - # BSD/OS 2.x and 3.x `shared libraries' are too much of a pain in - # the ass -- they require changing {/usr/lib,etc}/shlib.map on - # each system, and the library creation process is byzantine - SHLIB_STATUS=unsupported - ;; - -bsdi3*) - SHOBJ_CC=shlicc2 - SHOBJ_CFLAGS= - SHOBJ_LD=ld - SHOBJ_LDFLAGS=-r - SHOBJ_LIBS=-lc_s.3.0.0 - - # BSD/OS 2.x and 3.x `shared libraries' are too much of a pain in - # the ass -- they require changing {/usr/lib,etc}/shlib.map on - # each system, and the library creation process is byzantine - SHLIB_STATUS=unsupported - ;; - -bsdi4*) - # BSD/OS 4.x now supports ELF and SunOS-style dynamically-linked - # shared libraries. gcc 2.x is the standard compiler, and the - # `normal' gcc options should work as they do in Linux. 
- - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-Wl,-soname,`basename $@ $(SHLIB_MINOR)`' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -osf*-*gcc*) - # Fix to use gcc linker driver from bfischer@TechFak.Uni-Bielefeld.DE - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-rpath $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -osf*) - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-shared -soname $@ -expect_unresolved "*"' - - SHLIB_XLDFLAGS='-rpath $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -aix4.[2-9]*-*gcc*|aix[5-9].*-*gcc*) # lightly tested by jik@cisco.com - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='ld' - SHOBJ_LDFLAGS='-bdynamic -bnoentry -bexpall' - SHOBJ_XLDFLAGS='-G' - - SHLIB_XLDFLAGS='-bM:SRE' - SHLIB_LIBS='-lcurses -lc' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -aix4.[2-9]*|aix[5-9].*) - SHOBJ_CFLAGS=-K - SHOBJ_LD='ld' - SHOBJ_LDFLAGS='-bdynamic -bnoentry -bexpall' - SHOBJ_XLDFLAGS='-G' - - SHLIB_XLDFLAGS='-bM:SRE' - SHLIB_LIBS='-lcurses -lc' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -# -# THE FOLLOWING ARE UNTESTED -- and some may not support the dlopen interface -# -irix[56]*-*gcc*) - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -irix[56]*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld -# SHOBJ_LDFLAGS='-call_shared -hidden_symbol -no_unresolved -soname $@' -# Change from David Kaelbling . 
If you have problems, -# remove the `-no_unresolved' - SHOBJ_LDFLAGS='-shared -no_unresolved -soname $@' - - SHLIB_XLDFLAGS='-rpath $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -hpux9*-*gcc*) - # must use gcc; the bundled cc cannot compile PIC code - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-b -Wl,+s' - - SHLIB_XLDFLAGS='-Wl,+b,$(libdir)' - SHLIB_LIBSUFF='sl' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -hpux9*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - - # If you are using the HP ANSI C compiler, you can uncomment and use - # this code (I have not tested it) -# SHOBJ_STATUS=supported -# SHLIB_STATUS=supported -# -# SHOBJ_CFLAGS='+z' -# SHOBJ_LD='ld' -# SHOBJ_LDFLAGS='-b +s' -# -# SHLIB_XLDFLAGS='+b $(libdir)' -# SHLIB_LIBSUFF='sl' -# SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - - ;; - -hpux10*-*gcc*) - # must use gcc; the bundled cc cannot compile PIC code - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' - # if you have problems linking here, moving the `-Wl,+h,$@' from - # SHLIB_XLDFLAGS to SHOBJ_LDFLAGS has been reported to work - SHOBJ_LDFLAGS='-shared -fpic -Wl,-b -Wl,+s' - - SHLIB_XLDFLAGS='-Wl,+h,$@ -Wl,+b,$(libdir)' - SHLIB_LIBSUFF='sl' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -hpux10*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - - # If you are using the HP ANSI C compiler, you can uncomment and use - # this code (I have not tested it) -# SHOBJ_STATUS=supported -# SHLIB_STATUS=supported -# -# SHOBJ_CFLAGS='+z' -# SHOBJ_LD='ld' -# SHOBJ_LDFLAGS='-b +s +h $@' -# -# SHLIB_XLDFLAGS='+b $(libdir)' -# SHLIB_LIBSUFF='sl' -# SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - - ;; - -hpux11*-*gcc*) - # must use gcc; the bundled cc cannot compile PIC code - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' -# SHOBJ_LDFLAGS='-shared -Wl,-b -Wl,-B,symbolic -Wl,+s -Wl,+std -Wl,+h,$@' - SHOBJ_LDFLAGS='-shared -fpic -Wl,-b -Wl,+s -Wl,+h,$@' - - 
SHLIB_XLDFLAGS='-Wl,+b,$(libdir)' - SHLIB_LIBSUFF='sl' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -hpux11*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - - # If you are using the HP ANSI C compiler, you can uncomment and use - # this code (I have not tested it) -# SHOBJ_STATUS=supported -# SHLIB_STATUS=supported -# -# SHOBJ_CFLAGS='+z' -# SHOBJ_LD='ld' -# SHOBJ_LDFLAGS='-b +s +h $@' -# -# SHLIB_XLDFLAGS='+b $(libdir)' -# SHLIB_LIBSUFF='sl' -# SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - - ;; - -sysv4*-*gcc*) - SHOBJ_CFLAGS=-shared - SHOBJ_LDFLAGS='-shared -h $@' - SHOBJ_LD='${CC}' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv4*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-dy -z text -G -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sco3.2v5*-*gcc*) - SHOBJ_CFLAGS='-fpic' # DEFAULTS TO ELF - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sco3.2v5*) - SHOBJ_CFLAGS='-K pic -b elf' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -b elf -dy -z text -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5uw7*-*gcc*) - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5uw7*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -dy -z text -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5UnixWare*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5UnixWare*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -dy -z text -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5OpenUNIX*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5OpenUNIX*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -dy 
-z text -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -dgux*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -dgux*) - SHOBJ_CFLAGS='-K pic' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -dy -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -msdos*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - ;; - -cygwin*) - SHOBJ_LD='$(CC)' - SHOBJ_LDFLAGS='-shared -Wl,--enable-auto-import -Wl,--enable-auto-image-base -Wl,--export-all -Wl,--out-implib=$(@).a' - SHLIB_LIBPREF='cyg' - SHLIB_LIBSUFF='dll' - SHLIB_LIBVERSION='$(SHLIB_DLLVERSION).$(SHLIB_LIBSUFF)' - SHLIB_LIBS='$(TERMCAP_LIB)' - - SHLIB_DOT= - # For official cygwin releases, DLLVERSION will be defined in the - # environment of configure, and will be incremented any time the API - # changes in a non-backwards compatible manner. Otherwise, it is just - # SHLIB_MAJOR. - if [ -n "$DLLVERSION" ] ; then - SHLIB_DLLVERSION="$DLLVERSION" - fi - ;; - -mingw*) - SHOBJ_LD='$(CC)' - SHOBJ_LDFLAGS='-shared -Wl,--enable-auto-import -Wl,--enable-auto-image-base -Wl,--export-all -Wl,--out-implib=$(@).a' - SHLIB_LIBSUFF='dll' - SHLIB_LIBVERSION='$(SHLIB_DLLVERSION).$(SHLIB_LIBSUFF)' - SHLIB_LIBS='$(TERMCAP_LIB)' - - SHLIB_DOT= - # For official cygwin releases, DLLVERSION will be defined in the - # environment of configure, and will be incremented any time the API - # changes in a non-backwards compatible manner. Otherwise, it is just - # SHLIB_MAJOR. 
- if [ -n "$DLLVERSION" ] ; then - SHLIB_DLLVERSION="$DLLVERSION" - fi - ;; - -# -# Rely on correct gcc configuration for everything else -# -*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - ;; - -esac - -echo SHOBJ_CC=\'"$SHOBJ_CC"\' -echo SHOBJ_CFLAGS=\'"$SHOBJ_CFLAGS"\' -echo SHOBJ_LD=\'"$SHOBJ_LD"\' -echo SHOBJ_LDFLAGS=\'"$SHOBJ_LDFLAGS"\' -echo SHOBJ_XLDFLAGS=\'"$SHOBJ_XLDFLAGS"\' -echo SHOBJ_LIBS=\'"$SHOBJ_LIBS"\' - -echo SHLIB_XLDFLAGS=\'"$SHLIB_XLDFLAGS"\' -echo SHLIB_LIBS=\'"$SHLIB_LIBS"\' - -echo SHLIB_DOT=\'"$SHLIB_DOT"\' - -echo SHLIB_LIBPREF=\'"$SHLIB_LIBPREF"\' -echo SHLIB_LIBSUFF=\'"$SHLIB_LIBSUFF"\' - -echo SHLIB_LIBVERSION=\'"$SHLIB_LIBVERSION"\' -echo SHLIB_DLLVERSION=\'"$SHLIB_DLLVERSION"\' - -echo SHOBJ_STATUS=\'"$SHOBJ_STATUS"\' -echo SHLIB_STATUS=\'"$SHLIB_STATUS"\' - -exit 0 diff --git a/exsrc/src/yes.txt b/exsrc/src/yes.txt deleted file mode 100644 index c6991e8fe8..0000000000 --- a/exsrc/src/yes.txt +++ /dev/null @@ -1,2 +0,0 @@ -yes - diff --git a/exsrc/twisted.sh b/exsrc/twisted.sh deleted file mode 100755 index fafb9ea76d..0000000000 --- a/exsrc/twisted.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -PACKAGE="Twisted" -. ./prolog.sh -# Twisted. -(cd Twisted-*/zope.interface*; ${prefix}/${version}/bin/python setup.py build ${D} install; cd .. ; ${prefix}/${version}/bin/python setup.py build ${D} install) - diff --git a/exsrc/vtk.sh b/exsrc/vtk.sh deleted file mode 100755 index 7f15b4f500..0000000000 --- a/exsrc/vtk.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -PACKAGE="VTK" -. 
./prolog.sh -( BUILD_DIR=`pwd`;\ - cd VTK*; \ - sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \ - -e 's/PY_VERSION/2.4/g' \ - -e 's@CDAT_BUILD_DIR@'${BUILD_DIR}'@g' \ - -e 's/TCLTK_VERSION/8.4/g' ../../VTK_BUILD_ANSWERS.core > VTK_BUILD_ANSWERS.feed ; \ - mkdir -p ${prefix}/Externals/VTK;\ - cp VTK_BUILD_ANSWERS.feed ${prefix}/Externals/VTK/CMakeCache.txt ; - cd ${prefix}/Externals/VTK ;\ - ${prefix}/Externals/bin/cmake CMakeCache.txt ;\ - make; make install ; \ - cd Wrapping/Python ; \ - ${prefix}/${version}/bin/python setup.py install; \ -) diff --git a/exsrc/xgks.sh b/exsrc/xgks.sh deleted file mode 100755 index 5061fb5411..0000000000 --- a/exsrc/xgks.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh -PACKAGE="xgks" -OS=`uname` -. ./prolog.sh -# xgks -if ( test "${OS}" = 'Darwin' ) then - CPP_X11="-I/usr/X11R6/include"; export CPP_X11 -fi -LD_X11=""; export LD_X11 -FC='';export FC -# The configure step will make a header file udposix.h that vcs needs -cd xgks -./configure --prefix=${prefix}/Externals || exit 1 -echo "Installing udposix.h" -/bin/rm -fr ${prefix}/Externals/include/udposix.h || exit 1 -/bin/cp port/misc/udposix.h ${prefix}/Externals/include/udposix.h || exit 1 -make port/all || exit 1 -make port/install || exit 1 -# added the CXX define for MacOS -make CXX=cc fontdb/all || exit 1 -make CXX=cc fontdb/install || exit 1 diff --git a/images/2leftarrow.gif b/images/2leftarrow.gif deleted file mode 100644 index 0f42224dad..0000000000 Binary files a/images/2leftarrow.gif and /dev/null differ diff --git a/images/2rightarrow.gif b/images/2rightarrow.gif deleted file mode 100644 index befbdfad31..0000000000 Binary files a/images/2rightarrow.gif and /dev/null differ diff --git a/images/ASD.scr b/images/ASD.scr deleted file mode 100755 index dfc4be3e4e..0000000000 --- a/images/ASD.scr +++ /dev/null @@ -1,1268 +0,0 @@ -Tt_ASD1(3,1,1,0.2,1) -Tt_ASD2(3,1,1,0.2,1) -To_ASD1(0.03,0,r,c,h) -To_ASD2(0.019,0,r,c,h) -P_ASD( - 
File(p=1,x=0.0725,y=0.02125,Tt=default,To=default), - Function(p=1,x=0.0725,y=0.02125,Tt=default,To=default), - LogicalMask(p=1,x=0.0725,y=0.03625,Tt=default,To=default), - Transform(p=1,x=0.0725,y=0.05125,Tt=default,To=default), - source(p=1,x=0.0725,y=0.70375,Tt=default,To=default), - name(p=1,x=0.0725,y=0.68875,Tt=default,To=default), - title(p=1,x=0.1675,y=0.68875,Tt=default,To=default), - units(p=1,x=0.6615,y=0.68875,Tt=default,To=default), - crdate(p=1,x=0.7375,y=0.68875,Tt=default,To=default), - crtime(p=1,x=0.8325,y=0.68875,Tt=default,To=default), - comment#1(p=1,x=0.909091,y=0.0466611,Tt=default,To=default), - comment#2(p=1,x=0.12,y=0.72875,Tt=default,To=default), - comment#3(p=1,x=0.12,y=0.74375,Tt=default,To=default), - comment#4(p=1,x=0.85,y=0.070,Tt=default,To=default), - xname(p=1,x=0.499345,y=0.17035,Tt=default,To=defcenter), - yname(p=1,x=0.0169,y=0.420034,Tt=default,To=defcentup), - zname(p=1,x=0.025,y=0.80875,Tt=default,To=default), - tname(p=1,x=0.025,y=0.80875,Tt=default,To=default), - xunits(p=0,x=0.595,y=0.22125,Tt=default,To=default), - yunits(p=0,x=0.044,y=0.48875,Tt=default,To=defcentup), - zunits(p=1,x=0.025,y=0.80875,Tt=default,To=default), - tunits(p=1,x=0.025,y=0.80875,Tt=default,To=default), - xvalue(p=1,x=0.785,y=0.70375,Th=default,Tt=default,To=default), - yvalue(p=1,x=0.785,y=0.68875,Th=default,Tt=default,To=default), - zvalue(p=1,x=0.785,y=0.67375,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.785,y=0.65875,Th=default,Tt=default,To=default), - mean(p=1,x=0.0725,y=0.66875,Th=default,Tt=default,To=default), - max(p=1,x=0.2625,y=0.66875,Th=default,Tt=default,To=default), - min(p=1,x=0.4525,y=0.66875,Th=default,Tt=default,To=default), - xtic#1(p=1,y1=0.21125,y2=0.20175,Tl=default), - xtic#2(p=1,y1=0.63875,y2=0.64825,Tl=default), - xmintic#a(p=1,y1=0.21125,y2=0.2065,Tl=default), - xmintic#b(p=1,y1=0.64825,y2=0.6535,Tl=default), - ytic#1(p=1,x1=0.0725,x2=0.063,Tl=default), - ytic#2(p=1,x1=0.9275,x2=0.937,Tl=default), - 
ymintic#a(p=1,x1=0.0725,x2=0.06775,Tl=default), - ymintic#b(p=1,x1=0.9275,x2=0.93225,Tl=default), - xlabel#1(p=1,y=0.19035,Tt=default,To=defcenter), - xlabel#2(p=1,y=0.66152,Tt=default,To=defcenter), - ylabel#1(p=1,x=0.063,Tt=default,To=defright), - ylabel#2(p=1,x=0.937,Tt=default,To=default), - box#1(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875,Tl=default), - box#2(p=0,x1=0.025,y1=0.23975,x2=0.899,y2=0.65775,Tl=default), - box#3(p=0,x1=0.025,y1=0.24925,x2=0.8895,y2=0.64825,Tl=default), - box#4(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - line#1(p=0,x1=0.0725,y1=0.425,x2=0.9275,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.21125,x2=0.5,y2=0.63875,Tl=default), - line#3(p=0,x1=0.025,y1=0.78125,x2=0.88,y2=0.78125,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.0725,y1=0.11625,x2=0.9275,y2=0.13525,Tt=default,To=defcenter,Tl=default), - data(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875) ) -P_ASD_dud( - File(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - Function(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - LogicalMask(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - Transform(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - source(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - name(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - title(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - units(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - crdate(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - crtime(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - comment#1(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - comment#2(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - comment#3(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - comment#4(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - xname(p=0,x=0.025,y=0.0112546,Tt=default,To=defcenter), - yname(p=0,x=0.025,y=0.01125,Tt=default,To=defcentup), - zname(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - 
tname(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - xunits(p=0,x=0.025,y=0.01125,Tt=default,To=default), - yunits(p=0,x=0.025,y=0.01125,Tt=default,To=defcentup), - zunits(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - tunits(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - xvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - tvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - mean(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - max(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - min(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.0212495,y2=0.0212495,Tl=default), - xtic#2(p=0,y1=0.0212495,y2=0.0212495,Tl=default), - xmintic#a(p=0,y1=0.0212495,y2=0.0212495,Tl=default), - xmintic#b(p=0,y1=0.0212495,y2=0.0212495,Tl=default), - ytic#1(p=0,x1=0.025,x2=0.025,Tl=default), - ytic#2(p=0,x1=0.025,x2=0.025,Tl=default), - ymintic#a(p=0,x1=0.025,x2=0.025,Tl=default), - ymintic#b(p=0,x1=0.025,x2=0.025,Tl=default), - xlabel#1(p=0,y=0.0212495,Tt=default,To=defcenter), - xlabel#2(p=0,y=0.02125,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.025,Tt=default,To=defright), - ylabel#2(p=0,x=0.025,Tt=default,To=default), - box#1(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default), - box#2(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - box#3(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - box#4(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - line#1(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default), - line#2(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default), - line#3(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - line#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - legend(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tt=default,To=defcenter,Tl=default), - 
data(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875) ) -P_ASD1( - File(p=0,x=0.0669935,y=0.0152291,Tt=default,To=default), - Function(p=0,x=0.0669935,y=0.0152291,Tt=default,To=default), - LogicalMask(p=1,x=0.0780229,y=0.00653595,Tt=default,To=default), - Transform(p=1,x=0.0780229,y=0.0163235,Tt=default,To=default), - source(p=0,x=0.0669935,y=0.717229,Tt=default,To=default), - name(p=0,x=0.0669935,y=0.705229,Tt=default,To=default), - title(p=1,x=0.348809,y=0.705235,Tt=ASD1,To=ASD1), - units(p=0,x=0.686993,y=0.705229,Tt=default,To=default), - crdate(p=0,x=0.766993,y=0.705229,Tt=default,To=default), - crtime(p=0,x=0.866993,y=0.705229,Tt=default,To=default), - comment#1(p=1,x=0.2,y=0.025,Tt=ASD2,To=ASD2), - comment#2(p=1,x=0.1,y=0.025,Tt=ASD2,To=ASD2), - comment#3(p=0,x=0.139052,y=0.711242,Tt=default,To=default), - comment#4(p=1,x=0.0339869,y=0.360785,Tt=default,To=defcentup), - xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter), - yname(p=1,x=0.0221,y=0.327701,Tt=default,To=defcentup), - zname(p=1,x=0.0169935,y=0.789542,Tt=default,To=default), - tname(p=1,x=0.0169935,y=0.789542,Tt=default,To=default), - xunits(p=0,x=0.616993,y=0.215229,Tt=default,To=default), - yunits(p=0,x=0.0369935,y=0.505229,Tt=default,To=defcentup), - zunits(p=1,x=0.0169935,y=0.789542,Tt=default,To=default), - tunits(p=1,x=0.0169935,y=0.789542,Tt=default,To=default), - xvalue(p=1,x=0.993464,y=0.672091,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.816993,y=0.695229,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.816993,y=0.685229,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.993464,y=0.642729,Th=default,Tt=default,To=default), - mean(p=0,x=0.0669935,y=0.685229,Th=default,Tt=default,To=default), - max(p=0,x=0.266993,y=0.685229,Th=default,Tt=default,To=default), - min(p=0,x=0.466993,y=0.685229,Th=default,Tt=default,To=default), - xtic#1(p=1,y1=0.0640523,y2=0.0724123,Tl=default), - xtic#2(p=1,y1=0.624837,y2=0.616477,Tl=default), - xmintic#a(p=0,y1=0.0640523, 
y2=0.067996695,Tl=default), - xmintic#b(p=0,y1=0.620657,y2=0.624837,Tl=default), - ytic#1(p=1,x1=0.1071242,x2=0.115306,Tl=default), - ytic#2(p=1,x1=0.819543,x2=0.811361,Tl=default), - ymintic#a(p=0,x1=0.1071242,x2=0.1112151,Tl=default), - ymintic#b(p=0,x1=0.819543,x2=0.815452,Tl=default), - xlabel#1(p=1,y=0.0522873,Tt=default,To=defcenter), - xlabel#2(p=0,y=0.64152,Tt=default,To=defcenter), - ylabel#1(p=1,x=0.0979738,Tt=default,To=defright), - ylabel#2(p=0,x=0.827,Tt=default,To=default), - box#1(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default), - box#2(p=0,x1=0.0169935,y1=0.235229,x2=0.936993,y2=0.675229,Tl=default), - box#3(p=0,x1=0.0169935,y1=0.245229,x2=0.926993,y2=0.665229,Tl=default), - box#4(p=0,x1=0.0169935,y1=0.00522876,x2=0.0169935,y2=0.00522876,Tl=default), - line#1(p=0,x1=0.0669935,y1=0.430229,x2=0.966993,y2=0.430229,Tl=default), - line#2(p=0,x1=0.516993,y1=0.205229,x2=0.516993,y2=0.655229,Tl=default), - line#3(p=0,x1=0.0169935,y1=0.405229,x2=0.916993,y2=0.405229,Tl=default), - line#4(p=0,x1=0.0169935,y1=0.805229,x2=0.916993,y2=0.805229,Tl=default), - legend(p=1,x1=0.863636,y1=0.617701,x2=0.909091,y2=0.617701,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD2( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - 
comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.3,y=0.15,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.3,y=0.15,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.599123,x2=0.909091,y2=0.599123,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) 
-Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD3( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=1,x=0.5,y=0.726797,Tt=ASD1,To=ASD1), - comment#2(p=1,x=0.5,y=0.691504,Tt=ASD2,To=ASD2), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.0104575,y=0.360785,Tt=default,To=defcentup), - xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.0640523,y2=0.0724123,Tl=default), - xtic#2(p=0,y1=0.624837,y2=0.616477,Tl=default), - xmintic#a(p=1,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=1,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.1071242,x2=0.085306,Tl=default), - ytic#2(p=0,x1=0.819543,x2=0.781361,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - 
ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.0522873,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.0679738,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.580546,x2=0.909091,y2=0.580546,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD4( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=1,x=0.5,y=0.726797,Tt=ASD1,To=ASD1), - comment#2(p=1,x=0.5,y=0.691504,Tt=ASD1,To=ASD1), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.0104575,y=0.360785,Tt=default,To=defcentup), - xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - 
tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.0640523,y2=0.0724123,Tl=default), - xtic#2(p=0,y1=0.624837,y2=0.616477,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.1071242,x2=0.085306,Tl=default), - ytic#2(p=0,x1=0.819543,x2=0.781361,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.0522873,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.0679738,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.557324,x2=0.909091,y2=0.557324,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD5( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - 
name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - 
box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.538747,x2=0.909091,y2=0.538747,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD6( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - 
min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.520169,x2=0.909091,y2=0.520169,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD7( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - 
xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.501592,x2=0.909091,y2=0.501592,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD8( - 
File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - 
ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.483014,x2=0.909091,y2=0.483014,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD9( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - 
zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.464437,x2=0.909091,y2=0.464437,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD10( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - 
comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - 
legend(p=1,x1=0.863636,y1=0.445859,x2=0.909091,y2=0.445859,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD11( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.402615,y=0.104575,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - 
ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.427282,x2=0.909091,y2=0.427282,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD12( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - 
zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.408704,x2=0.909091,y2=0.408704,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD13( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - 
name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - 
box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.390127,x2=0.909091,y2=0.390127,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD14( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=1,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - 
min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.371549,x2=0.909091,y2=0.371549,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD15( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - 
xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.352972,x2=0.909091,y2=0.352972,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_mwbotcenter(1,1,1,0.2,1) -To_mwbotcenter(0.01,0,r,c,b) 
-Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -Gfb_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - level_1=1e+20,level_2=1e+20,color_1=16,color_2=239,legend_type=0, - legend=(), - ext_1=n,ext_2=n,missing=241) -Gcon_ASD( - projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,yticlabels#1=lat20, - yticlabels#2=lat20, - datawc(-180,-90,180,90), - Tl=ASDCont, - Type=1, - ) -Tl_ASDCont(1,2.8,241) -Gfi_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,xmtics#1=*,xmtics#2=*, - yticlabels#1=*,yticlabels#2=*,ymtics#1=*,ymtics#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - missing=1e+20, - range - (id=0,level1=1e+20,level2=1e+20,Tf=default) ) -Gi_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - make_labels=n, - lines - (id=0,priority=1,level=0,increment=1e+20,hilite_ci=0, - label=*,Tl=default,Tt=default,To=default) - ) -Gfo_ASD( - projection=linear,xticlabels#1=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - Tf=default, - outline(1, 2, 3, 4, 5, 6, 7)) -Go_ASD( - projection=linear,xticlabels#1=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - Tl=default, - outline(1, 2, 3, 4, 5, 6, 7)) -GXy_ASD1( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD1,Tm=None) -Tl_ASD1(1,4.9,241) -Tm_ASD1(1,4.9,241) -GXy_ASD2( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD2,Tm=None) -Tl_ASD2(1,4.9,242) -Tm_ASD2(2,4.9,242) -GXy_ASD3( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - 
yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD3,Tm=None) -Tl_ASD3(1,4.9,243) -Tm_ASD3(3,4.9,243) -GXy_ASD4( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD4,Tm=None) -Tl_ASD4(1,4.9,244) -Tm_ASD4(4,4.9,244) -GXy_ASD5( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD5,Tm=None) -Tl_ASD5(1,4.9,245) -Tm_ASD5(5,4.9,245) -GXy_ASD6( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD6,Tm=None) -Tl_ASD6(1,4.9,246) -Tm_ASD6(6,4.9,246) -GXy_ASD7( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD7,Tm=None) -Tl_ASD7(1,4.9,247) -Tm_ASD7(7,4.9,247) -GXy_ASD8( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD8,Tm=None) -Tl_ASD8(1,4.9,248) -Tm_ASD8(8,4.9,248) -GXy_ASD9( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD9,Tm=None) -Tl_ASD9(1,4.9,249) -Tm_ASD9(9,4.9,249) -GXy_ASD10( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD10,Tm=None) -Tl_ASD10(1,4.9,250) -Tm_ASD10(10,4.9,250) -GXy_ASD11( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD11,Tm=None) -Tl_ASD11(1,4.9,251) -Tm_ASD11(11,4.9,251) -GXy_ASD12( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD12,Tm=None) -Tl_ASD12(1,4.9,252) -Tm_ASD12(12,4.9,252) -GXy_ASD13( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD13,Tm=None) -Tl_ASD13(1,4.9,253) 
-Tm_ASD13(13,4.9,253) -GXy_ASD14( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD14,Tm=None) -Tl_ASD14(1,4.9,254) -Tm_ASD14(14,4.9,254) -GXy_ASD15( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD15,Tm=None) -Tl_ASD15(1,4.9,255) -Tm_ASD15(15,4.9,255) -GYx_ASD1( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - Tl=ASD1,Tm=none) -GYx_ASD2( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,40,120,100), - xaxisconvert=linear, - Tl=ASD2,Tm=none) -GYx_ASD3( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,40,120,100), - xaxisconvert=linear, - Tl=ASD3,Tm=none) -GYx_ASD4( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD4,Tm=none) -GYx_ASD5( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD5,Tm=none) -GYx_ASD6( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD6,Tm=none) -GYx_ASD7( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD7,Tm=none) -GYx_ASD8( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD8,Tm=none) -GYx_ASD9( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD9,Tm=none) -GYx_ASD10( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - 
datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD10,Tm=none) -GYx_ASD11( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD11,Tm=none) -GYx_ASD12( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD12,Tm=none) -GYx_ASD13( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD13,Tm=none) -GYx_ASD14( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD14,Tm=none) -GYx_ASD15( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD15,Tm=none) -GSp_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - Tm=default) -Gv_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - Tl=default,vector_scale=1,vector_align=c,vector_type=2,ref_vector=1e+20) -C_ASD( - 100,100,100, 0,0,0, 84.7059,84.7059,84.7059, 31.7647,31.7647,31.7647, 100,100,100, 100,100,0, - 0,2.7451,100, 0,5.4902,100, 0,9.01961,100, 0,11.3725,100, 0,14.902,100, 0,17.6471,100, - 0,21.1765,100, 0,23.9216,100, 0,26.6667,100, 0,30.1961,100, 0,32.9412,100, 0,35.6863,100, - 0,39.2157,100, 0,41.9608,100, 0,44.7059,100, 0,48.2353,100, 0,50.9804,100, 0,54.1176,100, - 0,56.8627,100, 0,60.3922,100, 0,63.1373,100, 0,66.6667,100, 0,69.4118,100, 0,72.1569,100, - 0,75.6863,100, 0,78.4314,100, 0,81.1765,100, 0,84.7059,100, 0,87.451,100, 0,90.1961,100, - 0,93.7255,100, 0,96.4706,100, 0,100,100, 0,100,96.4706, 0,100,93.7255, 0,100,90.1961, - 
0,100,87.451, 0,100,84.7059, 0,100,81.1765, 0,100,78.4314, 0,100,75.6863, 0,100,72.1569, - 0,100,69.4118, 0,100,66.6667, 0,100,63.1373, 0,100,60.3922, 0,100,56.8627, 0,100,54.1176, - 0,100,50.9804, 0,100,48.2353, 0,100,44.7059, 0,100,41.9608, 0,100,39.2157, 0,100,35.6863, - 0,100,32.9412, 0,100,30.1961, 0,100,26.6667, 0,100,23.9216, 0,100,21.1765, 0,100,17.6471, - 0,100,14.902, 0,100,11.3725, 0,100,9.01961, 0,100,5.4902, 0,100,2.7451, 0,100,0, - 2.7451,100,0, 5.4902,100,0, 9.01961,100,0, 11.3725,100,0, 14.902,100,0, 17.6471,100,0, - 21.1765,100,0, 23.9216,100,0, 26.6667,100,0, 30.1961,100,0, 32.9412,100,0, 35.6863,100,0, - 39.2157,100,0, 41.9608,100,0, 44.7059,100,0, 48.2353,100,0, 50.9804,100,0, 54.1176,100,0, - 56.8627,100,0, 60.3922,100,0, 63.1373,100,0, 66.6667,100,0, 69.4118,100,0, 72.1569,100,0, - 75.6863,100,0, 78.4314,100,0, 81.1765,100,0, 84.7059,100,0, 87.451,100,0, 90.1961,100,0, - 93.7255,100,0, 96.4706,100,0, 100,100,0, 100,97.6471,0, 100,95.6863,0, 100,93.7255,0, - 100,91.3726,0, 100,89.4118,0, 100,87.451,0, 100,85.4902,0, 100,83.1373,0, 100,81.1765,0, - 100,79.2157,0, 100,77.6471,0, 100,75.6863,0, 100,73.7255,0, 100,71.3726,0, 100,69.4118,0, - 100,67.451,0, 100,65.4902,0, 100,63.1373,0, 100,61.1765,0, 100,59.2157,0, 100,56.8627,0, - 100,54.902,0, 100,52.9412,0, 100,50.9804,0, 100,49.4118,0, 100,47.451,0, 100,44.7059,0, - 100,43.1373,0, 100,41.1765,0, 100,39.2157,0, 100,36.8627,0, 100,34.902,0, 100,32.9412,0, - 100,32.1569,0, 100,30.9804,0, 100,30.1961,0, 100,28.6275,0, 100,28.2353,0, 100,26.6667,0, - 100,25.8824,0, 100,24.7059,0, 100,23.9216,0, 100,23.1373,0, 100,21.9608,0, 100,21.1765,0, - 100,20,0, 100,18.4314,0, 100,17.6471,0, 100,16.4706,0, 100,15.6863,0, 100,14.902,0, - 100,13.7255,0, 100,12.9412,0, 100,11.3725,0, 100,10.9804,0, 100,9.41177,0, 100,9.01961,0, - 100,7.84314,0, 100,6.66667,0, 100,5.4902,0, 100,4.31373,0, 100,3.92157,0, 100,2.7451,0, - 100,1.56863,0, 100,0.784314,0, 100,0,0, 97.6471,0,0, 95.6863,0,0, 93.7255,0,0, - 92.1569,0,0, 
90.1961,0,0, 88.2353,0,0, 86.6667,0,0, 84.7059,0,0, 82.7451,0,0, - 80.3922,0,0, 79.2157,0,0, 76.8627,0,0, 74.902,0,0, 72.9412,0,0, 71.3726,0,0, - 69.4118,0,0, 67.451,0,0, 65.8824,0,0, 63.9216,0,0, 61.9608,0,0, 60,0,0, - 58.4314,0,0, 56.4706,0,0, 54.1176,0,0, 52.1569,0,0, 50.1961,0,0, 48.6275,0,0, - 46.6667,0,0, 44.7059,0,0, 43.1373,0,0, 41.1765,0,0, 39.2157,0,0, 37.6471,0,0, - 38.4314,0,1.56863, 39.2157,0,3.92157, 40.3922,0,5.4902, 41.1765,0,7.84314, 41.9608,0,10.1961, 43.1373,0,12.1569, - 43.9216,0,13.7255, 44.7059,0,15.6863, 45.8824,0,18.4314, 46.6667,0,20.3922, 48.2353,0,21.9608, 48.6275,0,23.9216, - 50.1961,0,25.8824, 50.9804,0,28.6275, 52.1569,0,30.1961, 52.9412,0,32.1569, 53.7255,0,34.1176, 54.902,0,36.4706, - 55.6863,0,38.4314, 56.4706,0,40.3922, 57.6471,0,42.7451, 58.4314,0,44.7059, 59.2157,0,46.6667, 60.3922,0,48.6275, - 61.1765,0,50.9804, 62.7451,0,52.9412, 63.1373,0,54.902, 64.7059,0,56.8627, 65.4902,0,59.2157, 66.6667,0,61.1765, - 67.451,0,63.1373, 68.2353,0,65.4902, 69.4118,0,67.451, 70.1961,0,69.4118, 71.3726,0,71.3726, 72.1569,0,73.7255) - -Gtd_ASD( -detail = 50; -max = None; -quadrans = 1; -skillValues = [0.10000000000000001, 0.20000000000000001, 0.29999999999999999, 0.40000000000000002, 0.5, 0.59999999999999998, 0.69999999999999996, 0.80000000000000004, 0.90000000000000002, 0.94999999999999996]; -referencevalue = 1.0; -arrowlength = 0.05; -arrowangle = 20.0; -arrowbase = 0.75; -Marker; - status = []; - line = []; - id = []; - id_size = []; - id_color = []; - id_font = []; - symbol = []; - color = []; - size = []; - xoffset = []; - yoffset = []; - line_color = []; - line_size = []; - line_type = []; -) -Gfm_ASD( - projection=linear,xticlabels#1=*, - xticlabels#2=*, - xmtics#1=*, - xmtics#2=*, - yticlabels#1=*, - yticlabels#2=*, - ymtics#1=*, - ymtics#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - missing=241, - mesh=0, - wrap - (0, 0), - range - (id=0,level1=1e+20,level2=1e+20,Tf=default) ) - diff --git 
a/images/HARD_COPY b/images/HARD_COPY deleted file mode 100755 index 0483059574..0000000000 --- a/images/HARD_COPY +++ /dev/null @@ -1,76 +0,0 @@ -####################################################################### -########################## H A R D C O P Y ########################## -####################################################################### -# # -# This file contains the user specified printer names located on # -# their network! See the "/etc/printcap" file for a list of # -# active printers. It is important to read this entire file for # -# instructions!!!! # -# # -# The '#' at the start of a line indicates a comment or statement by # -# the user. # -# # -# I M P O R T A N T N O T I C E ! ! ! ! # -# - - - - - - - - - - - - - - - - - - - # -# VCS has no way of knowing which print manager your system is using. # -# That is, 'lpr' (the BSD print spooler) or 'lp'. If the set # -# environment variable 'PRINTER' is unset, then VCS will use 'lp'. # -# If the set environment variable 'PRINTER' is set to 'printer', # -# then VCS will use 'lpr'. # -# # -# # -# If sending a CGM file to the printer from VCS results in an error # -# message (e.g., 'Error - In sending CGM file to printer.'), then # -# set or unset the 'PRINTER' environment variable. # -# # -####################################################################### -####################################################################### -####################################################################### - -####################################################################### -####################################################################### -####################################################################### -# I M P O R T A N T N O T I C E ! ! ! ! # -# - - - - - - - - - - - - - - - - - - - # -# The lines below are used for GPLOT. GPLOT is a graphics utility # -# program designed for the processing of CGM metafiles. 
We use # -# GPLOT to convert the cgm file(s) to postscript output and send it # -# directly to a postscript printer. The absolute gplot path must be # -# set properly (below). That is: # -# landscape = /absolute_path/gplot -dPSC -r90 ... # -# portrait = /absolute_path/gplot -dPSC -D ... # -# # -####################################################################### -####################################################################### -####################################################################### - -############################################################################ -# PRINTER ORIENTATION: Landscape # -# OUTPUT TYPE: Postscript COLOR: YES # -# NOTE: THIS IS FOR SENDING TO THE PRINTER # -# # -# .cshrc file: # -# In your .cshrc file you can set up an alias for converting your # -# landscape .cgm files. That is, # -# alias landscape '/absolute_path/gplot -dPSC -r90 -x-1.75 -D -X12.5 -Y10' # -# # -############################################################################ -#landscape = /usr/local/bin/gplot -dPSC -r90 -x-1.75 -D -X12.5 -Y10 - -####################################################################### -# PRINTER ORIENTATION: Portrait # -# OUTPUT TYPE: Postscript COLOR: YES # -# NOTE: THIS IS FOR SENDING TO THE PRINTER # -# # -# .cshrc file: # -# In your .cshrc file you can set up an alias for converting your # -# portscript .cgm files. 
That is, # -# alias portrait '/absolute_path/gplot -dPSC -D -X10 -Y12.5' # -# # -####################################################################### -#portrait = /usr/local/bin/gplot -dPSC -D -X10 -Y12.5 - -####################################################################### -################### P R I N T E R N A M E S ####################### -####################################################################### - diff --git a/images/PCM_isofill.scr b/images/PCM_isofill.scr deleted file mode 100644 index c42b94f247..0000000000 --- a/images/PCM_isofill.scr +++ /dev/null @@ -1,976 +0,0 @@ -L_PCM_p_levels(1000,"1000",900,"900",800,"800",700,"700",600,"600", - 500,"500",400,"400",300,"300",200,"200",100,"100",50,"50", - 10,"10") - -L_PCM_height(1000,"0",795,"2",616.6,"4",472.2,"6",356.5,"8", - 265,"10",121.1,"15",55.3,"20",12,"30") - -Tf_PCM16( - 1, fais(1), - 1, fasi(1), - 1, faci(16), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM17( - 1, fais(1), - 1, fasi(1), - 1, faci(17), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM18( - 1, fais(1), - 1, fasi(1), - 1, faci(18), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM19( - 1, fais(1), - 1, fasi(1), - 1, faci(19), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM20( - 1, fais(1), - 1, fasi(1), - 1, faci(20), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM21( - 1, fais(1), - 1, fasi(1), - 1, faci(21), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM22( - 1, fais(1), - 1, fasi(1), - 1, faci(22), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM23( - 1, fais(1), - 1, fasi(1), - 1, faci(23), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM24( - 1, fais(1), - 1, fasi(1), - 1, faci(24), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM25( - 1, fais(1), - 1, fasi(1), - 1, faci(25), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM26( - 1, fais(1), - 1, fasi(1), - 1, faci(26), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) 
-Tf_PCM27( - 1, fais(1), - 1, fasi(1), - 1, faci(27), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM28( - 1, fais(1), - 1, fasi(1), - 1, faci(28), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM29( - 1, fais(1), - 1, fasi(1), - 1, faci(29), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM30( - 1, fais(1), - 1, fasi(1), - 1, faci(30), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM31( - 1, fais(1), - 1, fasi(1), - 1, faci(31), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM32( - 1, fais(1), - 1, fasi(1), - 1, faci(32), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM33( - 1, fais(1), - 1, fasi(1), - 1, faci(33), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM34( - 1, fais(1), - 1, fasi(1), - 1, faci(34), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM35( - 1, fais(1), - 1, fasi(1), - 1, faci(35), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM36( - 1, fais(1), - 1, fasi(1), - 1, faci(36), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM241( - 1, fais(1), - 1, fasi(1), - 1, faci(241), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) - - -Gfi_PCM_clt( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0.,level2=.10,Tf=PCM16) -(id=2,level1=.10,level2=.20,Tf=PCM22) -(id=3,level1=.20,level2=.30,Tf=PCM23) -(id=4,level1=.30,level2=.40,Tf=PCM32) -(id=5,level1=.40,level2=.50,Tf=PCM33) -(id=6,level1=.50,level2=.60,Tf=PCM34) -(id=7,level1=.60,level2=.70,Tf=PCM27) -(id=8,level1=.70,level2=.80,Tf=PCM28) -(id=9,level1=.80,level2=.90,Tf=PCM29) -(id=10,level1=.90,level2=1.00,Tf=PCM30) ) - -Gfi_PCM_hfls( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range 
-(id=1,level1=-1e+20,level2=0,Tf=PCM16) -(id=2,level1=0,level2=25,Tf=PCM18) -(id=3,level1=25,level2=50,Tf=PCM19) -(id=4,level1=50,level2=75,Tf=PCM20) -(id=5,level1=75,level2=100,Tf=PCM21) -(id=6,level1=100,level2=125,Tf=PCM22) -(id=7,level1=125,level2=150,Tf=PCM23) -(id=8,level1=150,level2=175,Tf=PCM24) -(id=9,level1=175,level2=200,Tf=PCM25) -(id=10,level1=200,level2=225,Tf=PCM26) -(id=11,level1=225,level2=250,Tf=PCM27) -(id=12,level1=250,level2=275,Tf=PCM28) -(id=13,level1=275,level2=300,Tf=PCM29) -(id=14,level1=300,level2=1e+20,Tf=PCM30) ) - -Gfi_PCM_hfss( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-100,Tf=PCM16) -(id=2,level1=-100,level2=-50,Tf=PCM18) -(id=3,level1=-50,level2=-25,Tf=PCM19) -(id=4,level1=-25,level2=-15,Tf=PCM20) -(id=5,level1=-15,level2=-10,Tf=PCM21) -(id=6,level1=-10,level2=-5,Tf=PCM22) -(id=7,level1=-5,level2=0,Tf=PCM23) -(id=8,level1=0,level2=5,Tf=PCM24) -(id=9,level1=5,level2=10,Tf=PCM25) -(id=10,level1=10,level2=15,Tf=PCM26) -(id=11,level1=15,level2=25,Tf=PCM27) -(id=12,level1=25,level2=50,Tf=PCM28) -(id=13,level1=50,level2=100,Tf=PCM29) -(id=14,level1=100,level2=1e+20,Tf=PCM30) ) - -Gfi_PCM_hus( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,10), -missing=1e+20, -range -(id=1,level1=0,level2=0.0005,Tf=PCM20) -(id=2,level1=0.0005,level2=0.001,Tf=PCM21) -(id=3,level1=0.001,level2=0.002,Tf=PCM22) -(id=4,level1=0.002,level2=0.004,Tf=PCM23) -(id=5,level1=0.004,level2=0.006,Tf=PCM24) -(id=6,level1=0.006,level2=0.008,Tf=PCM25) -(id=7,level1=0.008,level2=0.01,Tf=PCM26) -(id=8,level1=0.01,level2=0.012,Tf=PCM27) -(id=9,level1=0.012,level2=0.014,Tf=PCM28) -(id=10,level1=0.014,level2=0.016,Tf=PCM29) -(id=11,level1=0.016,level2=0.018,Tf=PCM30) -(id=12,level1=0.018,level2=0.02,Tf=PCM31) 
) - - -Gfi_PCM_hur( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,10), -missing=1e+20, -range -(id=1,level1=0,level2=10,Tf=PCM20) -(id=2,level1=10,level2=20,Tf=PCM21) -(id=3,level1=20,level2=30,Tf=PCM22) -(id=4,level1=30,level2=40,Tf=PCM23) -(id=5,level1=40,level2=50,Tf=PCM24) -(id=6,level1=50,level2=60,Tf=PCM25) -(id=7,level1=60,level2=70,Tf=PCM26) -(id=8,level1=70,level2=80,Tf=PCM27) -(id=9,level1=80,level2=90,Tf=PCM28) -(id=10,level1=90,level2=100,Tf=PCM29) -(id=11,level1=1e+20,level2=1e+20,Tf=PCM241) ) - -Gfi_PCM_pr( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=1e-09,Tf=PCM16) -(id=2,level1=1e-09,level2=2e-09,Tf=PCM18) -(id=3,level1=2e-09,level2=5e-09,Tf=PCM19) -(id=4,level1=5e-09,level2=1e-08,Tf=PCM20) -(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21) -(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22) -(id=7,level1=2e-08,level2=3e-08,Tf=PCM23) -(id=8,level1=3e-08,level2=5e-08,Tf=PCM24) -(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25) -(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26) -(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27) -(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28) -(id=13,level1=2e-07,level2=3e-07,Tf=PCM29) -(id=14,level1=3e-07,level2=1e+20,Tf=PCM30) -(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) ) - - -Gfi_PCM_prc( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=1e-09,Tf=PCM16) -(id=2,level1=1e-09,level2=2e-09,Tf=PCM18) -(id=3,level1=2e-09,level2=5e-09,Tf=PCM19) -(id=4,level1=5e-09,level2=1e-08,Tf=PCM20) -(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21) -(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22) 
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23) -(id=8,level1=3e-08,level2=5e-08,Tf=PCM24) -(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25) -(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26) -(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27) -(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28) -(id=13,level1=2e-07,level2=3e-07,Tf=PCM29) -(id=14,level1=3e-07,level2=1e+20,Tf=PCM30) -(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) ) - - -Gfi_PCM_prsnc( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=1e-09,Tf=PCM16) -(id=2,level1=1e-09,level2=2e-09,Tf=PCM18) -(id=3,level1=2e-09,level2=5e-09,Tf=PCM19) -(id=4,level1=5e-09,level2=1e-08,Tf=PCM20) -(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21) -(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22) -(id=7,level1=2e-08,level2=3e-08,Tf=PCM23) -(id=8,level1=3e-08,level2=5e-08,Tf=PCM24) -(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25) -(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26) -(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27) -(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28) -(id=13,level1=2e-07,level2=3e-07,Tf=PCM29) -(id=14,level1=3e-07,level2=1e+20,Tf=PCM30) -(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) ) - - -Gfi_PCM_prsnl( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=1e-09,Tf=PCM16) -(id=2,level1=1e-09,level2=2e-09,Tf=PCM18) -(id=3,level1=2e-09,level2=5e-09,Tf=PCM19) -(id=4,level1=5e-09,level2=1e-08,Tf=PCM20) -(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21) -(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22) -(id=7,level1=2e-08,level2=3e-08,Tf=PCM23) -(id=8,level1=3e-08,level2=5e-08,Tf=PCM24) -(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25) -(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26) -(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27) 
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28) -(id=13,level1=2e-07,level2=3e-07,Tf=PCM29) -(id=14,level1=3e-07,level2=1e+20,Tf=PCM30) -(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) ) - - -Gfi_PCM_ps( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=97000,Tf=PCM30) -(id=2,level1=97000,level2=97500,Tf=PCM29) -(id=3,level1=97500,level2=98000,Tf=PCM28) -(id=4,level1=98000,level2=98500,Tf=PCM27) -(id=5,level1=98500,level2=99000,Tf=PCM26) -(id=6,level1=99000,level2=99500,Tf=PCM25) -(id=7,level1=99500,level2=100000,Tf=PCM24) -(id=8,level1=100000,level2=100500,Tf=PCM23) -(id=9,level1=100500,level2=101000,Tf=PCM22) -(id=10,level1=101000,level2=101500,Tf=PCM21) -(id=11,level1=101500,level2=102000,Tf=PCM20) -(id=12,level1=102000,level2=102500,Tf=PCM19) -(id=13,level1=102500,level2=103000,Tf=PCM18) -(id=14,level1=103000,level2=103500,Tf=PCM17) -(id=15,level1=103500,level2=104000,Tf=PCM35) -(id=16,level1=104000,level2=1e+20,Tf=PCM36) ) - - -Gfi_PCM_psl( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=97000,Tf=PCM30) -(id=2,level1=97000,level2=97500,Tf=PCM29) -(id=3,level1=97500,level2=98000,Tf=PCM28) -(id=4,level1=98000,level2=98500,Tf=PCM27) -(id=5,level1=98500,level2=99000,Tf=PCM26) -(id=6,level1=99000,level2=99500,Tf=PCM25) -(id=7,level1=99500,level2=100000,Tf=PCM24) -(id=8,level1=100000,level2=100500,Tf=PCM23) -(id=9,level1=100500,level2=101000,Tf=PCM22) -(id=10,level1=101000,level2=101500,Tf=PCM21) -(id=11,level1=101500,level2=102000,Tf=PCM20) -(id=12,level1=102000,level2=102500,Tf=PCM19) -(id=13,level1=102500,level2=103000,Tf=PCM18) -(id=14,level1=103000,level2=103500,Tf=PCM17) 
-(id=15,level1=103500,level2=104000,Tf=PCM35) -(id=16,level1=104000,level2=1e+20,Tf=PCM36) ) - -Gfi_PCM_rlut( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=100,Tf=PCM16) -(id=2,level1=100,level2=120,Tf=PCM30) -(id=3,level1=120,level2=140,Tf=PCM29) -(id=4,level1=140,level2=160,Tf=PCM28) -(id=5,level1=160,level2=180,Tf=PCM27) -(id=6,level1=180,level2=200,Tf=PCM26) -(id=7,level1=200,level2=220,Tf=PCM25) -(id=8,level1=220,level2=240,Tf=PCM24) -(id=9,level1=240,level2=260,Tf=PCM23) -(id=10,level1=260,level2=280,Tf=PCM22) -(id=11,level1=280,level2=300,Tf=PCM21) -(id=12,level1=300,level2=320,Tf=PCM20) -(id=13,level1=320,level2=340,Tf=PCM19) -(id=14,level1=340,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_rlutcs( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=100,Tf=PCM16) -(id=2,level1=100,level2=120,Tf=PCM30) -(id=3,level1=120,level2=140,Tf=PCM29) -(id=4,level1=140,level2=160,Tf=PCM28) -(id=5,level1=160,level2=180,Tf=PCM27) -(id=6,level1=180,level2=200,Tf=PCM26) -(id=7,level1=200,level2=220,Tf=PCM25) -(id=8,level1=220,level2=240,Tf=PCM24) -(id=9,level1=240,level2=260,Tf=PCM23) -(id=10,level1=260,level2=280,Tf=PCM22) -(id=11,level1=280,level2=300,Tf=PCM21) -(id=12,level1=300,level2=320,Tf=PCM20) -(id=13,level1=320,level2=340,Tf=PCM19) -(id=14,level1=340,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_rsds( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=25,Tf=PCM16) -(id=2,level1=25,level2=50,Tf=PCM30) -(id=3,level1=50,level2=75,Tf=PCM29) 
-(id=4,level1=75,level2=100,Tf=PCM28) -(id=5,level1=100,level2=125,Tf=PCM27) -(id=6,level1=125,level2=150,Tf=PCM34) -(id=7,level1=150,level2=175,Tf=PCM33) -(id=8,level1=175,level2=200,Tf=PCM32) -(id=9,level1=200,level2=225,Tf=PCM23) -(id=10,level1=225,level2=250,Tf=PCM22) -(id=11,level1=250,level2=275,Tf=PCM21) -(id=12,level1=275,level2=300,Tf=PCM20) -(id=13,level1=300,level2=325,Tf=PCM19) -(id=14,level1=325,level2=350,Tf=PCM18) -(id=15,level1=350,level2=1e+20,Tf=PCM17) ) - -Gfi_PCM_rsdscs( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=25,Tf=PCM16) -(id=2,level1=25,level2=50,Tf=PCM30) -(id=3,level1=50,level2=75,Tf=PCM29) -(id=4,level1=75,level2=100,Tf=PCM28) -(id=5,level1=100,level2=125,Tf=PCM27) -(id=6,level1=125,level2=150,Tf=PCM34) -(id=7,level1=150,level2=175,Tf=PCM33) -(id=8,level1=175,level2=200,Tf=PCM32) -(id=9,level1=200,level2=225,Tf=PCM23) -(id=10,level1=225,level2=250,Tf=PCM22) -(id=11,level1=250,level2=275,Tf=PCM21) -(id=12,level1=275,level2=300,Tf=PCM20) -(id=13,level1=300,level2=325,Tf=PCM19) -(id=14,level1=325,level2=350,Tf=PCM18) -(id=15,level1=350,level2=1e+20,Tf=PCM17) ) - -Gfi_PCM_rsus( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=-25,Tf=PCM16) -(id=2,level1=-25,level2=-50,Tf=PCM30) -(id=3,level1=-50,level2=-75,Tf=PCM29) -(id=4,level1=-75,level2=-100,Tf=PCM28) -(id=5,level1=-100,level2=-125,Tf=PCM27) -(id=6,level1=-125,level2=-150,Tf=PCM34) -(id=7,level1=-150,level2=-175,Tf=PCM33) -(id=8,level1=-175,level2=-200,Tf=PCM32) -(id=9,level1=-200,level2=-225,Tf=PCM23) -(id=10,level1=-225,level2=-250,Tf=PCM22) -(id=11,level1=-250,level2=-275,Tf=PCM21) -(id=12,level1=-275,level2=-300,Tf=PCM20) 
-(id=13,level1=-300,level2=-325,Tf=PCM19) -(id=14,level1=-325,level2=-350,Tf=PCM18) -(id=15,level1=-350,level2=-400,Tf=PCM17) -(id=16,level1=-400,level2=-1e+20,Tf=PCM35) ) - -Gfi_PCM_rsut( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=-25,Tf=PCM16) -(id=2,level1=-25,level2=-50,Tf=PCM30) -(id=3,level1=-50,level2=-75,Tf=PCM29) -(id=4,level1=-75,level2=-100,Tf=PCM28) -(id=5,level1=-100,level2=-125,Tf=PCM27) -(id=6,level1=-125,level2=-150,Tf=PCM34) -(id=7,level1=-150,level2=-175,Tf=PCM33) -(id=8,level1=-175,level2=-200,Tf=PCM32) -(id=9,level1=-200,level2=-225,Tf=PCM23) -(id=10,level1=-225,level2=-250,Tf=PCM22) -(id=11,level1=-250,level2=-275,Tf=PCM21) -(id=12,level1=-275,level2=-300,Tf=PCM20) -(id=13,level1=-300,level2=-325,Tf=PCM19) -(id=14,level1=-325,level2=-350,Tf=PCM18) -(id=15,level1=-350,level2=-400,Tf=PCM17) -(id=16,level1=-400,level2=-1e+20,Tf=PCM35) ) - - -Gfi_PCM_rsutcs( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=-25,Tf=PCM16) -(id=2,level1=-25,level2=-50,Tf=PCM30) -(id=3,level1=-50,level2=-75,Tf=PCM29) -(id=4,level1=-75,level2=-100,Tf=PCM28) -(id=5,level1=-100,level2=-125,Tf=PCM27) -(id=6,level1=-125,level2=-150,Tf=PCM34) -(id=7,level1=-150,level2=-175,Tf=PCM33) -(id=8,level1=-175,level2=-200,Tf=PCM32) -(id=9,level1=-200,level2=-225,Tf=PCM23) -(id=10,level1=-225,level2=-250,Tf=PCM22) -(id=11,level1=-250,level2=-275,Tf=PCM21) -(id=12,level1=-275,level2=-300,Tf=PCM20) -(id=13,level1=-300,level2=-325,Tf=PCM19) -(id=14,level1=-325,level2=-350,Tf=PCM18) -(id=15,level1=-350,level2=-400,Tf=PCM17) -(id=16,level1=-400,level2=-1e+20,Tf=PCM35) ) - - -Gfi_PCM_ta( 
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -yaxisconvert=linear, -missing=1e+20, -range(id=1,level1=-1e+20,level2=203,Tf=PCM16) -(id=2,level1=203,level2=213,Tf=PCM30) -(id=3,level1=213,level2=223,Tf=PCM29) -(id=4,level1=223,level2=233,Tf=PCM28) -(id=5,level1=233,level2=243,Tf=PCM27) -(id=6,level1=243,level2=253,Tf=PCM34) -(id=7,level1=253,level2=263,Tf=PCM33) -(id=8,level1=263,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) -(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_tas( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=238,Tf=PCM16) -(id=2,level1=238,level2=243,Tf=PCM30) -(id=3,level1=243,level2=248,Tf=PCM29) -(id=4,level1=248,level2=253,Tf=PCM28) -(id=5,level1=253,level2=258,Tf=PCM27) -(id=6,level1=258,level2=263,Tf=PCM34) -(id=7,level1=263,level2=268,Tf=PCM33) -(id=8,level1=268,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) -(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_tasmax( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=238,Tf=PCM16) -(id=2,level1=238,level2=243,Tf=PCM30) 
-(id=3,level1=243,level2=248,Tf=PCM29) -(id=4,level1=248,level2=253,Tf=PCM28) -(id=5,level1=253,level2=258,Tf=PCM27) -(id=6,level1=258,level2=263,Tf=PCM34) -(id=7,level1=263,level2=268,Tf=PCM33) -(id=8,level1=268,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) -(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_tasmin( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=238,Tf=PCM16) -(id=2,level1=238,level2=243,Tf=PCM30) -(id=3,level1=243,level2=248,Tf=PCM29) -(id=4,level1=248,level2=253,Tf=PCM28) -(id=5,level1=253,level2=258,Tf=PCM27) -(id=6,level1=258,level2=263,Tf=PCM34) -(id=7,level1=263,level2=268,Tf=PCM33) -(id=8,level1=268,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) -(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_ts( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=238,Tf=PCM16) -(id=2,level1=238,level2=243,Tf=PCM30) -(id=3,level1=243,level2=248,Tf=PCM29) -(id=4,level1=248,level2=253,Tf=PCM28) -(id=5,level1=253,level2=258,Tf=PCM27) -(id=6,level1=258,level2=263,Tf=PCM34) -(id=7,level1=263,level2=268,Tf=PCM33) -(id=8,level1=268,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) 
-(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_tauu( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-1,Tf=PCM29) -(id=2,level1=-1,level2=-0.5,Tf=PCM28) -(id=3,level1=-0.5,level2=-0.1,Tf=PCM27) -(id=4,level1=-0.1,level2=-0.05,Tf=PCM26) -(id=5,level1=-0.05,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=0,Tf=PCM24) -(id=7,level1=0,level2=0.01,Tf=PCM23) -(id=8,level1=0.01,level2=0.05,Tf=PCM22) -(id=9,level1=0.05,level2=0.1,Tf=PCM21) -(id=10,level1=0.1,level2=0.5,Tf=PCM20) -(id=11,level1=0.5,level2=1,Tf=PCM19) -(id=12,level1=1,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_tauugwd( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-1,Tf=PCM29) -(id=2,level1=-1,level2=-0.5,Tf=PCM28) -(id=3,level1=-0.5,level2=-0.1,Tf=PCM27) -(id=4,level1=-0.1,level2=-0.05,Tf=PCM26) -(id=5,level1=-0.05,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=0,Tf=PCM24) -(id=7,level1=0,level2=0.01,Tf=PCM23) -(id=8,level1=0.01,level2=0.05,Tf=PCM22) -(id=9,level1=0.05,level2=0.1,Tf=PCM21) -(id=10,level1=0.1,level2=0.5,Tf=PCM20) -(id=11,level1=0.5,level2=1,Tf=PCM19) -(id=12,level1=1,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_tauv( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-1,Tf=PCM29) 
-(id=2,level1=-1,level2=-0.5,Tf=PCM28) -(id=3,level1=-0.5,level2=-0.1,Tf=PCM27) -(id=4,level1=-0.1,level2=-0.05,Tf=PCM26) -(id=5,level1=-0.05,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=0,Tf=PCM24) -(id=7,level1=0,level2=0.01,Tf=PCM23) -(id=8,level1=0.01,level2=0.05,Tf=PCM22) -(id=9,level1=0.05,level2=0.1,Tf=PCM21) -(id=10,level1=0.1,level2=0.5,Tf=PCM20) -(id=11,level1=0.5,level2=1,Tf=PCM19) -(id=12,level1=1,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_tauvgwd( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-1,Tf=PCM29) -(id=2,level1=-1,level2=-0.5,Tf=PCM28) -(id=3,level1=-0.5,level2=-0.1,Tf=PCM27) -(id=4,level1=-0.1,level2=-0.05,Tf=PCM26) -(id=5,level1=-0.05,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=0,Tf=PCM24) -(id=7,level1=0,level2=0.01,Tf=PCM23) -(id=8,level1=0.01,level2=0.05,Tf=PCM22) -(id=9,level1=0.05,level2=0.1,Tf=PCM21) -(id=10,level1=0.1,level2=0.5,Tf=PCM20) -(id=11,level1=0.5,level2=1,Tf=PCM19) -(id=12,level1=1,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_ua( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-25,Tf=PCM29) -(id=2,level1=-25,level2=-20,Tf=PCM28) -(id=3,level1=-20,level2=-15,Tf=PCM27) -(id=4,level1=-15,level2=-10,Tf=PCM26) -(id=5,level1=-10,level2=-5,Tf=PCM25) -(id=6,level1=-5,level2=0,Tf=PCM24) -(id=7,level1=0,level2=5,Tf=PCM23) -(id=8,level1=5,level2=10,Tf=PCM22) -(id=9,level1=10,level2=15,Tf=PCM21) -(id=10,level1=15,level2=20,Tf=PCM20) -(id=11,level1=20,level2=25,Tf=PCM19) -(id=12,level1=25,level2=30,Tf=PCM17) -(id=13,level1=30,level2=35,Tf=PCM35) -(id=14,level1=35,level2=1e+20,Tf=PCM36) ) - - -Gfi_PCM_uas( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, 
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-21,Tf=PCM16) -(id=2,level1=-21,level2=-18,Tf=PCM30) -(id=3,level1=-18,level2=-15,Tf=PCM29) -(id=4,level1=-15,level2=-12,Tf=PCM28) -(id=5,level1=-12,level2=-9,Tf=PCM27) -(id=6,level1=-9,level2=-6,Tf=PCM34) -(id=7,level1=-6,level2=-3,Tf=PCM33) -(id=8,level1=-3,level2=0,Tf=PCM32) -(id=9,level1=0,level2=3,Tf=PCM23) -(id=10,level1=3,level2=6,Tf=PCM22) -(id=11,level1=6,level2=9,Tf=PCM21) -(id=12,level1=9,level2=12,Tf=PCM20) -(id=13,level1=12,level2=15,Tf=PCM19) -(id=14,level1=15,level2=18,Tf=PCM18) -(id=15,level1=18,level2=21,Tf=PCM17) -(id=16,level1=21,level2=1e+20,Tf=PCM35) -(id=17,level1=1e+20,level2=1e+20,Tf=PCM241) ) - -Gfi_PCM_vas( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-10,Tf=PCM29) -(id=2,level1=-10,level2=-8,Tf=PCM28) -(id=3,level1=-8,level2=-6,Tf=PCM27) -(id=4,level1=-6,level2=-4,Tf=PCM26) -(id=5,level1=-4,level2=-2,Tf=PCM25) -(id=6,level1=-2,level2=0,Tf=PCM24) -(id=7,level1=0,level2=2,Tf=PCM23) -(id=8,level1=2,level2=4,Tf=PCM22) -(id=9,level1=4,level2=6,Tf=PCM21) -(id=10,level1=6,level2=8,Tf=PCM20) -(id=11,level1=8,level2=10,Tf=PCM19) -(id=12,level1=10,level2=12,Tf=PCM18) -(id=13,level1=12,level2=14,Tf=PCM17) -(id=14,level1=14,level2=1e+20,Tf=PCM35) -(id=17,level1=1e+20,level2=1e+20,Tf=PCM241) ) - -Gfi_PCM_va( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -xaxisconvert=linear, -yaxisconvert=linear, -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-3,Tf=PCM29) -(id=2,level1=-3,level2=-2.5,Tf=PCM28) -(id=3,level1=-2.5,level2=-2,Tf=PCM27) -(id=4,level1=-2,level2=-1.5,Tf=PCM26) -(id=5,level1=-1.5,level2=-1,Tf=PCM25) 
-(id=6,level1=-1,level2=-0.5,Tf=PCM24) -(id=7,level1=-0.5,level2=0,Tf=PCM23) -(id=8,level1=0,level2=0.5,Tf=PCM22) -(id=9,level1=0.5,level2=1,Tf=PCM21) -(id=10,level1=1,level2=1.5,Tf=PCM20) -(id=11,level1=1.5,level2=2,Tf=PCM19) -(id=12,level1=2,level2=2.5,Tf=PCM17) -(id=13,level1=2.5,level2=3,Tf=PCM35) -(id=14,level1=3,level2=1e+20,Tf=PCM36) ) - -Gfi_PCM_wap( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -xaxisconvert=linear, -yaxisconvert=linear, -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-0.03,Tf=PCM29) -(id=2,level1=-0.03,level2=-0.025,Tf=PCM28) -(id=3,level1=-0.025,level2=-0.02,Tf=PCM27) -(id=4,level1=-0.02,level2=-0.015,Tf=PCM26) -(id=5,level1=-0.015,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=-0.005,Tf=PCM24) -(id=7,level1=-0.005,level2=0,Tf=PCM23) -(id=8,level1=0,level2=0.005,Tf=PCM22) -(id=9,level1=0.005,level2=0.01,Tf=PCM21) -(id=10,level1=0.01,level2=0.015,Tf=PCM20) -(id=11,level1=0.015,level2=0.02,Tf=PCM19) -(id=12,level1=0.02,level2=0.025,Tf=PCM17) -(id=13,level1=0.025,level2=0.03,Tf=PCM35) -(id=14,level1=0.03,level2=1e+20,Tf=PCM36) ) - - - -Gfi_PCM_zg( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -xaxisconvert=linear, -yaxisconvert=linear, -missing=1e+20, -range -(id=0,level1=-1e+20,level2=0,Tf=PCM241) -(id=1,level1=0,level2=3000,Tf=PCM29) -(id=2,level1=3000,level2=6000,Tf=PCM28) -(id=3,level1=6000,level2=9000,Tf=PCM27) -(id=4,level1=9000,level2=12000,Tf=PCM26) -(id=5,level1=12000,level2=15000,Tf=PCM25) -(id=6,level1=15000,level2=18000,Tf=PCM24) -(id=7,level1=18000,level2=21000,Tf=PCM23) -(id=8,level1=21000,level2=24000,Tf=PCM22) -(id=9,level1=24000,level2=27000,Tf=PCM21) -(id=10,level1=27000,level2=30000,Tf=PCM20) -(id=11,level1=30000,level2=33000,Tf=PCM19) -(id=12,level1=33000,level2=36000,Tf=PCM17) -(id=13,level1=36000,level2=39000,Tf=PCM35) 
-(id=14,level1=39000,level2=1e+20,Tf=PCM36) ) - -C_PCM( - 100,100,100, 0,0,0, 44.7,62.4,100, 29.8,44.3,62, 76.9,84.3,100, 100,100,0, - 100,55.6863,16.4706, 0,0,0, 100,100,100, 0,0,0, 100,0,0, 0,100,0, - 0,0,100, 100,100,0, 0,100,100, 100,0,100, 98.4314,98.4314,100, 78.4314,12.549,3.1373, - 88.6274,20,5.4902, 94.5098,33.3333,12.549, 100,55.6863,16.4706, 99.6078,80,24.7059, 97.6471,87.8431,24.7059, 95.6863,100,24.3137, - 79.2157,100,83.5294, 52.549,100,94.5098, 36.4706,100,94.5098, 7.0588,78.4314,100, 23.5294,52.9412,100, 57.6471,20.7843,99.6078, - 84.7059,6.6667,99.6078, 100,0,100, 80.7843,100,15.6863, 44.3137,100,14.1176, 23.1373,85.098,56.8627, 65.8824,0,0, - 47.451,8.2353,11.3725, 0,50,100, 0,40,100, 0,30,100, 0,20,100, 0,10,100, - 0,0,100, 10,0,100, 20,0,100, 30,0,100, 40,0,100, 50,0,100, - 60,0,100, 70,0,100, 80,0,100, 90,0,100, 100,0,100, 100,0,90, - 100,0,80, 100,0,70, 100,0,60, 100,0,50, 100,0,40, 100,0,30, - 100,0,20, 100,0,10, 100,0,0, 95,10,10, 90,20,20, 85,30,30, - 80,40,40, 75,50,50, 70,60,60, 65,70,70, 60,80,80, 55,90,90, - 50,100,100, 45,100,90, 40,100,80, 35,100,70, 30,100,60, 25,100,50, - 20,100,40, 15,100,30, 10,100,20, 5,100,10, 0,100,0, 10,95,10, - 20,90,20, 30,85,30, 40,80,40, 50,75,50, 60,70,60, 70,65,70, - 80,60,80, 90,55,90, 100,50,100, 90,45,100, 80,40,100, 70,35,100, - 60,30,100, 50,25,100, 40,20,100, 30,15,100, 20,10,100, 10,5,100, - 0,0,100, 10,10,95, 20,20,90, 30,30,85, 40,40,80, 50,50,75, - 60,60,70, 70,70,65, 80,80,60, 90,90,55, 100,100,50, 100,90,45, - 100,80,40, 100,70,35, 100,60,30, 100,50,25, 100,40,20, 100,30,15, - 100,20,10, 100,10,5, 100,0,0, 95,0,0, 90,0,0, 85,0,0, - 80,0,0, 75,0,0, 70,0,0, 65,0,0, 60,0,0, 55,0,0, - 50,0,0, 45,0,0, 40,0,0, 35,0,0, 30,0,0, 25,0,0, - 20,0,0, 15,0,0, 10,0,0, 5,0,0, 0,0,0, 0,5,0, - 0,10,0, 0,15,0, 0,20,0, 0,25,0, 0,30,0, 0,35,0, - 0,40,0, 0,45,0, 0,50,0, 0,55,0, 0,60,0, 0,65,0, - 0,70,0, 0,75,0, 0,80,0, 0,85,0, 0,90,0, 0,95,0, - 0,100,0, 0,95,5, 0,90,10, 0,85,15, 0,80,20, 0,75,25, - 0,70,30, 
0,65,35, 0,60,40, 0,55,45, 0,50,50, 0,45,55, - 0,40,60, 0,35,65, 0,30,70, 0,25,75, 0,20,80, 0,15,85, - 0,10,90, 0,5,95, 0,0,100, 0,0,95, 0,0,90, 0,0,85, - 0,0,80, 0,0,75, 0,0,70, 0,0,65, 0,0,60, 0,0,55, - 0,0,50, 0,0,45, 0,0,40, 0,0,35, 0,0,30, 0,0,25, - 0,0,20, 0,0,15, 0,0,10, 0,0,5, 0,0,0, 5,5,5, - 10,10,10, 15,15,15, 20,20,20, 25,25,25, 30,30,30, 35,35,35, - 40,40,40, 45,45,45, 50,50,50, 55,55,55, 60,60,60, 65,65,65, - 70,70,70, 75,75,75, 80,80,80, 85,85,85, 90,90,90, 95,95,95, - 100,100,100, 100,95,95, 100,90,90, 100,85,85, 100,80,80, 100,75,75, - 100,70,70, 100,65,65, 100,60,60, 100,55,55, 100,50,50, 100,45,45, - 100,40,40, 100,35,35, 100,30,30, 100,25,25, 100,20,20, 100,15,15) diff --git a/images/UV-CDAT_logo.png b/images/UV-CDAT_logo.png deleted file mode 100644 index 17f40d09e0..0000000000 Binary files a/images/UV-CDAT_logo.png and /dev/null differ diff --git a/images/UV-CDAT_logo_sites.png b/images/UV-CDAT_logo_sites.png deleted file mode 100644 index 6d568b8256..0000000000 Binary files a/images/UV-CDAT_logo_sites.png and /dev/null differ diff --git a/images/add.gif b/images/add.gif deleted file mode 100644 index 3f40d591d1..0000000000 Binary files a/images/add.gif and /dev/null differ diff --git a/images/animate_load.gif b/images/animate_load.gif deleted file mode 100644 index a6563b031e..0000000000 Binary files a/images/animate_load.gif and /dev/null differ diff --git a/images/animate_save.gif b/images/animate_save.gif deleted file mode 100644 index 8b1081c543..0000000000 Binary files a/images/animate_save.gif and /dev/null differ diff --git a/images/base10.gif b/images/base10.gif deleted file mode 100644 index d3069446b6..0000000000 Binary files a/images/base10.gif and /dev/null differ diff --git a/images/bookmark_folder.gif b/images/bookmark_folder.gif deleted file mode 100644 index 28ffc21d1d..0000000000 Binary files a/images/bookmark_folder.gif and /dev/null differ diff --git a/images/cdatdemo.gif b/images/cdatdemo.gif deleted file mode 100644 index 
f8ca3e6bda..0000000000 Binary files a/images/cdatdemo.gif and /dev/null differ diff --git a/images/cdatnews b/images/cdatnews deleted file mode 100755 index 277b2d9aed..0000000000 --- a/images/cdatnews +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -/usr/bin/env P4PORT=stargate.llnl.gov:1666 P4USER=p4review p4 changes -l | more diff --git a/images/cos.gif b/images/cos.gif deleted file mode 100644 index 32fde08a86..0000000000 Binary files a/images/cos.gif and /dev/null differ diff --git a/images/cosh.gif b/images/cosh.gif deleted file mode 100644 index 977887a117..0000000000 Binary files a/images/cosh.gif and /dev/null differ diff --git a/images/cycle.gif b/images/cycle.gif deleted file mode 100644 index d472dcd90c..0000000000 Binary files a/images/cycle.gif and /dev/null differ diff --git a/images/devel_20.gif b/images/devel_20.gif deleted file mode 100644 index 2e1e1aa9b9..0000000000 Binary files a/images/devel_20.gif and /dev/null differ diff --git a/images/devel_menu.gif b/images/devel_menu.gif deleted file mode 100644 index c6448aa256..0000000000 Binary files a/images/devel_menu.gif and /dev/null differ diff --git a/images/divide.gif b/images/divide.gif deleted file mode 100644 index 8e540dc04c..0000000000 Binary files a/images/divide.gif and /dev/null differ diff --git a/images/edit.gif b/images/edit.gif deleted file mode 100644 index 1e6858c661..0000000000 Binary files a/images/edit.gif and /dev/null differ diff --git a/images/edit_20.gif b/images/edit_20.gif deleted file mode 100644 index f853cc7a04..0000000000 Binary files a/images/edit_20.gif and /dev/null differ diff --git a/images/edit_menu.gif b/images/edit_menu.gif deleted file mode 100644 index 913c12821b..0000000000 Binary files a/images/edit_menu.gif and /dev/null differ diff --git a/images/editdelete.gif b/images/editdelete.gif deleted file mode 100644 index f0a0da26f4..0000000000 Binary files a/images/editdelete.gif and /dev/null differ diff --git a/images/editdelete_20.gif b/images/editdelete_20.gif 
deleted file mode 100644 index 8bd6e28d0b..0000000000 Binary files a/images/editdelete_20.gif and /dev/null differ diff --git a/images/editdelete_menu.gif b/images/editdelete_menu.gif deleted file mode 100644 index e6b4f3cbe5..0000000000 Binary files a/images/editdelete_menu.gif and /dev/null differ diff --git a/images/equal.gif b/images/equal.gif deleted file mode 100644 index 3aa6e64035..0000000000 Binary files a/images/equal.gif and /dev/null differ diff --git a/images/exp.gif b/images/exp.gif deleted file mode 100644 index af87ff8a84..0000000000 Binary files a/images/exp.gif and /dev/null differ diff --git a/images/fabs.gif b/images/fabs.gif deleted file mode 100644 index c39cf67954..0000000000 Binary files a/images/fabs.gif and /dev/null differ diff --git a/images/function.gif b/images/function.gif deleted file mode 100644 index c60f72277d..0000000000 Binary files a/images/function.gif and /dev/null differ diff --git a/images/getmask.gif b/images/getmask.gif deleted file mode 100644 index f448b1aa22..0000000000 Binary files a/images/getmask.gif and /dev/null differ diff --git a/images/gohome.gif b/images/gohome.gif deleted file mode 100644 index a6cfab3dd3..0000000000 Binary files a/images/gohome.gif and /dev/null differ diff --git a/images/greater.gif b/images/greater.gif deleted file mode 100644 index d8bb1375c3..0000000000 Binary files a/images/greater.gif and /dev/null differ diff --git a/images/grower.gif b/images/grower.gif deleted file mode 100644 index d33b2ea196..0000000000 Binary files a/images/grower.gif and /dev/null differ diff --git a/images/hand1.gif b/images/hand1.gif deleted file mode 100644 index 382d9a1038..0000000000 Binary files a/images/hand1.gif and /dev/null differ diff --git a/images/hand2.gif b/images/hand2.gif deleted file mode 100644 index 7b3748df8e..0000000000 Binary files a/images/hand2.gif and /dev/null differ diff --git a/images/info.gif b/images/info.gif deleted file mode 100644 index af52cdd9a8..0000000000 Binary files 
a/images/info.gif and /dev/null differ diff --git a/images/info_20.gif b/images/info_20.gif deleted file mode 100644 index 83bac5312f..0000000000 Binary files a/images/info_20.gif and /dev/null differ diff --git a/images/info_menu.gif b/images/info_menu.gif deleted file mode 100644 index 267fe64fe8..0000000000 Binary files a/images/info_menu.gif and /dev/null differ diff --git a/images/inpin_red.gif b/images/inpin_red.gif deleted file mode 100644 index e85576af15..0000000000 Binary files a/images/inpin_red.gif and /dev/null differ diff --git a/images/inverse.gif b/images/inverse.gif deleted file mode 100644 index 16bed0d8cf..0000000000 Binary files a/images/inverse.gif and /dev/null differ diff --git a/images/less.gif b/images/less.gif deleted file mode 100644 index 7cdd74fca3..0000000000 Binary files a/images/less.gif and /dev/null differ diff --git a/images/list_20.gif b/images/list_20.gif deleted file mode 100644 index b26ab878af..0000000000 Binary files a/images/list_20.gif and /dev/null differ diff --git a/images/lock.gif b/images/lock.gif deleted file mode 100644 index b63a866701..0000000000 Binary files a/images/lock.gif and /dev/null differ diff --git a/images/log.gif b/images/log.gif deleted file mode 100644 index d570f0b3b3..0000000000 Binary files a/images/log.gif and /dev/null differ diff --git a/images/log_20.gif b/images/log_20.gif deleted file mode 100644 index 8191b4af41..0000000000 Binary files a/images/log_20.gif and /dev/null differ diff --git a/images/log_menu.gif b/images/log_menu.gif deleted file mode 100644 index 47b6856430..0000000000 Binary files a/images/log_menu.gif and /dev/null differ diff --git a/images/mask.gif b/images/mask.gif deleted file mode 100644 index 69ca60ba6b..0000000000 Binary files a/images/mask.gif and /dev/null differ diff --git a/images/mlog.gif b/images/mlog.gif deleted file mode 100644 index e0ed411d28..0000000000 Binary files a/images/mlog.gif and /dev/null differ diff --git a/images/mlog10.gif b/images/mlog10.gif 
deleted file mode 100644 index 4c64ac7dd8..0000000000 Binary files a/images/mlog10.gif and /dev/null differ diff --git a/images/multiply.gif b/images/multiply.gif deleted file mode 100644 index 7d5e63f5cf..0000000000 Binary files a/images/multiply.gif and /dev/null differ diff --git a/images/not.gif b/images/not.gif deleted file mode 100644 index 0a3c3a0559..0000000000 Binary files a/images/not.gif and /dev/null differ diff --git a/images/off.gif b/images/off.gif deleted file mode 100644 index 3e53637a0a..0000000000 Binary files a/images/off.gif and /dev/null differ diff --git a/images/on.gif b/images/on.gif deleted file mode 100644 index fb75f06ae5..0000000000 Binary files a/images/on.gif and /dev/null differ diff --git a/images/open.gif b/images/open.gif deleted file mode 100644 index 2aa70cdf0f..0000000000 Binary files a/images/open.gif and /dev/null differ diff --git a/images/opendap.gif b/images/opendap.gif deleted file mode 100644 index 346345da29..0000000000 Binary files a/images/opendap.gif and /dev/null differ diff --git a/images/outpin_red.gif b/images/outpin_red.gif deleted file mode 100644 index c1946afbb2..0000000000 Binary files a/images/outpin_red.gif and /dev/null differ diff --git a/images/pan_down.gif b/images/pan_down.gif deleted file mode 100644 index 4ff930a62c..0000000000 Binary files a/images/pan_down.gif and /dev/null differ diff --git a/images/pan_left.gif b/images/pan_left.gif deleted file mode 100644 index 2e84b43921..0000000000 Binary files a/images/pan_left.gif and /dev/null differ diff --git a/images/pan_right.gif b/images/pan_right.gif deleted file mode 100644 index a6a3a27e3e..0000000000 Binary files a/images/pan_right.gif and /dev/null differ diff --git a/images/pan_up.gif b/images/pan_up.gif deleted file mode 100644 index ed12afba32..0000000000 Binary files a/images/pan_up.gif and /dev/null differ diff --git a/images/player_end2.gif b/images/player_end2.gif deleted file mode 100644 index ca90804acc..0000000000 Binary files 
a/images/player_end2.gif and /dev/null differ diff --git a/images/player_pause.gif b/images/player_pause.gif deleted file mode 100644 index 9b88ec5ebd..0000000000 Binary files a/images/player_pause.gif and /dev/null differ diff --git a/images/player_play.gif b/images/player_play.gif deleted file mode 100644 index e610d84248..0000000000 Binary files a/images/player_play.gif and /dev/null differ diff --git a/images/player_rev.gif b/images/player_rev.gif deleted file mode 100644 index 4fdabd3115..0000000000 Binary files a/images/player_rev.gif and /dev/null differ diff --git a/images/player_start.gif b/images/player_start.gif deleted file mode 100644 index add7c1cd99..0000000000 Binary files a/images/player_start.gif and /dev/null differ diff --git a/images/player_stop.gif b/images/player_stop.gif deleted file mode 100644 index cbceec23c1..0000000000 Binary files a/images/player_stop.gif and /dev/null differ diff --git a/images/power.gif b/images/power.gif deleted file mode 100644 index a998b721bf..0000000000 Binary files a/images/power.gif and /dev/null differ diff --git a/images/pydebug b/images/pydebug deleted file mode 100755 index 30262bfe20..0000000000 --- a/images/pydebug +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/csh -f -unsetenv PYTHONPATH -unsetenv PYTHONHOME -set bindir = `dirname $0` -set pyver=`${bindir}/python -c "import sys;print 'python'+sys.version[0:3],"` -set libdirdir = `dirname ${bindir}` -set libdir = ${libdirdir}/lib -# setenv PYTHONPATH "${libdir}/${pyver}/site-packages/apps:${libdir}/${pyver}/site-packages/vtk" -# setenv LD_LIBRARY_PATH "${libdir}:${libdir}/${pyver}/site-packages/vtk:/usr/local/lib:/usr/local/X11R6/lib:/usr/lib" -if "$1" == "" then - echo "Usage: pydebug python_file" -else - exec $bindir/python ${libdir}/${pyver}/site-packages/pydebug/pydebug.py $* -endif diff --git a/images/pythonenv b/images/pythonenv deleted file mode 100755 index f19471f01f..0000000000 --- a/images/pythonenv +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -f -echo 
$0 -echo "This script shows you the environment variables relevant to running python." -echo PYTHONPATH=${PYTHONPATH:-'not set'} -echo PYTHONSTARTUP=${PYTHONSTARTUP:-'not set'} -echo PYTHONHOME=${PYTHONHOME:-'not set'} -echo LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-'not set'} -echo "You should not have PYTHONHOME set." -echo "It is ok to have PYTHONPATH set to a directory of your own scripts." -echo "It is ok to have PYTHONSTARTUP set to a script you want run"; \ -echo "when starting up Python interactively." - - - diff --git a/images/recycle.gif b/images/recycle.gif deleted file mode 100644 index 704e054eb0..0000000000 Binary files a/images/recycle.gif and /dev/null differ diff --git a/images/recycle_20.gif b/images/recycle_20.gif deleted file mode 100644 index df7fa308a1..0000000000 Binary files a/images/recycle_20.gif and /dev/null differ diff --git a/images/recycle_file.gif b/images/recycle_file.gif deleted file mode 100644 index f1ab50cd08..0000000000 Binary files a/images/recycle_file.gif and /dev/null differ diff --git a/images/recycle_menu.gif b/images/recycle_menu.gif deleted file mode 100644 index 74489e27d5..0000000000 Binary files a/images/recycle_menu.gif and /dev/null differ diff --git a/images/regrid.gif b/images/regrid.gif deleted file mode 100644 index 9777ff3cbb..0000000000 Binary files a/images/regrid.gif and /dev/null differ diff --git a/images/remove.gif b/images/remove.gif deleted file mode 100644 index 12e81f86fd..0000000000 Binary files a/images/remove.gif and /dev/null differ diff --git a/images/save.gif b/images/save.gif deleted file mode 100644 index bfbcf80586..0000000000 Binary files a/images/save.gif and /dev/null differ diff --git a/images/save_20.gif b/images/save_20.gif deleted file mode 100644 index b900e48013..0000000000 Binary files a/images/save_20.gif and /dev/null differ diff --git a/images/save_file.gif b/images/save_file.gif deleted file mode 100644 index dde653798f..0000000000 Binary files a/images/save_file.gif and /dev/null 
differ diff --git a/images/save_menu.gif b/images/save_menu.gif deleted file mode 100644 index 17fb7640b8..0000000000 Binary files a/images/save_menu.gif and /dev/null differ diff --git a/images/sin.gif b/images/sin.gif deleted file mode 100644 index 27f1b4ff1c..0000000000 Binary files a/images/sin.gif and /dev/null differ diff --git a/images/sinh.gif b/images/sinh.gif deleted file mode 100644 index 207e16b239..0000000000 Binary files a/images/sinh.gif and /dev/null differ diff --git a/images/spk2scr.py b/images/spk2scr.py deleted file mode 100755 index 03fa37519f..0000000000 --- a/images/spk2scr.py +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python -import vcs,os - -version = '1.0' -general_description = """ - Reads in and converts Ferret (spk) colormap file to vcs colormap - If method is set to 'blend': - colors will be set using the ferret % value, and blending will be used in between - 0% in ferret corresponds to index_start - 100% in ferret corresponds to index_end - If method is set to 'contiguous': - colors will be set starting at index_start and assigned in order as found in the ferret (spk) file, no blending between colors - """ - -def spk2vcs(file,cname=None,x=None,index_start=16,index_end=239,method='blend',verbose=False): - """ %s - Usage: - cmap, ncolors = spk2vcs(file,cname=None,x=None) - Input: - file : Ferret (spk) colormap file - cname : VCS output colormap name, if None, uses ferret file name - x : vcs canvas, if None then a vcs canvas instance will be created - index_start : 0%% of ferret %% index, default is 16 - index_end : 100%% of ferret %% index, defalut is 239 - method : 'blend' or 'adjacent', defalut is 'blend' - Output: - cmap : vcs colormap object, with conitguous color set from index_Start if method='contiguous' - or spread from index_start to index_end if method is 'blend' - """ - - f=open(file) - ln=f.readlines() - # Treat colormap name - if cname is None: - cname = '.'.join(os.path.split(op.file)[-1].split('.')[:-1]) - if 
verbose: print 'Colormap name:',cname - - if x is None: - x=vcs.init() - cmap=x.createcolormap(cname) - x.setcolormap(cmap.name) - ncolors = 0 - last_index = index_start - if verbose: print 'Method:',method - for l in ln: - sp=l.split() - if len(sp)!=4: # Is it a line with 4 values (p,r,g,b)? - continue - p,r,g,b=sp - try: # Are the 4 values float? - p=float(p) - r=float(r) - g=float(g) - b=float(b) - except: - continue - if method == 'contiguous': - x.setcolorcell(index_start + ncolors, int(r), int(g), int(b)) - if verbose: print 'Setting cell %s to: %s, %s, %s' % (index_start + ncolors, int(r), int(g), int(b)) - cmap=x.getcolormap(cmap.name) - ncolors+=1 - else: - index = index_start + int(p*(index_end-index_start)/100.) - x.setcolorcell( index, int(r), int(g), int(b)) - cmap=x.getcolormap(cmap.name) - if verbose: print 'Setting cell %s to: %s, %s, %s' % (index, int(r), int(g), int(b)) - dr = cmap.index[index][0] - cmap.index[last_index][0] - dg = cmap.index[index][1] - cmap.index[last_index][1] - db = cmap.index[index][2] - cmap.index[last_index][2] - for indx in range(last_index+1,index): - p = float(indx-last_index)/float(index-last_index) - r = cmap.index[last_index][0]+int(p*dr) - g = cmap.index[last_index][1]+int(p*dg) - b = cmap.index[last_index][2]+int(p*db) - x.setcolorcell(indx , r, g, b) - if verbose: print '\t Sub-setting cell %s to: %s, %s, %s' % (indx , r, g, b) - cmap=x.getcolormap(cmap.name) - last_index = index - return cmap -setattr(spk2vcs,'__doc__',spk2vcs.__doc__ % general_description) - -if __name__=='__main__': - import optparse - op=optparse.OptionParser(usage="%%prog [options]\n%s" % general_description,version="%%prog %s" % version) - op.add_option("--file",dest='file',help="Ferret (spk) colormap file to convert, [default: %default]",default="pal1.spk") - op.add_option("--name",dest="name",help="Name of the returned vcs colormap, [default: uses ferret (spk) file name]",default='default') - op.add_option("--out",dest="out",help="Name of 
the returned vcs script file, [default: file.scr]",default='default') - op.add_option("--index_start",dest="index_start",type='int',help='start index for mapping of ferret colors into vcs colormap, [default: %default]',default=16) - op.add_option("--index_end",dest="index_end",type='int',help='end index for mapping of ferret colors into vcs colormap, [default: %default]',default=239) - op.add_option("--method",dest="method",help='method for mapping of ferret colors into vcs colormap (blend or contiguous), [default: %default]',default='blend') - op.add_option("--blend",dest="blend",action='store_true',help='end index for mapping of ferret colors into vcs colormap, overrides --method option',default=True) - op.add_option("--contiguous",dest="blend",action='store_false',help='end index for mapping of ferret colors into vcs colormap, overrides --method option',default=True) - op.add_option("--verbose",dest="verbose",action='store_true',help='Enable verbose screen output while converting colorcells, [default: %default]',default=False) - - op,args = op.parse_args() - - if op.method in [ 'contiguous','blend']: - method = op.method - else: - op.error("options method can ONLY be either blend or contiguous") - - if op.blend is True: - method = 'blend' - else: - method = 'contiguous' - - if op.name == 'default': - cname = None - - cmap = spk2vcs(op.file,index_start=op.index_start,index_end=op.index_end,method=method,cname=cname,verbose=op.verbose) - - if op.out == 'default': - oname = '.'.join(os.path.split(op.file)[-1].split('.')[:-1])+'.scr' - cmap.script(oname) - print 'Done, colormap converted to VCS using "%s" method from index %s to index %s\nStored in file: %s' % (method,op.index_start,op.index_end,oname) - - diff --git a/images/splash.gif b/images/splash.gif deleted file mode 100755 index 3fb4ad9ae6..0000000000 Binary files a/images/splash.gif and /dev/null differ diff --git a/images/sqrt.gif b/images/sqrt.gif deleted file mode 100644 index 5bad811d7d..0000000000 
Binary files a/images/sqrt.gif and /dev/null differ diff --git a/images/std.gif b/images/std.gif deleted file mode 100644 index 61e29c4ad7..0000000000 Binary files a/images/std.gif and /dev/null differ diff --git a/images/subtract.gif b/images/subtract.gif deleted file mode 100644 index ece53af6fa..0000000000 Binary files a/images/subtract.gif and /dev/null differ diff --git a/images/tan.gif b/images/tan.gif deleted file mode 100644 index bba5910b4c..0000000000 Binary files a/images/tan.gif and /dev/null differ diff --git a/images/tanh.gif b/images/tanh.gif deleted file mode 100644 index 60753ac6ab..0000000000 Binary files a/images/tanh.gif and /dev/null differ diff --git a/images/templator b/images/templator deleted file mode 100755 index a8c9f2e9bd..0000000000 --- a/images/templator +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh -f -unset PYTHONHOME -opt=$1 -bindir=`dirname $0` -if (test "--help" = "$1") then - echo 'Usage: templator [template_name]'; exit 0 -fi -set path="${bindir}:$path" #in case of respawns, get our python -exec ${bindir}/python -O -c "import vcs, gui_support;vcs.templateeditorgui.create(template_name='${opt}');gui_support.root().mainloop()" - diff --git a/images/tg2_20.gif b/images/tg2_20.gif deleted file mode 100644 index c2390d374d..0000000000 Binary files a/images/tg2_20.gif and /dev/null differ diff --git a/images/tg_20.gif b/images/tg_20.gif deleted file mode 100644 index b26ab878af..0000000000 Binary files a/images/tg_20.gif and /dev/null differ diff --git a/images/tg_menu.gif b/images/tg_menu.gif deleted file mode 100644 index 7e08f4ad83..0000000000 Binary files a/images/tg_menu.gif and /dev/null differ diff --git a/images/tiltedpin_red.gif b/images/tiltedpin_red.gif deleted file mode 100644 index db91095014..0000000000 Binary files a/images/tiltedpin_red.gif and /dev/null differ diff --git a/images/toggle_menu.gif b/images/toggle_menu.gif deleted file mode 100644 index 3a2e664df0..0000000000 Binary files a/images/toggle_menu.gif and 
/dev/null differ diff --git a/images/trashcan_empty.gif b/images/trashcan_empty.gif deleted file mode 100644 index cbd89f7848..0000000000 Binary files a/images/trashcan_empty.gif and /dev/null differ diff --git a/images/trashcan_empty_20.gif b/images/trashcan_empty_20.gif deleted file mode 100644 index ecb0c3ed76..0000000000 Binary files a/images/trashcan_empty_20.gif and /dev/null differ diff --git a/images/trashcan_full.gif b/images/trashcan_full.gif deleted file mode 100644 index 39acb09dc1..0000000000 Binary files a/images/trashcan_full.gif and /dev/null differ diff --git a/images/trashcan_full_20.gif b/images/trashcan_full_20.gif deleted file mode 100644 index 97ed55ba86..0000000000 Binary files a/images/trashcan_full_20.gif and /dev/null differ diff --git a/images/trashcan_menu.gif b/images/trashcan_menu.gif deleted file mode 100644 index 1b8b1af82d..0000000000 Binary files a/images/trashcan_menu.gif and /dev/null differ diff --git a/images/unlock.gif b/images/unlock.gif deleted file mode 100644 index da0b2f547f..0000000000 Binary files a/images/unlock.gif and /dev/null differ diff --git a/images/vcs2py.py b/images/vcs2py.py deleted file mode 100755 index bb06fc09fc..0000000000 --- a/images/vcs2py.py +++ /dev/null @@ -1,367 +0,0 @@ -#!/usr/bin/env python - -""" This script converts old vcs scripts to python scripts -This is version 0. graphic method and template won't be converted -Therefore old script still needed around in order to load all graphic methods needed - -Not implemented yet, to do: -xname, xunits, etc.... -Transform, Logical mask -""" -version='0.2' -import sys -import vcs -import cdms - -## initialize some stats -warnings=0 -vcscmd=0 -arrays=0 -arrays_from_file=0 -plots=0 -active_plots=0 -unimplemented=0 -vcs_objects=0 -## Continents overlaying (default none) -overlay_continents='' - -## Determine input script -src=sys.argv[1] - -## Generate output name -outnm=src[:-4]+'.py' - -## Generate output name for graphic methods, templates, etc... 
-outnm_vcs=src[:-4]+'_vcs.scr' -f2=open(outnm_vcs,'w') - -# open input script file -fi=open(src,'r') - -## Opens output script file -f=open(outnm,'w') -f.write('#/usr/bin/env python\nimport vcs\nimport cdms\nimport MV\nx=vcs.init()\n\n') -f.write('"""Python script autogenerated using vcs2py version '+version+'\n') -f.write('Input VCS script: '+src+'\n"""\n') -f.write('## First load all the necessary template and graphic methods from the old script\nx.scriptrun(\''+outnm_vcs+'\')\n') -f.write("## Individual python code for individual vcs object can be generated by loading the object and saving it to a file\n## e.g: t=x.getboxfill('default')\n## x.scriptobject(t,'myfile.py')\n\n") - -## Opens file for graphic methods rewriting - -## Ok now let's loop through all lines and figure out commands -ln=fi.readlines() -n=len(ln) - -def extract(instring,beg,end=','): - """ Extract part of a string between 2 characters def, returns None if not existing - Usage: val = extract(instring,beg,end=',') - """ - try: - sp=instring.split(beg)[1] - sp=sp.split(end)[0] - if sp[-1]==instring[-1]: - sp=sp[:-1] - except: - sp=None - return sp -for i in range(n): - l=ln[i] - #print l - iprint=0 - if l[:4]=='Page': - vcscmd+=1 - val=l[5:].split(')')[0] - f.write('x.'+val+'()\n\n') - elif l.split('_')[0] in ['L','Tt','To','Tl','Tf','Tm','Th','C','P', - 'Gi','Gfb','Gfi','Gfo','Go','GSp','Gv','GXY','GXy','GYx']: - # First reconstruct the full name - nbracket=l.count('(') - vcs_objects+=1 - j=1 - f2.write(ln[i]) - nbracket-=l.count(')') - while nbracket>0: - f2.write(ln[i+j]) - nbracket+=ln[i+j].count('(') - nbracket-=ln[i+j].count(')') - j+=1 - - elif l[:5]=='Sleep': - vcscmd+=1 - val=l[6:].split(')')[0] - f.write('import time\ntime.sleep('+val+')\n\n') - elif l[:4]=='Over': - vcscmd+=1 - overlay_continents=',continents=' - n=l[19:].split(')')[0] - overlay_continents+=n - elif l[:3].lower()=='cgm': - vcscmd+=1 - args=l[4:].split(')')[0] # get the arguments - sp=args.split(',') - cgmfnm=sp[0] - 
if len(sp)>1: - app=sp[1][0] - else: - app="'a'" - f.write("x.cgm('"+cgmfnm+"',"+app+")\n\n") - elif l[:3].lower()=='run': - vcscmd+=1 - args=l[4:].split(')')[0] # get the arguments - sp=args.split(',') - scrfnm=sp[0] - f.write("## Warning the following will only load the templates/graphic methods\n") - f.write("## To excute commands convert script to file and uncoment the following line\n") - warnings+=1 - print 'Warning: Run script, will not execute any command, you need to convert it first and uncoment the line in the python script' - pyfnm=scrfnm.replace('.scr','.py') - f.write("## execfile('"+pyfnm+"')\n") - f.write("x.scriptrun('"+scrfnm+"')\n\n") - elif l[:6].lower()=='raster': - vcscmd+=1 - args=l[7:].split(')')[0] # get the arguments - sp=args.split(',') - cgmfnm=sp[0] - if len(sp)>1: - app=sp[1][0] - else: - app="'a'" - f.write("x.raster('"+cgmfnm+"',"+app+")\n\n") - elif l[:3].lower() in['drs','hdf']: - vcscmd+=1 - warnings+=1 - args=l[4:].split(')')[0] # get the arguments - sp=args.split(',') - ncfnm=sp[0] - ncfnm=ncfnm.replace('.dic','.nc') - ncfnm=ncfnm.replace('.hdf','.nc') - if len(sp)>2: - app=sp[2][0] - if app=='r':app="'w'" - if app=='a':app="'r+'" - else: - app="'w'" - array=sp[1] - print 'WARNING: Output file converted from '+l[:3]+' to NetCDF' - f.write("f=cdms.open('"+ncfnm+"',"+app+")\n") - f.write("f.write("+array+","+app+")\n") - f.write('f.close()\n\n') - elif l[:6].lower()=='netcdf': - vcscmd+=1 - args=l[7:].split(')')[0] # get the arguments - sp=args.split(',') - ncfnm=sp[0] - if len(sp)>2: - app=sp[2][0] - if app=='r':app="'w'" - if app=='a':app="'r+'" - else: - app="'w'" - array=sp[1] - f.write("f=cdms.open('"+ncfnm+"',"+app+")\n") - f.write("f.write("+array+","+app+")\n") - f.write('f.close()\n\n') - elif l[:5].lower()=='clear': - vcscmd+=1 - f.write('x.clear()\n\n') - elif l[:5].lower()=='color': - vcscmd+=1 - cmap=l[6:].split(')')[0] - f.write("x.setcolormap('"+cmap+"')\n\n") - elif l[:6].lower()=='canvas': - vcscmd+=1 - if 
l[7:-1]=='open': - f.write('x.open()\n\n') - elif l[7:-1]=='close': - f.write('x.close()\n\n') - elif l[:2]=='A_': - arrays+=1 - # Acquiring Array data - # First reconstruct the full name - j=1 - while l[-2]!=')' and l[-1]!=')': - l=l[:-1]+ln[i+j] - j+=1 - l=l.replace('\n','') - nm=extract(l,'A_','(') - pnm=nm.replace('.','_') # . are not acceptable in python names - if pnm!=nm: - # Now replace in every over possible lines ! - for j in range(i,n): - ln[j]=ln[j].replace(nm,pnm) - fnm=extract(l,'File=') - src=extract(l,'Source=') - vr=extract(l,'Name=') - tit=extract(l,'Title=') - units=extract(l,'Units=') - xnm=extract(l,'XName=') - xfirst=extract(l,'xfirst=') - xlast=extract(l,'xlast=') - ynm=extract(l,'YName=') - yfirst=extract(l,'yfirst=') - ylast=extract(l,'ylast=') - znm=extract(l,'ZName=') - zfirst=extract(l,'zfirst=') - zlast=extract(l,'zlast=') - tnm=extract(l,'TName=') - tfirst=extract(l,'tfirst=') - tlast=extract(l,'tlast=') - func=extract(l,'Function="','"') - cmd='' - - if not fnm is None: - arrays_from_file+=1 - cmd+='f = cdms.open('+fnm+')\n' - cmd+=pnm+' = f('+vr - if fnm[-5:-1]=='.dic': - if not tnm is None: tnm=tnm[:-1]+'_'+vr[1:] - if not znm is None: znm=znm[:-1]+'_'+vr[1:] - if not ynm is None: ynm=ynm[:-1]+'_'+vr[1:] - if not xnm is None: xnm=xnm[:-1]+'_'+vr[1:] - - elif not func is None: - # First of all treats the special commands (mean and sqrt) - # Mean ? 
-## if func[:-1]!=')': -## func=func+')' - imean=func.find('mean(') - while imean!=-1 : - tmp=func[imean:] - tmp=tmp.replace('mean(','cdutil.averager(',1) - tmp=tmp.split(',') - tmp2=tmp[1] - fpar=tmp2.find('\'') - lpar=tmp2[fpar+1].find('\'') - tmp3=tmp2[fpar+1:lpar].lower() - if tmp3=='time': - tmp3="axis='t')" - elif tmp3=='longitude': - tmp3="axis='x')" - elif tmp3=='latitude': - tmp3="axis='y')" - elif tmp3=='level': - tmp3="axis='z')" - else: - tmp3="axis='("+tmp2[fpar+1:lpar-1]+")'"+tmp2[lpar:] - tmp[1]=tmp3 - tmp=','.join(tmp) - func=func[:imean]+tmp - imean=func.find('mean(') - isqrt=func.find('sqrt(') - while isqrt!=-1: - warnings+=1 - print 'WARNING FOR ARRAY:'+pnm+'\nsqrt FUNCTION FOUND YOU NEED TO REPLACE AXIS NAME WITH CORRECT VALUE !' - tmp=func[isqrt:] - tmp=tmp.replace('sqrt(','MV.xxxx(',1) - tmp=tmp.split(',') - if len(tmp)>1: - tmp2=tmp[1] - fpar=tmp2.find('\'') - lpar=tmp2[fpar+1].find('\'') - tmp3="axis='("+tmp2[fpar+1:lpar-1].lower()+")'" - tmp[1]=tmp3 - else: - tmp[0]+=')' - tmp=','.join(tmp) - func=func[:isqrt]+tmp - isqrt=func.find('sqrt(') - func=func.replace('MV.xxxx','MV.sqrt') - cmd+=pnm+' = '+func+'\n'+pnm+' = '+pnm+'(' - else: - raise 'Error array'+nm+' is coming neither from file nor function !' 
- # Now does the dimensions needed - order='' - if not tnm is None: - order+='('+tnm[1:-1]+')' - if not tfirst is None: - tcmd=tnm[1:-1]+'=('+tfirst+','+tlast+')' - if cmd[-1]!='(': - cmd+=','+tcmd - else: - cmd+=tcmd - if not znm is None: - order+='('+znm[1:-1]+')' - if not zfirst is None: - zcmd=znm[1:-1]+'=('+zfirst+','+zlast+')' - if cmd[-1]!='(': - cmd+=','+zcmd - else: - cmd+=zcmd - if not ynm is None: - order+='('+ynm[1:-1]+')' - if not yfirst is None: - ycmd=ynm[1:-1]+'=('+yfirst+','+ylast+')' - if cmd[-1]!='(': - cmd+=','+ycmd - else: - cmd+=ycmd - if not xnm is None: - order+='('+xnm[1:-1]+')' - if not xfirst is None: - xcmd=xnm[1:-1]+'=('+xfirst+','+xlast+')' - if cmd[-1]!='(': - cmd+=','+xcmd - else: - cmd+=xcmd - if order!='': - cmd+=",order='..."+order+"'" - cmd+=')\n' - if not fnm is None: - cmd+='f.close()\n' - if not src is None: - cmd+=pnm+'.source = '+src+'\n' - if not tit is None: - cmd+=pnm+'.title = '+tit+'\n' - if not units is None: - cmd+=pnm+'.units = '+units+'\n' - - # Now does the attributes that are overwrittable - for att in ['source','name','units','crdate','crtime', - 'comment#1','comment#2','comment#3','comment#4']: - val=extract(l,att+'="','"') - Att=att.replace('#','') - if not val is None: - cmd+=pnm+'.'+Att+' = "'+val+'"\n' - cmd+='\n' - cmd=cmd.replace('"',"'") - cmd=cmd.replace('(,',"(") - f.write(cmd) - elif l[:2]=='D_': - plots+=1 - # Plotting data - # First reconstruct the full string - j=1 - while l[-2]!=')' and l[-1]!=')': - l=l[:-1]+ln[i+j] - j+=1 - l=l.replace('\n','') - off=extract(l,'off=',',') - if int(off)==0: # Ok it's not off, let's draw it - cmd='' - active_plots+=1 - else: - cmd='## Next line commented, display was "off"\n## ' - type=extract(l,'type=' ,',') - if type is None: type = 'boxfill' - tmpl=extract(l,'template=',',') - if tmpl is None: tmpl='default' - mthd=extract(l,'graph=' ,',') - if mthd is None: mthd='default' - a =extract(l,'a=' ,',') - b =extract(l,'b=' ,',') - cmd+='x.plot('+a+', ' - if not b is 
None: - cmd+=b+' ,' - cmd+="'"+tmpl+"', '"+type+"', '"+mthd+"'"+overlay_continents+")\n\n" - f.write(cmd) -f.close() -print 'Successfully converted:',src -print 'Processed:' -print 'VCS Commands:',vcscmd - -print 'Arrays:',arrays,':',arrays_from_file,'from file and',arrays-arrays_from_file,'computed' -print 'Plots:',plots -print 'Active plots:',active_plots -print 'Warnings:',warnings -print 'VCS OBJECTS (templates, graphic methods, etc..):',vcs_objects - diff --git a/images/vcs_icon.xbm b/images/vcs_icon.xbm deleted file mode 100644 index 912510e778..0000000000 --- a/images/vcs_icon.xbm +++ /dev/null @@ -1,566 +0,0 @@ -#define splash_width 492 -#define splash_height 136 -static char splash_bits[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf4,0xff,0x3f,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0xff,0x0f,0xfc,0xff,0xbf, - 0xee,0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0xfd,0xfb,0xff,0x52,0xff, - 0x7f,0xeb,0x7b,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x57,0xb8,0xaf,0x00, - 0xef,0xff,0xd7,0x7f,0xdf,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xe0,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x6f, - 0x08,0xfd,0x7f,0x6b,0xfd,0xda,0x7b,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x28,0x16,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48, - 0xf9,0x12,0x80,0xaf,0xdb,0xff,0xb7,0xff,0xee,0x17,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0xe9,0xab,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2a,0xfc,0x3f, - 0x00,0xf0,0x4f,0x02,0x00,0x00,0xfc,0xd6,0xfe,0xad,0xbf,0x1d,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0x5f,0xbd,0x07, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x3a, - 0x74,0x15,0x00,0xa0,0x1f,0x01,0x00,0x00,0xf8,0xff,0x6b,0xff,0xea,0x0b,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x7f,0xeb, - 0xd6,0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0xd7,0xa9,0x80,0x8f,0xbe,0x2a,0x04,0x00,0x00,0x78,0x6d,0xff,0xd5,0x7f, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x00,0xf4, - 0xd7,0xb6,0xb5,0x5a,0x2f,0xad,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf8,0xfe,0x01,0x7a,0xf8,0xff,0x5f,0x00,0x00,0xdc,0xfb,0xad, - 0x7f,0xdb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0, - 0xf2,0xdf,0x6a,0x5b,0xdb,0xed,0xd5,0xb7,0x8b,0xfb,0xff,0x2d,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0xd0,0xff, - 0xbf,0x00,0x00,0x14,0x00,0xec,0x7f,0x01,0x3e,0xa0,0xd5,0xbf,0x01,0x00,0x68, - 0x5f,0xff,0xed,0x57,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0x07,0x00,0x00,0x00, - 0x20,0x40,0x01,0xa8,0xbd,0xed,0xad,0xb6,0xda,0xda,0xfa,0x6e,0xad,0xf6,0xff, - 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x60, - 0xff,0xff,0xff,0xff,0xfe,0xff,0xff,0x57,0x5b,0xf8,0x7b,0x70,0x00,0xf5,0x01, - 0x00,0xac,0xf7,0x6b,0x77,0x01,0x00,0x00,0x00,0x00,0x00,0x70,0xab,0xf4,0x13, - 0x20,0x20,0x91,0xbe,0x00,0xfc,0xd6,0xb6,0xf6,0x5b,0x6f,0xb7,0xae,0xb5,0x6a, - 0x55,0x95,0xfe,0xff,0xff,0x0f,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0xf0,0xff,0xbf,0xfb,0xff,0xff,0xff,0xff,0x23,0x3f,0xf0,0xde,0x1d,0x00, - 0xdc,0x00,0x00,0xd8,0xdd,0xfe,0xd3,0x00,0x00,0x00,0x00,0x00,0x00,0xdf,0x95, - 0x52,0xed,0x60,0xf0,0x7f,0x55,0x02,0x54,0xbb,0xad,0x2d,0xed,0xb5,0xad,0x75, - 0x5b,0x57,0x5b,0xd5,0xea,0x5f,0x8a,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0xe0,0xbf,0xff,0xef,0xff,0xff,0xf7,0xbe,0xff,0xf7,0xbf,0x7f, - 0x5d,0x00,0xf5,0x0f,0x00,0x7e,0xff,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x56,0xa8,0xaa,0x92,0x22,0x5c,0xaa,0x90,0x02,0xea,0xd6,0xf6,0xf6,0xaa,0x5a, - 0xf5,0xaa,0xd6,0xb9,0x6d,0x5b,0x35,0x69,0x6b,0xa5,0x00,0x00,0x00,0x00,0x00, - 
0x00,0xf0,0x00,0x00,0xe0,0x7f,0xfb,0xf6,0xff,0xb6,0xef,0xdf,0xf7,0xfe,0xde, - 0xff,0xf6,0x02,0xe4,0x5f,0x08,0x00,0xee,0x2d,0x05,0x00,0x00,0x00,0x00,0x00, - 0x00,0x80,0x5f,0x83,0x95,0x02,0x94,0x52,0x55,0x2a,0x01,0x5c,0x7b,0x5b,0x5b, - 0xbf,0xed,0x56,0xb7,0x6b,0xae,0xaa,0xaa,0xd6,0x56,0xad,0xaa,0x05,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xfe,0xff,0xff,0xfe,0xff,0xfd,0xfe,0xff, - 0xdb,0x7f,0xff,0x8a,0x07,0xd0,0xff,0x00,0x00,0xbc,0x3f,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xe0,0xa5,0xc0,0x52,0x49,0xa9,0x2a,0x11,0x85,0x04,0xe8,0xad, - 0xad,0xed,0xd2,0x56,0xbb,0x5a,0xb5,0xab,0xd5,0xd6,0xaa,0xaa,0xaa,0xaa,0x2a, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0xfc,0xff,0xff,0xdf,0xbb,0xff,0xff, - 0xff,0xee,0x7f,0xf7,0x0b,0xa0,0x00,0x80,0x0f,0x00,0x00,0xf6,0x05,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xbc,0x2a,0x68,0xab,0xaa,0x94,0x8a,0xaa,0x28,0x09, - 0xb4,0xd6,0xf6,0x56,0x6f,0xfb,0xd5,0xad,0xad,0xd4,0x6e,0x5b,0x55,0xdb,0xaa, - 0x56,0x95,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0xfd,0xbb,0xb7,0xff,0xff, - 0xbb,0xb7,0xdb,0x7f,0xff,0xfd,0x4d,0x00,0x00,0x05,0x44,0x00,0x00,0xbc,0x0b, - 0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x6d,0x95,0x54,0x49,0x2a,0x55,0x52,0x44, - 0xa5,0x02,0x58,0x7b,0x5b,0xbb,0xb5,0xaa,0xae,0xf6,0xd6,0x6e,0xb5,0x6a,0x6b, - 0x55,0x7a,0x59,0x15,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0xc0,0xff,0xff,0xfb, - 0xfb,0xef,0xff,0xfe,0xff,0xfb,0xed,0x5f,0x00,0x00,0xf0,0x3f,0x00,0x00,0x00, - 0xf8,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x97,0x2a,0xa0,0xaa,0x52,0x49, - 0xa9,0x2a,0x12,0x04,0xe8,0xad,0xad,0x6d,0xdd,0x57,0xb5,0x95,0x6a,0x55,0x55, - 0x55,0xad,0x2a,0x20,0xa6,0x0a,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0xd0,0xff, - 0x57,0x41,0x7d,0xff,0xfe,0xdf,0x77,0xdf,0xbf,0x3b,0x00,0x00,0xe0,0x2f,0x10, - 0x00,0x00,0x80,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0xaa,0x2a,0x80,0x40, - 0x4a,0x25,0x45,0x52,0xa9,0x10,0xb5,0xd6,0xf6,0xb6,0x6b,0xed,0xdb,0x76,0xb7, - 0xb6,0xad,0xad,0xaa,0x2a,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 
0x00,0x5a,0x00,0x00,0xe0,0xfd,0xef,0xff,0xfe,0xff,0xfb,0x3f,0x00,0x00,0xfc, - 0x7d,0x39,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x87,0x25, - 0x00,0xaa,0x2a,0x95,0x2a,0x25,0x05,0x44,0xdd,0x7b,0x5b,0xdb,0xb6,0x36,0xad, - 0xda,0x5a,0xdb,0xb6,0xd6,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x80,0x95,0x00,0x00,0xc0,0xb7,0xfd,0xf6,0xdf,0x76,0xef,0xb6,0x00, - 0x00,0x78,0xb7,0x3f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0e,0x00, - 0x80,0x1a,0xe0,0x4a,0xa9,0x54,0x92,0x94,0x08,0x58,0x6b,0xad,0x6d,0x6d,0x5b, - 0xdb,0x76,0x57,0xab,0x55,0x55,0x55,0x00,0x00,0x80,0x55,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x40,0x2b,0x00,0x00,0xe0,0xff,0xff,0xff,0xfe,0xff,0xff, - 0x7f,0x01,0x00,0xd8,0xff,0x5e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x1e,0x00,0x20,0x2b,0xa0,0x2a,0xa5,0x52,0x55,0x4a,0x05,0xd4,0xb6,0xed,0xd6, - 0xb6,0xed,0x6d,0xdb,0x6a,0xb5,0xaa,0xda,0xda,0x00,0x00,0x80,0xae,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x08,0x00,0x00,0x00,0x40,0xff,0xb7,0xdf,0xf7, - 0xdf,0x7d,0xb7,0x0b,0x00,0x7e,0xdb,0x37,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x02,0x00,0x30,0x00,0xe0,0x54,0x95,0x4a,0x22,0x21,0x12,0xb4,0xdd, - 0x56,0x7b,0xdb,0xb6,0xaa,0x55,0x5d,0x5b,0xbb,0x6b,0xab,0x00,0x00,0x00,0xbb, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x90,0xdd,0xfe, - 0xfb,0x7f,0xfb,0xef,0xfd,0x77,0xd1,0xff,0xff,0x7d,0x01,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xa0,0x09,0x00,0x30,0xc2,0x6b,0xa5,0x54,0x29,0x55,0x55,0x09, - 0x6c,0x6b,0xfb,0xaa,0x6d,0xd5,0xde,0xb6,0xab,0xad,0x55,0x55,0xad,0x02,0x18, - 0x00,0x60,0x01,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc8, - 0xff,0xff,0xfe,0xee,0x6f,0xff,0xbf,0xff,0xf0,0xd7,0x6b,0xef,0x05,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf8,0x20,0x00,0x7f,0x55,0x95,0x2a,0x25,0x95,0x24, - 0x8a,0x00,0xda,0xdd,0x96,0xdd,0xb6,0x5e,0x6b,0x6d,0x75,0xd5,0xda,0x5a,0xb5, - 0xfa,0x03,0x00,0xc0,0x03,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 
0x00,0x80,0xf7,0xdb,0xdf,0xff,0xfe,0x75,0xef,0xbd,0xf0,0xfd,0xfe,0x7d,0x03, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0xa8,0xc1,0xab,0x52,0x55,0x52,0x95, - 0xaa,0x52,0x15,0x4a,0x6c,0x6b,0x7b,0x6b,0xdb,0xb5,0xd5,0xb6,0xad,0x6d,0x6d, - 0xab,0x55,0xad,0x86,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x80,0xbf,0xff,0x7b,0xf7,0xfb,0xff,0xfb,0xf7,0x7e,0xdf,0xb7, - 0xdf,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x5c,0x5c,0x64,0x95,0x2a,0xa5, - 0x4a,0xa9,0x44,0x8a,0x08,0x28,0xb7,0xbd,0xd5,0xb6,0x6d,0xdb,0x6e,0xab,0x56, - 0xb5,0x56,0xb5,0xd6,0xaa,0x8a,0x03,0x00,0x02,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0xe0,0xfd,0xfe,0xff,0xbf,0x6f,0xbf,0xdf,0xde,0xff, - 0x7b,0x4f,0x3d,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0xf1,0xaa, - 0xaa,0xaa,0x2a,0x55,0x32,0x55,0x25,0xa4,0xda,0xd6,0x6e,0xdb,0xb6,0x6d,0xb5, - 0xdd,0xda,0x56,0x6b,0x5b,0x55,0x6b,0x0b,0x06,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0xea,0xd7,0xde,0xfb,0xff,0xed,0xfd, - 0xff,0x6e,0xef,0x05,0x80,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x04, - 0x2c,0xaa,0xaa,0x54,0xaa,0x24,0x95,0x48,0x12,0xd8,0x6f,0xbb,0xb5,0xad,0xda, - 0xb6,0xdb,0xaa,0x6d,0xb5,0xad,0xd5,0x6a,0xad,0x0a,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0xff,0xff,0x7b,0xdf,0xfd, - 0xbf,0x6f,0xf7,0xfb,0x7f,0x0f,0xe0,0x1b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0xa5,0x55,0xaa,0xaa,0x92,0xaa,0x4a,0x25,0x09,0x62,0xf5,0xd6,0xde, - 0xf6,0x6b,0xd5,0x56,0xdd,0x56,0x5b,0xb5,0xaa,0x56,0x55,0x15,0x10,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x77,0xff,0xff, - 0x7d,0xb7,0xfb,0xfe,0xbd,0xbf,0xed,0x05,0x01,0x0a,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xc0,0xb7,0x54,0x49,0x95,0xaa,0x4a,0xa9,0x94,0x2a,0xa8,0xae, - 0x7b,0x6b,0x9b,0xb6,0xbb,0x7a,0x6b,0xb5,0xd5,0xd6,0xb6,0xaa,0x6a,0x2b,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc0,0xff, - 
0x5b,0xef,0xff,0xff,0xff,0xf7,0xf7,0xee,0xbf,0x4f,0x03,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x40,0x93,0xaa,0xa6,0x52,0x55,0x51,0x94,0x52,0x12, - 0xc0,0x75,0xad,0xb5,0x6d,0xdb,0xd6,0xad,0x55,0x5b,0xad,0xaa,0xaa,0x6d,0x5b, - 0x15,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0xe0,0xde,0xff,0xff,0xef,0xdf,0xde,0x5f,0xdf,0x7b,0xdb,0x15,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0xab,0xaa,0x42,0x4a,0x29,0x2d,0x4b, - 0x29,0x25,0x60,0xbf,0xed,0xde,0xb6,0x6d,0xbb,0xd6,0xbe,0xd5,0xb6,0x6d,0x5b, - 0xb5,0xaa,0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x7f,0xff,0xbb,0xbe,0xfb,0xf7,0xfd,0xff,0xef,0x05,0x06,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x55,0xc5,0x8d,0x2b,0xa5, - 0xc2,0x28,0x95,0x04,0xa8,0xff,0x5a,0x6b,0xdb,0xb6,0x6d,0x6b,0xd3,0x6e,0xd5, - 0x56,0xad,0xaa,0xd5,0x0a,0x80,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0xf8,0xfb,0xb7,0xff,0xfb,0x7f,0xbf,0xff,0xed,0xfe,0x01, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf8,0xff,0x0b,0x00,0x0b, - 0x7c,0x55,0x35,0xa5,0xa4,0x02,0xd4,0xbf,0xa2,0xbb,0x6d,0xdb,0xb6,0xbd,0x6d, - 0xb5,0xb6,0xda,0x6a,0xad,0xb6,0x00,0xe0,0x04,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0xfe,0xff,0xff,0xfe,0xef,0xed,0xfb,0x6e,0xbf, - 0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x68,0xac,0x08, - 0x00,0xf0,0xa0,0xa2,0x95,0x2a,0x70,0x09,0xd5,0x57,0xe8,0x6d,0xb7,0x6d,0xdb, - 0xd6,0xb6,0x56,0x5b,0x6b,0xab,0xb5,0xaa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x7d,0xb7,0xfd,0xb7,0xbf,0xff,0xff, - 0xff,0xfb,0x85,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8, - 0x55,0x05,0x00,0xe8,0xc0,0xd2,0x55,0x95,0xd0,0x05,0xaa,0xaf,0xa0,0xab,0xdd, - 0xb6,0x6d,0x6b,0xdb,0xeb,0xaa,0x55,0xb5,0xaa,0xd5,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x80,0xff,0xff,0xef,0xff,0xfe, - 
0x7f,0xb7,0xdb,0xef,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xac,0xaa,0x00,0x40,0x00,0x91,0x02,0xc2,0x25,0xf4,0x96,0xb6,0x5b,0xfd, - 0xaf,0xb6,0xdb,0xb6,0xba,0x55,0x5d,0xdb,0xba,0x56,0x81,0x5a,0x00,0x00,0x06, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xdf,0xff,0x7f, - 0xff,0xfb,0xed,0xff,0x7e,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x78,0x55,0x00,0x20,0x80,0x00,0x03,0xe9,0x0b,0xe8,0x6f,0x5b, - 0x35,0xfe,0xc3,0xfd,0x6f,0xdb,0xd7,0xee,0xaa,0xad,0x56,0xab,0x01,0x70,0x01, - 0x00,0x0e,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x7f, - 0xdb,0xfd,0xed,0xbf,0xff,0xfd,0xfb,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf8,0x0b,0x00,0x00,0x60,0x00,0x45,0xff,0x85,0x52, - 0xb5,0xed,0xde,0x5e,0xd5,0xff,0xbf,0x56,0x6d,0x5b,0xdb,0x76,0xab,0xb5,0x2a, - 0x80,0x06,0x00,0x0a,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x80,0xff,0xff,0xf7,0xbf,0xff,0xbe,0xdf,0xef,0x37,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x05,0xfa,0xff,0x00,0x00,0x00,0x10, - 0xc6,0x7f,0x6f,0xb7,0xb5,0x76,0xd5,0xff,0xae,0xbb,0xb6,0xb5,0xad,0xaa,0x6d, - 0xd5,0x5a,0x00,0x06,0x00,0x0f,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0xf7,0xff,0xdf,0xff,0xf7,0xf7,0x06,0x04,0x80,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x06,0x80,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0xde,0x6d,0xff,0xfe,0xde,0xff,0x07,0xbf,0xcf,0x03, - 0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x00,0x00,0x80,0x07,0x00, - 0x00,0xf0,0x00,0x00,0x70,0x00,0x00,0xfc,0x0f,0x00,0x00,0x1c,0x00,0x00,0xf0, - 0xf8,0xfc,0x01,0x1e,0xff,0x1f,0x00,0x04,0x70,0x15,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xff,0xff,0xf7,0xff,0xff,0x87,0x7f, - 
0xef,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x03,0x00,0x00,0x00,0x00,0x80, - 0x07,0x00,0x00,0x70,0x00,0x00,0x78,0x00,0x00,0xfc,0x07,0x00,0x00,0x1e,0x00, - 0x00,0x78,0xfc,0xfd,0x07,0x9f,0xff,0x1f,0x00,0x00,0xac,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xff,0xfb,0xff,0x7f,0xdb, - 0xc6,0xff,0x8f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x07,0x00,0x00,0x00, - 0x00,0xc0,0x07,0x00,0x00,0xf0,0x00,0x00,0x30,0x00,0x00,0xfc,0x0f,0x00,0x00, - 0x1e,0x00,0x00,0x78,0xfe,0xfd,0x0f,0x3f,0xdf,0x3f,0x00,0x80,0x07,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0xbc,0x6f,0xdf, - 0xfb,0xff,0xc7,0x63,0x0f,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xbc,0x07,0x00, - 0x08,0x00,0x00,0x80,0x07,0x00,0x00,0x70,0x00,0x00,0x00,0x00,0x00,0xe0,0x01, - 0x00,0x00,0x1e,0x00,0x00,0x3c,0xdf,0xdd,0x0f,0x3f,0x7c,0x7c,0x00,0x00,0x0a, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0xf6, - 0xff,0x7f,0xff,0x5f,0xe2,0x83,0x0f,0x00,0x00,0x00,0x80,0x03,0x00,0x00,0x3c, - 0x0f,0x00,0x1c,0x00,0x00,0xc0,0x07,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0xe0,0x01,0x00,0x00,0x1e,0x00,0x00,0x1c,0x9f,0x7d,0x9f,0x3f,0x7c,0x78,0x08, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x40,0xfd,0xfe,0xfd,0x5f,0x3d,0xe1,0x03,0xcf,0xf1,0xf3,0xc0,0x81,0x0f,0x07, - 0x00,0x3c,0x0f,0x06,0x1e,0x1c,0x00,0xc0,0x0f,0x32,0x60,0xf0,0x04,0xc1,0x20, - 0x10,0x00,0xe0,0x01,0x02,0x08,0x1e,0x04,0x00,0x3e,0x0f,0x7c,0x9f,0x3f,0xfc, - 0x78,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x80,0xfc,0xff,0xff,0x05,0x00,0xe0,0x83,0xef,0xfb,0xff,0xf1,0xc7, - 0xcf,0x0f,0x00,0x34,0x8e,0x1f,0x7e,0x3e,0x00,0xc0,0x8f,0x7f,0xf8,0x71,0xce, - 0xf3,0x79,0x7e,0x00,0xe0,0xc1,0x0f,0x3f,0xbe,0x1f,0x00,0x9e,0x0f,0x3c,0x9b, - 0x7f,0x7c,0xf8,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0xe0,0xe0,0xef,0xff,0x01,0x80,0xe0,0x01,0xcf,0xf3,0xff, - 
0xfb,0xe7,0xcf,0x1f,0x00,0x3c,0xcf,0x3f,0x7f,0x7f,0x00,0xe0,0x8f,0x7f,0xfc, - 0xf1,0xde,0xfb,0x79,0x7e,0x00,0xe0,0xe1,0x1f,0x7f,0xde,0x1f,0x00,0x9e,0x0f, - 0x7c,0x9f,0x7f,0xfc,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xc3,0x7e,0xf7,0x22,0x09,0xe2,0x83,0xcf, - 0xfb,0xde,0x73,0xcf,0xef,0x1d,0x00,0x3c,0xce,0x3d,0x1f,0x77,0x00,0xe0,0x8e, - 0xf7,0xdc,0xf3,0xde,0xfb,0x79,0x6f,0x00,0xe0,0xe1,0x9f,0x7f,0xde,0x1f,0x00, - 0x9e,0x0f,0x7c,0x9f,0x7d,0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xa1,0xff,0xbf,0x08,0x20,0xe0, - 0x01,0xef,0xfb,0xde,0x1b,0xcf,0xe3,0x1f,0x00,0x3c,0xcf,0x38,0x1e,0xf1,0x00, - 0xe0,0x8e,0xf7,0xc4,0x71,0xfc,0x79,0x78,0x1e,0x00,0xe0,0xf1,0x9e,0xff,0xde, - 0x17,0x00,0x9f,0x0f,0x7c,0xdf,0x7f,0xfc,0xf8,0x80,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x83,0xfb,0x7f,0x81, - 0x04,0xe1,0x83,0xcf,0x73,0xde,0xdb,0x87,0xe3,0x1f,0x00,0x3c,0x4f,0x3e,0x0e, - 0x78,0x00,0xe0,0x9e,0xf3,0xc4,0xf3,0xfc,0xf9,0x78,0x3e,0x00,0xe0,0xf1,0xde, - 0xf7,0xbe,0x0f,0x00,0x9e,0x0f,0x5c,0xdf,0xff,0x7c,0xf8,0x80,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x0c,0xff, - 0xbf,0x50,0x50,0xe4,0x8b,0xef,0x7b,0xdf,0xe3,0xcf,0xe3,0x1f,0x00,0x3c,0x1f, - 0x3f,0x1e,0xfe,0x00,0xe0,0x1f,0xd7,0xf0,0xf3,0xfc,0xf1,0x78,0x3c,0x00,0xe0, - 0xf1,0xbe,0xf7,0x9e,0x0f,0x00,0x9f,0x9f,0x7c,0xdf,0xdf,0x7c,0xf9,0x81,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xbe,0x3b,0x05,0x05,0xe1,0x03,0xcf,0xfb,0xde,0xf3,0xcf,0xe3,0x15,0x00, - 0x3c,0xcf,0x3f,0x0e,0x7f,0x00,0xf0,0x9f,0xf7,0xfc,0x73,0xf8,0xe0,0x79,0x7c, - 0x00,0xe0,0xf1,0xde,0xf7,0x3e,0x1f,0x00,0x1e,0x1f,0x7c,0xdf,0xff,0xfc,0xf8, - 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xfc,0xdf,0xa8,0xfc,0xc2,0xe3,0xef,0x7b,0xde,0x7b,0x8f,0xe3, - 
0x11,0x00,0x3c,0xcf,0x3b,0x0e,0xf7,0x00,0xf0,0x9f,0xf7,0xdc,0xf1,0xf8,0xc8, - 0x79,0x79,0x00,0xe0,0xf1,0xbe,0xbf,0xde,0x3e,0x00,0x1f,0x1f,0x7f,0xff,0xff, - 0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xff,0xbf,0x12,0x7f,0xc0,0x7f,0xcf,0xf3,0xdf,0xfb, - 0xdf,0xcb,0x1b,0x00,0xfc,0xc7,0x7d,0xbe,0xf7,0x01,0x70,0x9c,0x77,0xde,0xf7, - 0xf0,0xdc,0x7b,0xf7,0x00,0xe0,0xe1,0x9f,0xff,0xfe,0x3f,0x00,0x1e,0xff,0xfd, - 0xef,0xfb,0xfc,0xf8,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfe,0xff,0xa5,0xbe,0xc6,0xff,0xef,0x7b, - 0xde,0xfb,0x9f,0xdf,0x1f,0x00,0xfc,0xc7,0x7f,0x7e,0xff,0x01,0x70,0xbc,0xf6, - 0xfc,0xd7,0xf0,0xf8,0x7b,0x7e,0x00,0xe0,0xe1,0x9f,0x7f,0xde,0x1f,0x00,0x3e, - 0xfe,0xff,0xef,0xfb,0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0xbf,0xfb,0x57,0x81,0x7f, - 0xcf,0xfb,0xff,0x73,0xdf,0x9f,0x1f,0x00,0xfc,0xc3,0x7f,0x7e,0xbf,0x01,0x70, - 0xbc,0xf3,0xfc,0x77,0x7a,0xfc,0x79,0x7f,0x00,0xe0,0xe1,0x0f,0x7d,0xde,0x1f, - 0x00,0x1e,0xfe,0xff,0xe7,0xf3,0xfd,0xf8,0x00,0x02,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x2f, - 0x05,0xbe,0xef,0x73,0xde,0xe3,0x0e,0x0f,0x1f,0x00,0x6c,0x00,0x33,0x38,0xee, - 0x00,0x78,0xb8,0xd7,0xb8,0xf3,0x78,0xf1,0x78,0x3e,0x00,0xe0,0x83,0x17,0x3f, - 0xbe,0x1f,0x00,0x3e,0xf8,0x7c,0xe1,0xfb,0xfd,0xf8,0x00,0x06,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x44,0xf4, - 0xff,0xff,0x07,0x00,0x00,0x00,0x00,0x08,0x00,0x40,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x00,0x00,0xf8,0x00,0x06,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x09,0xf8,0xff,0x07,0x00,0x00,0x88,0x00,0x80,0x24,0x00,0x00,0x00,0x00, - 
0x08,0x48,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3c,0x08,0x00,0x00,0x00, - 0x00,0x00,0x90,0xa0,0x00,0x00,0x00,0x3c,0x00,0x00,0x09,0x00,0x00,0x7c,0x00, - 0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x20,0xa4,0xf5,0xff,0x07,0x00,0x00,0x00,0x48,0x22,0x00,0x01,0x24, - 0x00,0x00,0x41,0x00,0x02,0x90,0x00,0x00,0x10,0x40,0x00,0x24,0xbc,0x00,0x00, - 0x40,0x00,0x00,0x08,0x04,0x04,0x92,0x24,0x00,0x3c,0x02,0x00,0x20,0xa9,0x00, - 0x7c,0x02,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x89,0x52,0xaa,0xfe,0x07,0x00,0x48,0x21,0x02,0x00,0x12, - 0x20,0x00,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x02,0x09,0x00,0x01,0x3c, - 0x80,0x44,0x08,0x00,0x00,0x00,0x40,0x20,0x00,0x00,0x00,0x78,0x20,0x40,0x02, - 0x00,0x20,0x3c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x50,0x55,0xe5,0x07,0x00,0x00,0x00,0x80, - 0x00,0x00,0x00,0x08,0x00,0x00,0x02,0x01,0x00,0x40,0x00,0x00,0x00,0x00,0x11, - 0x00,0x3c,0x00,0x00,0x00,0x00,0x00,0x20,0x08,0x80,0x00,0x00,0x00,0x78,0x80, - 0x00,0x00,0x04,0x02,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xaa,0xaa,0xda,0x07,0x10,0x00, - 0x00,0x00,0x08,0x08,0x00,0x00,0x00,0x00,0x00,0x04,0x20,0x00,0x00,0x00,0x08, - 0x00,0x00,0x20,0x1e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x88,0x08,0x00, - 0xf0,0x00,0x88,0x48,0x20,0x00,0x1f,0x00,0x60,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x24,0x84,0x54,0xd5,0x06, - 0x80,0x00,0x00,0x08,0x40,0x40,0x04,0x00,0x00,0x00,0x00,0x80,0x80,0x00,0x00, - 0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x80,0xb2,0xaa, - 0xaa,0xff,0xff,0xff,0xff,0xff,0x57,0xab,0xf5,0x77,0xab,0xfd,0xde,0xba,0x5e, - 
0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xdf,0xfb, - 0xef,0xb6,0xed,0xfe,0xaa,0x5b,0x25,0x81,0x01,0x16,0x00,0x00,0x60,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20, - 0x88,0xaa,0xaa,0xea,0xff,0xff,0xff,0xbf,0xaa,0x56,0x95,0x54,0x55,0xa5,0x52, - 0xd5,0x4a,0xfd,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff, - 0x53,0x95,0x98,0xaa,0xaa,0x6a,0xb7,0xa4,0x12,0xa4,0x05,0x00,0x00,0x00,0x00, - 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x08,0xa5,0x54,0x55,0xad,0xfa,0xff,0xff,0xff,0x5f,0x55,0x55,0x55,0x55, - 0x95,0xaa,0xaa,0xb2,0xf6,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff, - 0xff,0xff,0x2a,0x55,0x67,0x55,0x55,0x55,0xaf,0xaa,0x94,0x00,0x0a,0x00,0x00, - 0x00,0x40,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x40,0x92,0xaa,0xaa,0x52,0xf6,0xff,0xff,0xff,0xf7,0xaf,0xaa, - 0xaa,0xaa,0x6a,0x55,0x55,0x55,0xd5,0xff,0xff,0xef,0xff,0xff,0xff,0xff,0xff, - 0xff,0xff,0xff,0xff,0x55,0x55,0xa9,0xaa,0xaa,0xaa,0x5e,0x55,0x42,0x08,0x38, - 0x00,0x00,0x78,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x02,0x28,0x91,0xaa,0xaa,0xfa,0xff,0xff,0xdb,0xfe, - 0xbb,0xaa,0xaa,0xaa,0x2a,0x55,0x95,0xaa,0x54,0xfe,0xb5,0x92,0xff,0xff,0xff, - 0xff,0xff,0xff,0xff,0xff,0x7f,0x55,0x55,0x95,0xaa,0xaa,0xaa,0xaa,0x92,0x14, - 0xfc,0x38,0x00,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x28,0x85,0x54,0x49,0xaa,0x6a,0xf7,0xbf, - 0xff,0xdf,0x6f,0x55,0x4a,0x55,0xa9,0x92,0x52,0x4a,0xa5,0x2a,0x55,0x55,0xda, - 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x3f,0x55,0x49,0xaa,0x54,0x95,0x54,0x49, - 0x4a,0x42,0x71,0x28,0x00,0x00,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x01,0x00,0x50,0xa5,0x54,0x49,0xfd, - 0x7f,0xfb,0xff,0xff,0x7e,0x49,0xa9,0x24,0xa5,0xaa,0x54,0xa9,0x2a,0x95,0x94, - 
0x54,0xa5,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x5f,0x29,0x55,0xa9,0x4a,0x52, - 0x25,0xa5,0x24,0x11,0xc0,0x67,0x00,0x80,0x57,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xff,0x00,0x09,0x12,0x92, - 0x2a,0xff,0xff,0xff,0xbd,0xfb,0x5b,0x25,0x25,0x92,0x14,0x49,0x12,0x15,0x91, - 0x44,0x4a,0x8a,0x52,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x42,0x45,0x2a,0x25, - 0x22,0x89,0x92,0x54,0x92,0x08,0x80,0x4b,0x00,0xf8,0x5a,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x70,0xf8,0xff,0xa2, - 0x48,0x49,0xa0,0xff,0xee,0xdf,0xf7,0xdf,0xff,0x92,0x48,0x49,0xa2,0x24,0xa5, - 0x40,0x4a,0x29,0x21,0x51,0xc8,0xff,0xff,0xff,0xff,0xff,0xff,0x7f,0x29,0x28, - 0x41,0x92,0x94,0x54,0x48,0x09,0x25,0x42,0x02,0x5e,0x00,0x56,0x15,0x10,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0xfe,0xff,0x22,0x92,0xca,0xef,0x7f,0x7b,0xdf,0x7d,0x5f,0x5f,0x92,0xa4,0x14, - 0x92,0x48,0xaa,0x24,0x92,0x94,0x24,0xa5,0xff,0xff,0xff,0xff,0xff,0xff,0x2e, - 0x45,0x45,0x2a,0x49,0x49,0x22,0x25,0x52,0x88,0x10,0x00,0x78,0x00,0xde,0x0a, - 0x00,0x00,0x60,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0xc5,0xff,0x9f,0x44,0xe4,0xbf,0xfb,0xff,0xff,0xf7,0x77,0xeb,0x45, - 0x12,0xa2,0x48,0x12,0x11,0x90,0x44,0x22,0x89,0xc8,0x7f,0xdf,0xfd,0xff,0xff, - 0xf7,0xaf,0x90,0x10,0x81,0x10,0x22,0x89,0x88,0x04,0x21,0x42,0x00,0xa8,0x00, - 0x54,0x0d,0x2c,0x00,0x20,0xc0,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0xf0,0xee,0x0f,0xf0,0x5f,0x10,0xa1,0xfb,0xdf,0xef,0x7b,0xdf,0xfd, - 0x7f,0x29,0x80,0x08,0x02,0x40,0x44,0x05,0x20,0x08,0x20,0x82,0xff,0xff,0x77, - 0xef,0xdf,0xff,0x17,0x0a,0x44,0x28,0x84,0x08,0x20,0x22,0x50,0x04,0x00,0x08, - 0x70,0x00,0xa8,0x05,0x06,0x00,0xc0,0xe0,0x01,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0xf0,0xb6,0x0c,0x00,0x3e,0x42,0xc8,0xfe,0x7e,0xbd,0xde, - 0x7d,0xdf,0xda,0xd6,0x2b,0xa0,0x50,0x15,0x11,0xa0,0x8a,0x42,0x85,0x50,0xfe, - 
0xff,0xff,0xbf,0x7d,0xdf,0x89,0xa0,0x12,0x82,0x22,0xa2,0x8a,0x88,0x04,0x21, - 0x09,0x00,0xa0,0x01,0x78,0x0f,0x16,0x80,0xc7,0x7e,0xfe,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xda,0x0e,0x8c,0x98,0x08,0xe2,0xaf,0xeb, - 0xf7,0xfb,0xf7,0xf7,0xf7,0xff,0x45,0x05,0x04,0x40,0x00,0x09,0x00,0x10,0x10, - 0x04,0xf0,0xfb,0xff,0xff,0xff,0xff,0x00,0x04,0x80,0x10,0x08,0x00,0x00,0x00, - 0x10,0x04,0x20,0x00,0xc0,0x02,0x00,0x00,0x08,0x00,0x00,0xb4,0x7f,0x01,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xb8,0x42,0x1b,0x18,0x20,0xc0, - 0xfb,0xbf,0xdf,0xdf,0xbe,0xba,0x5e,0x55,0xb7,0x10,0x00,0x00,0x44,0x00,0x20, - 0x02,0x40,0x10,0xe1,0xef,0xff,0xfd,0xff,0x7f,0x48,0x10,0x10,0x42,0x20,0x11, - 0x44,0x24,0x41,0x10,0x80,0x00,0x00,0x02,0x00,0x00,0x20,0x00,0x00,0x00,0xff, - 0x04,0x14,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xac,0xa0,0x7c,0x18, - 0x00,0xe4,0x7e,0xfb,0x7a,0xed,0xeb,0xef,0xf7,0xff,0x56,0x41,0x49,0x12,0x00, - 0x44,0x09,0x88,0x04,0x40,0xb4,0xff,0xbd,0xff,0x7d,0x7b,0x02,0x40,0x02,0x00, - 0x01,0x80,0x00,0x01,0x00,0x80,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x00,0x00, - 0x00,0xbf,0x52,0x0e,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xf4,0x20, - 0xee,0x1b,0x82,0x80,0xdb,0xde,0xdf,0x7f,0xdf,0xba,0xba,0xaa,0xbb,0x00,0x00, - 0x00,0x11,0x00,0x00,0x00,0x00,0x01,0xe0,0xff,0xf7,0xdf,0xef,0xef,0x00,0x01, - 0x40,0x00,0x00,0x04,0x08,0x00,0x04,0x00,0x00,0x00,0x00,0xd0,0x0e,0x00,0x00, - 0x00,0x00,0x80,0xaf,0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x5c,0x00,0xbb,0x39,0x00,0x00,0xfe,0xeb,0xb6,0xd5,0xf6,0xef,0xdf,0xff,0xee, - 0x02,0x00,0x00,0x40,0x00,0x40,0x00,0x00,0x00,0xc0,0xbd,0xff,0xfb,0xff,0x7f, - 0x10,0x00,0x00,0x08,0x20,0x00,0x00,0x20,0x00,0x01,0x00,0x00,0x00,0x00,0x50, - 0xa0,0x00,0x00,0x00,0x00,0x7a,0x78,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x60,0x34,0x70,0xed,0x19,0x00,0x00,0xde,0xbe,0x7d,0xbf,0xbb,0xba,0xea, - 0xaa,0x5b,0x00,0x00,0x00,0x00,0x01,0x00,0x20,0x00,0x00,0xc0,0xf7,0x7f,0xff, - 
0xfd,0x7d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x40,0x03,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x60,0x38,0x50,0x5f,0x18,0x00,0x00,0xf4,0xef,0xd7,0xeb,0xde, - 0xdf,0x7f,0xf7,0x56,0x00,0x80,0x00,0x00,0x08,0x00,0x80,0x10,0x00,0xc0,0xff, - 0xf6,0xb7,0xb7,0xf7,0x04,0x08,0x02,0x00,0x02,0x20,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x60,0x0c,0xb8,0xf5,0x18,0x00,0x00,0xe0,0x7a,0x7d, - 0xbd,0xab,0xea,0xaa,0xad,0x0d,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x04, - 0x00,0xff,0xdf,0xff,0xff,0xff,0x02,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x08,0xa8,0xee,0x18,0x00,0x00,0xc0, - 0xaf,0xd7,0x6f,0xfd,0x5d,0xf7,0xde,0x06,0x00,0x02,0x00,0x00,0x40,0x00,0x00, - 0x00,0x00,0xc0,0xb7,0xff,0xdd,0xdd,0xdd,0x05,0x40,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0x01,0x70,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x08,0xf8,0x7b,0x19,0x00, - 0x00,0x40,0xf5,0xbd,0xda,0x57,0xb7,0xad,0x75,0x01,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xe0,0xff,0x7b,0xff,0xff,0xf7,0x00,0x40,0x01,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x5e,0x04, - 0x18,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x10,0xb8,0x5e, - 0x19,0x00,0x00,0x80,0x5f,0xeb,0xbd,0xfa,0xed,0xf6,0xae,0x03,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0xfd,0xee,0xdb,0x6d,0xff,0x05,0x60,0x01, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x88, - 0x57,0x01,0x58,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0x10, - 0xec,0x6b,0x1c,0x00,0x00,0x00,0xfa,0x5e,0xeb,0x57,0xbb,0xad,0xf5,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xd7,0xff,0x7f,0xff,0xbd,0x00, - 
0xfe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x80,0xaf,0x95,0x00,0x68,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0xe0,0x00,0xb8,0x69,0x1c,0x00,0x00,0x00,0xd0,0xeb,0x5e,0xdd,0xd6,0x76,0x5b, - 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8,0xff,0x7f,0xff,0xdd, - 0x6f,0x80,0x5f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x70,0xa5,0x54,0x15,0x16,0x01,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0xe0,0x40,0xf8,0x28,0x1c,0x00,0x00,0x00,0x40,0xbf,0xeb,0xb6,0x6d, - 0xab,0xad,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x7d,0xdb, - 0xeb,0x7f,0x0b,0x80,0xbf,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0xb5,0xaa,0x24,0x5e,0x01,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0xc0,0x00,0xe0,0x21,0x1c,0x00,0x00,0x00,0x00,0x6a,0xbd, - 0x6d,0xbb,0xdd,0x76,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0, - 0xdf,0x7f,0x7f,0xf7,0x07,0x80,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x4a,0xa5,0x52,0xa1,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x20,0xe1,0x00,0x1c,0x00,0x00,0x00,0x00, - 0xbe,0xd7,0xb6,0x6d,0x6b,0xab,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x60,0xfb,0xf6,0xf7,0x5f,0x00,0x00,0x1b,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xae,0xaa,0x54,0x2a,0x55,0x01,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0xe1,0x80,0x1c,0x00,0x00, - 0x00,0x00,0xd6,0x7a,0xdb,0xb6,0xbd,0xdd,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xc0,0xef,0xff,0xbe,0x7b,0x00,0xc0,0x2f,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x57,0x55,0x55,0x55,0x95, - 0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x40,0xc0,0x60,0x1c, - 0x00,0x00,0x00,0x00,0x7c,0xad,0x6d,0xdb,0xd6,0x6a,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x80,0x7f,0xdb,0xfb,0x3e,0x00,0xe0,0x16,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x57,0x55,0x55,0xa5, - 0x24,0x49,0x05,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x80,0xe0, - 0x00,0x1d,0x00,0x00,0x00,0x00,0xac,0xd7,0xb6,0x6d,0x6b,0x37,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xef,0xff,0xef,0x6f,0x00,0xc0,0x0b, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x55,0x55, - 0x2a,0x95,0xaa,0x54,0x12,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0x80,0xe0,0x00,0x1c,0x00,0x00,0x00,0x00,0xf6,0xba,0xdd,0xb6,0xb5,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xbd,0xdb,0xbe,0xbb,0x00, - 0xe0,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x70, - 0x55,0x55,0x55,0x55,0x55,0x25,0x49,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0x40,0x60,0x80,0x1d,0x00,0x00,0x00,0x00,0x5c,0xef,0x76,0xdb,0x1e, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xff,0xfe,0x7b, - 0x6f,0x00,0xc0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x5c,0x55,0x55,0x55,0x55,0x49,0x55,0x25,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0x40,0x60,0x40,0x1f,0x00,0x00,0x00,0x00,0xee,0x5a,0xab, - 0x6d,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xef, - 0xb7,0xff,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xb8,0xaa,0xaa,0xaa,0x54,0xaa,0xa4,0x94,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0xc0,0x1f,0x00,0x00,0x00,0x00,0xb4, - 0xed,0x7d,0xab,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xbe,0xff,0xdb,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x54,0x55,0x55,0xa5,0xaa,0x4a,0x95,0x52,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa0,0x1e,0x00,0x00,0x00, - 0x00,0xdc,0x56,0xab,0xdd,0x0a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xfb,0xdd,0xfe,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa8,0xaa,0xaa,0xaa,0x4a,0x29,0x55,0xaa, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x06,0x00,0xc0,0x1e,0x00, - 0x00,0x00,0x00,0x76,0xfb,0x76,0x6b,0x13,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x7e,0xff,0x6f,0x03,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x55,0x25,0x55,0xa9,0xaa, - 0x52,0x85,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x00, - 0x1e,0x00,0x00,0x00,0x00,0xdc,0xad,0xad,0xbd,0x09,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x75,0xfb,0x01,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x55,0x55,0x55, - 0x55,0x55,0x4a,0x29,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x1e,0x00,0x00,0x00,0x00,0xb4,0xd6,0x76,0x6b,0x01,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xff,0x1f,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa8,0xaa, - 0xaa,0xae,0x2a,0x49,0x29,0x05,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x00,0xc0,0x07,0x00,0x1f,0x00,0x00,0x00,0x00,0xd8,0xbb,0xad,0x5d,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xdf,0x0e,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x54,0x55,0x3d,0xe0,0xaa,0xaa,0xa6,0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x3f,0x00,0x1f,0x00,0x00,0x00,0x00,0xb8,0x6e,0xf7,0xb6, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xf6, - 0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xa8,0xaa,0x02,0x80,0xa9,0x2a,0x99,0x02,0x00,0x00,0x08,0xf0,0x00, - 0x00,0x00,0x00,0x00,0x1e,0x00,0xf8,0x01,0x1f,0x00,0x00,0x00,0x00,0x78,0xdb, - 0x5a,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xe0,0x7f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0xbe,0x25,0x00,0x80,0x55,0xa9,0x54,0x00,0x00,0x00,0x5a, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x7c,0x00,0xe0,0x07,0x1f,0x00,0x00,0x00,0x00, - 0xec,0x75,0x6f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x24,0x55,0x25,0x00,0x00, - 0x00,0x18,0xf0,0x00,0x00,0x00,0x00,0x00,0xf8,0x01,0x80,0x3f,0x1f,0x00,0x00, - 0x00,0x00,0xb8,0xde,0xda,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xaa,0x4a,0x05, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc0,0x07,0x00,0x7e,0x1f, - 0x00,0x00,0x00,0x00,0xdc,0xab,0x6d,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb0, - 0xaa,0x02,0x00,0x00,0x00,0x07,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x1f,0x00, - 0x70,0x1e,0x00,0x00,0x00,0x00,0x6e,0x7d,0xdb,0x02,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x70,0x55,0x03,0x00,0x00,0x80,0x01,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0xfc,0x00,0xc0,0x0e,0x00,0x00,0x00,0x00,0xb8,0xd7,0x4d,0x01,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x40,0x19,0x00,0x00,0x00,0xc0,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x03,0x00,0x07,0x00,0x00,0x00,0x00,0xf8,0x7a,0x07,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x60,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0xc0,0x0f,0x00,0x08,0x00,0x00,0x00,0x00,0x68,0xaf,0x01, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x00,0x00,0x00,0x94, - 0xda,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x00,0x00, - 0x00,0xa0,0x77,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x07,0x00,0x00, - 0x00,0x00,0x00,0xf0,0xdd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x0f, - 0x00,0x00,0x00,0x00,0x00,0xd0,0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x7e,0x00,0x00,0x00,0x00,0x00,0xf0,0x1b,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x00,0x00,0x80,0xb6,0x04,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xe0,0x07,0x00,0x00,0x00,0x00,0x00,0x2f,0x01,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x1f,0x00,0x00,0x00,0x00,0x80,0xfb, - 0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x00, - 0x80,0x56,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3c,0x00,0x00, - 0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30, - 0x00,0x00,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0x15,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0a,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0}; diff --git a/images/viewmag+.gif b/images/viewmag+.gif deleted file mode 100644 index 6daba52333..0000000000 Binary files a/images/viewmag+.gif and /dev/null differ diff --git a/images/viewmag-.gif b/images/viewmag-.gif deleted file mode 100644 index e2dc98fa27..0000000000 Binary files a/images/viewmag-.gif and /dev/null differ diff --git 
a/images/viewmag-.png b/images/viewmag-.png deleted file mode 100644 index 8108ecd7b0..0000000000 Binary files a/images/viewmag-.png and /dev/null differ diff --git a/installation/DAP.py b/installation/DAP.py deleted file mode 100644 index ca1edb3786..0000000000 --- a/installation/DAP.py +++ /dev/null @@ -1,29 +0,0 @@ -# Edit this configuration file before building. -# Always build with --force after changing a configuration. -# You do not need to rebuild Python itself. -#print 'DAP' - -import os -current_dir = os.path.dirname(__file__) -src_dir = os.path.join(current_dir, '..') -installation_script_dir = os.path.join(src_dir, 'installation') - -sys.path.append(src_dir) -sys.path.append(installation_script_dir) - -CDMS_INCLUDE_DAP='yes' -CDMS_DAP_DIR="" -try: - import cdat_info - externals = cdat_info.externals -except: - externals = os.path.join(sys.prefix,"Externals") -externals = os.environ.get("EXTERNALS",externals) - -for o in sys.argv[1:]: - pth = o.lower().split('with-opendap=') - if len(pth)>1: - CDMS_DAP_DIR=pth[1] - -if CDMS_DAP_DIR is "": - CDMS_DAP_DIR=os.path.join(externals,'OpenDAP') diff --git a/installation/HDF.py b/installation/HDF.py deleted file mode 100644 index 23830d08e4..0000000000 --- a/installation/HDF.py +++ /dev/null @@ -1,26 +0,0 @@ -# To build on Linux with HDF: -# express_install /usr/local/cdat/somewhere --force --configuration installation/hdf.py -import os -current_dir = os.path.dirname(__file__) -src_dir = os.path.join(current_dir, '..') -installation_script_dir = os.path.join(src_dir, 'installation') - -sys.path.append(src_dir) -sys.path.append(installation_script_dir) - -CDMS_INCLUDE_HDF='yes' -CDMS_HDF_DIR="" -try: - import cdat_info - externals = cdat_info.externals -except: - externals = os.path.join(sys.prefix,"Externals") -externals = os.environ.get("EXTERNALS",externals) - -for o in sys.argv[1:]: - pth = o.lower().split('with-hdf4=') - if len(pth)>1: - CDMS_HDF_DIR=pth[1] - -if CDMS_HDF_DIR is "": - 
CDMS_HDF_DIR=os.path.join(externals,'HDF') diff --git a/installation/cdmsonly.py b/installation/cdmsonly.py deleted file mode 100644 index 90ea118248..0000000000 --- a/installation/cdmsonly.py +++ /dev/null @@ -1,16 +0,0 @@ -packages = [ - "Packages/AutoAPI", - "Packages/cdtime", - "Packages/regrid2", -# "Packages/regrid", - "Packages/Properties", - "Packages/kinds", - "Packages/cdms2", - "Packages/genutil", - "Packages/cdutil", - "Packages/unidata", -# "Packages/cdms", - "Packages/ncml", - "Packages/esg", - "Packages/distarray", - ] diff --git a/installation/contrib.py b/installation/contrib.py deleted file mode 100644 index 606aaf5fdf..0000000000 --- a/installation/contrib.py +++ /dev/null @@ -1,73 +0,0 @@ -import os -dostandard = force -## try: -## import Numeric, cdms -## except ImportError: -## dostandard = 1 -dostandard = 1 -import sys -if not 'clist' in locals().keys(): - clist=[] -## Format is [path,description,licence_file] -Clist = [ -# ['contrib/Sphinx','sphinx documentation builder','GNU'], -## ['contrib/zope-interface','zope interface','GNU'], -# ['contrib/Twisted','network computing tools','GNU'], -# ['contrib/Foolscap','RPC protocol for Python+Twisted','GNU'], -# ['contrib/ipython','an Enhanced Python Shell','GNU'], -# ['contrib/scipy','Scientific tools for Python (core only)','GNU'], - ['contrib/SP','A collection of Python modules that are useful for scientific computing.','LICENSE'], - ['contrib/cssgrid','An interpolation package for random data on the surface of a sphere based on the work of Robert Renka. 
cssgrid uses cubic splines to calculate its interpolation function.',''], - ['contrib/lmoments','56 routines for statistical analysis using L-moments','UC'], - ['contrib/ort','Reads in Oort data files','UC'], -# ['contrib/spherepack','A collection of programs for computing certain common differential operators and performing related manipulations on a sphere.',''], - ['contrib/asciidata','Reads in ASCII files with the ability to specify tab or comma or space delimited fields','Lib/ASV.py'], - ['contrib/eof','Calculates Empirical Orthogonal Functions of either one variable or two variables jointly','UC'], - ['contrib/trends','Computes variance estimate taking auto-correlation into account.',''], - ['contrib/binaryio','Handles binary or unformatted data',''], - ['contrib/regridpack','A collection of programs for linear or cubic interpolation in one, two, three or four dimensions.',''], - ['contrib/shgrid','An interpolation package for random data in 3-space based on the work of Robert Renka. shgrid uses a modified Shepard\'s algorithm to calculate its interpolation function',''], - ['contrib/dsgrid','A three-dimensional random data interpolator based on a simple inverse distance weighting algorithm.',''], - ['contrib/pyclimate','Provides functions to perform some simple IO operations, operations with COARDS-compliant netCDF files, EOF analysis, SVD and CCA analysis of coupled data sets, some linear digital filters, kernel based probability density function estimation and access to DCDFLIB.C library from Python.','GNU'], - ['contrib/ComparisonStatistics','Calculates statistics (e.g., correlations and RMS differences) that quantify differences between two datasets. 
Allows for ellaborated masking and regridding operations','UC'], - ['contrib/IaGraph','Package for Quick Interactive Graphing','GNU'], - ['contrib/MSU','Package to compute Equivalent MSU Temperatures','UC'], - ['contrib/EzTemplate','Package to generate VCS templates easily','GNU'], - ['contrib/ZonalMeans','Package to compute zonal means on any grid (requires f90 compiler)','GNU'], - ['contrib/HDF5Tools','Package to read HDF5 files into CDAT (requires h5dump binary utility)','GNU'], -# following is now built via externals -# ['contrib/eof2','',''], -# ['contrib/eofs','',''], -# ['contrib/windspharm','','GNU'], -] - -# natgrid has illegal C comments but gcc lets them through... -# we need to fix it. -NCARG_ROOT = os.environ.get('NCARG_ROOT') -NCARG_COLORMAP_PATH = os.environ.get('NCARG_COLORMAP_PATH') -if NCARG_COLORMAP_PATH or NCARG_ROOT : - Clist.append(['contrib/pyncl','Generate NCL plots of cdms transient variables','']) - - -if sys.platform == "linux2" or sys.platform == 'darwin': - Clist.append(['contrib/natgrid','A two-dimensional random data interpolation package based on Dave Watson\'s nngridr','']) - -if '--enable-R' in sys.argv or '--enable-r' in sys.argv: - Clist.append(['contrib/Rpy','Python Interface to the R library','GNU']) - -if '--enable-ioapi' in sys.argv : - Clist.append(['contrib/pyIoapi','Python Interface to the IoAPI library','GNU']) - Clist.append(['contrib/egenix',"Collection of tools which enhance Python's usability in many important areas such as ODBC database connectivity, fast text processing, date/time processing and web site programming.",'LICENSE']) - Clist.append(['contrib/ioapiTools','ioapiTools developped by Alexis Zubrow form University of Chicago','GNU']) - -if '--enable-spanlib' in sys.argv : - Clist.append(['contrib/spanlib','Package to do Spectral analysis','GNU'],) - -if not dostandard: - packages = [] - -for c in Clist: - clist.append(c) - packages.append(c[0]) - - diff --git a/installation/control.py 
b/installation/control.py deleted file mode 100644 index 49ed5d9af6..0000000000 --- a/installation/control.py +++ /dev/null @@ -1,72 +0,0 @@ -# This file is used to control the behavior of install.py. - -# The search path is used if the X11 directories aren't configured. -x11search = ['/usr/X11R6', '/usr/X11R6.5.1', - '/usr/X11R6.4','/usr','/usr/openwin','/opt'] -# Here is where they are on OSF1 and perhaps similar systems -x11OSF1lib = ['/usr/lib/X11', '/usr/lib'] -x11OSF1include = ['/usr/include/X11'] - -# Controlling the install itself -force=0 # Force a complete recompilation? -norun=0 # Cause _install just to echo command? -echo=0 # Echo installation commands before executing? -log=1 # Make logs? -silent = 0 # Report progress? - -import os,sys -current_dir = os.path.dirname(__file__) -build_dir = os.getcwd() -sys.path.append(build_dir) -src_dir = os.path.join(current_dir, '..') -installation_script_dir = os.path.join(src_dir, 'installation') - -sys.path.append(src_dir) -sys.path.append(installation_script_dir) - -# Configuration -do_configure = 1 -if os.path.isfile(os.path.join(build_dir,'cdat_info.py')): - try: - import cdat_info - do_configure = 0 - except: - pass - -finish=""" -****************************************************** -Success! CDAT has been installed in %s . 
-Make sure all Packages built successfully -****************************************************** - -""" %(sys.prefix,) - -# Options used for building setup.py, install_script, make -if os.environ.has_key('MAKE'): - make_code = os.environ['MAKE'] -else: - make_code = 'make' - -# List of packages to be built -packages = [ - "Packages/pydebug", - "Packages/cdtime", - "Packages/demo", - "Packages/help", - "Packages/regrid2", - "Packages/cdms2", - "Packages/esg", - "Packages/ncml", - "Packages/DV3D", - "Packages/vcs", - "Packages/vcsaddons", - "Packages/cdutil", - "Packages/unidata", - "Packages/xmgrace", - "Packages/genutil", - "Packages/Thermo", - "Packages/WK", - "Packages/gui_support", - "Packages/distarray", - "Packages/testing", - ] diff --git a/installation/debug.py b/installation/debug.py deleted file mode 100644 index 87fcd2bc9f..0000000000 --- a/installation/debug.py +++ /dev/null @@ -1,12 +0,0 @@ -## action['setup.py'] = sys.executable + ' setup.py build --debug install' -## action['install_script'] = './install_script --debug ' + sys.exec_prefix -## for k in ['makefile','Makefile','MAKEFILE']: -## action[k] = make_code + " PREFIX='%s' DEBUG=1 install " % sys.exec_prefix - -# matplotlib depends on pkg-config under install/bin -action['setup.py'] = 'PATH=%s/bin:$PATH && %s setup.py build --debug install --prefix=%s ; ' \ - % (sys.exec_prefix, sys.executable, target_prefix) -action['install_script'] = './install_script %s %s --debug ; ' % (target_prefix, sys.exec_prefix) -for k in ['makefile','Makefile','MAKEFILE']: - action[k] = make_code + " PYPREFIX='%s' PREFIX='%s' DEBUG=1 install ; " % (sys.exec_prefix,target_prefix) -action['autogen.sh'] = "autogen.sh ; ./configure --prefix=%s --with-python=%s ; make ; make install ;" % (os.path.join(os.path.split(target_prefix)[0],'Externals'), os.path.join(sys.exec_prefix,'bin','python')) diff --git a/installation/irix.py b/installation/irix.py deleted file mode 100644 index 04e8318aa5..0000000000 --- 
a/installation/irix.py +++ /dev/null @@ -1,2 +0,0 @@ -x11include='/usr/include/X11' -x11libdir='/usr/lib/X11' diff --git a/installation/pcmdi.py b/installation/pcmdi.py deleted file mode 100644 index 141884ef28..0000000000 --- a/installation/pcmdi.py +++ /dev/null @@ -1,22 +0,0 @@ -# Edit this configuration file before building. -# Always build with --force after changing a configuration. -# You do not need to rebuild Python itself. -CDMS_INCLUDE_DRS='yes' -# if sys.platform=="linux2": -# COMPILER_EXTRA_LIBS=['pgftnrtl','pgc'] -# else: -# COMPILER_EXTRA_LIBS=[] -COMPILER_EXTRA_LIBS=["gfortran",] -#if sys.platform[0:3] == "aix": # and probably other platforms... -# CMDS_INCLUDE_QL = 'no' -#else: -# CDMS_INCLUDE_QL ='yes' - -# These don't actually get respected by the libcdms build yet. -# drs_file = '/usr/local/lib/libdrs.a' - -# Add on additional packages -#packages.append('Packages/psql') -#packages.append('Packages/cu') -#packages.append('Packages/pcmdi') - diff --git a/installation/pp.py b/installation/pp.py deleted file mode 100644 index 6c5abf9c0b..0000000000 --- a/installation/pp.py +++ /dev/null @@ -1,3 +0,0 @@ -# To build CDMS with support for the Met Office PP format: -# express_install /usr/local/cdat/somewhere --force --configuration=installation/pp.py -CDMS_INCLUDE_PP='yes' diff --git a/installation/psql.py b/installation/psql.py deleted file mode 100644 index d3b52b6ebc..0000000000 --- a/installation/psql.py +++ /dev/null @@ -1,3 +0,0 @@ -# Add on additional packages -CDMS_INCLUDE_QL ='yes' -packages.append('Packages/psql') diff --git a/installation/standard.py b/installation/standard.py deleted file mode 100644 index b86f594dc1..0000000000 --- a/installation/standard.py +++ /dev/null @@ -1,81 +0,0 @@ -# DO NOT EDIT THIS FILE -# Instead, make your own configuration file to override these values -# and use the -c option to read it. - -# This is the standard configuration file. It is read first by install.py. 
-# In your own configuration file you can use any Python statements to modify -# these values. - -# File pcmdi.txt is an example that shows the changes we use at PCMDI. - -# Append to packages to build additional packages, such as -# packages.append('cu') - -#This file is executed as Python input so you can compute values depending on -#platform, etc. Modules os, sys will be imported already. - -current_dir = os.path.dirname(__file__) -src_dir = os.path.join(current_dir, '..') -libcdms_dir = os.path.join(src_dir, 'libcdms') - -## This part figures out the target thing -target_prefix = sys.prefix -for i in range(len(sys.argv)): - a = sys.argv[i] - if a=='--prefix': - target_prefix=sys.argv[i+1] - sp = a.split("--prefix=") - if len(sp)==2: - target_prefix=sp[1] - - -# This is where we build netcdf, if you let us -#netcdf_directory = os.popen('%s --prefix' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0] -#netcdf_include_directory = os.popen('%s --includedir' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0] -#netcdf_include_directory= os.path.join(os.environ.get("EXTERNALS",os.path.join(sys.prefix,'Externals')),'include') - -# Control of the CDMS build -drs_file = '/usr/local/libdrs.a' # only needed if next line is 'yes' -CDMS_INCLUDE_DRS='no' # Change to 'yes' to include DRS. If yes: - # Assumes /usr/local/lib/libdrs.a exists. - # Assumes you have a Fortran compiler. -CDMS_INCLUDE_QL='no' # Include QL in build? - # Caution: if set to yes, CDMS library compile - # may fail on certain platforms, including AIX. -CDMS_INCLUDE_HDF='no' # Don't set to yes, doesn't work. -CDMS_INCLUDE_PP='no' # Met Office PP format is built in to cdunif. -# Location of X11 library -# If you set x11libdir (that is two ones and an el) AND x11include to point -# to the lib and include directories, they will be used. -# Otherwise a search is made for common locations. 
-if sys.platform in ['mac']: - x11libdir='/usr/X11R6/lib' - x11include='/usr/X11R6/include' -else: - x11libdir='' - x11include='' - -# List of math libraries -# We attempt to get the C math libraries right but if we don't fix it. -mathlibs= ['m'] #i.e., libm.a -if sys.platform in ['win32', 'mac', 'beos5']: - mathlibs = [] - -# Build actions -action = {} -## Commenting out pyfort not used anylonger (it's been years) -#if os.path.exists(os.path.join(target_prefix, 'bin', 'pyfort')): -# action['*.pfp'] = os.path.join(target_prefix, 'bin', 'pyfort') + " -i %(file)s ; " -#elif os.path.exists(os.path.join(sys.exec_prefix, 'bin', 'pyfort')): -# action['*.pfp'] = os.path.join(sys.exec_prefix, 'bin', 'pyfort') + " -i %(file)s ; " -#else: -# action['*.pfp'] = "pyfort -i %(file)s ; " - -# matplotlib depends on pkg-config -action['setup.py'] = 'PATH=%s/bin:$PATH %s setup.py install --prefix=%s ; ' \ - % (sys.exec_prefix, sys.executable, target_prefix) -install_script_path = os.path.join(libcdms_dir, 'install_script') -action['install_script'] = install_script_path + ' %s %s ; ' % (target_prefix, sys.executable) -for k in ['makefile','Makefile','MAKEFILE']: - action[k] = make_code + " PYPREFIX='%s' PREFIX='%s' install ; " % (sys.exec_prefix,target_prefix) -action['autogen.sh'] = "autogen.sh ; ./configure --prefix=%s --with-python=%s ; make -j1 ; make -j1 install ;" % (os.environ.get("EXTERNALS",os.path.join(sys.prefix,'Externals')), os.path.join(sys.exec_prefix,'bin','python')) diff --git a/pysrc/README.txt b/pysrc/README.txt deleted file mode 100644 index 040a48ab71..0000000000 --- a/pysrc/README.txt +++ /dev/null @@ -1,36 +0,0 @@ -This directory contains all the sources for building a Python suitable for -use with CDAT. - -Changes from standard distributions: - a. readline - In file readline.c, change definition of RL_LIBRARY_VERSION to avoid - the error if this macro is already defined, by undefining it. - b. 
We use a private version of Python's setup.py to have it find - our own tcl/tk. - -To install: -./install_script /whereyouwanttoputit - -A subdirectory build will be created that contains the output. -Some of these products can be tested by changing to their directory under -build and typing "make test". - -If you put in a new source file you need to remove the old one and run -./clean_script before building again. - - -OPTIONS: -you can add: --enable-aqua to the build line to prevent the build of Tcl/Tk -and use Aqua Native -you can add: --disable-tkbuild to the build line to prevent the build of Tcl/Tk - -Log files are created in the build subdirectory. - -Each of the pieces may be built individually using the corresponding .sh -files in this directory. Some warning errors are usual from -many of the packages and vary from architecture to architecture. - -N.B.: The order in which the packages are built matters. - -You can add an 'exit 0' at any appropriate point in install_script if you -want to go up to that point and then stop. diff --git a/pysrc/clean_script b/pysrc/clean_script deleted file mode 100755 index 185cc2b0e8..0000000000 --- a/pysrc/clean_script +++ /dev/null @@ -1,2 +0,0 @@ -/bin/rm -fr build >/dev/null 2>&1 -find . -name 'config.cache' -print -exec rm {} \; diff --git a/pysrc/install_script.obsolete b/pysrc/install_script.obsolete deleted file mode 100755 index a96a6fab40..0000000000 --- a/pysrc/install_script.obsolete +++ /dev/null @@ -1,117 +0,0 @@ -#!/bin/sh -d=`pwd` -if [ -n "$PYTHONPATH" ]; then - echo "PYTHONPATH environment variable should not be set!" - exit 1 -fi - -if [ -n "$PYTHONHOME" ]; then - echo "PYTHONHOME environment variable should not be set!" - exit 1 -fi - -echo "Building Zlib, Readline, Tcl, Tk, and Python." -echo "Logs are in $d/build" - - -OS=`uname` -AQUA=no -TK=yes -all=$* -READLINE=yes -ZLIB=yes -OSver=`uname -r` -OSMajor=`uname -r | cut -d. 
-f1` - -s=$1; shift; -while [ "$#" -ne 0 ] -do - # Translate $1 to lowercase - MYOPT=`echo $1 | tr 'A-Z' 'a-z'` - if [ "$MYOPT" = "--enable-aqua" ]; then - if ( test "${OS}" == "Darwin" ) then - AQUA=yes - else - echo "--enable-aqua is for Darwin systems only! Use --disable-tkbuild" - exit 1 - fi - if ( test "${OSMajor}" == "9" ) then - echo "Detected Leopard 10.5, doing the posix thing"; - CPPFLAGS="-DSETPGRP_HAVE_ARG "${CFLAGS} - fi - fi - if [ "$MYOPT" = "--disable-tkbuild" ]; then - TK=no - fi - if [ "$MYOPT" = "--disable-externals-build" ]; then - TK=no - READLINE=no - ZLIB=no - fi - shift -done -./prolog.sh ${all} -if [ $? -ne 0 ]; then - echo "Unpacking of tar files failed." - exit 1 -fi - - -if [ "${ZLIB}" = "no" ]; then - echo "skipping build of zlib" -else - echo "Building zlib" - ./zlib.sh $s >build/zlib.LOG 2>&1 - if [ $? -ne 0 ]; then - echo "Build of zlib failed" - exit 1 - fi -fi - -if [ "${READLINE}" = "no" ]; then - echo "skipping build of readline" -else - echo "Building readline" - ./readline.sh $s >build/readline.LOG 2>&1 - if [ $? 
-ne 0 ]; then - echo "Build of readline failed" - exit 1 - fi -fi -if [ "${OS}" = "CYGWIN_NT-5.1" ]; then - echo "Tcl - Using the pre-built tcl library that is part of the standard Cygwin distribution" - echo "Tk - Using the pre-built tk library that is part of the standard Cygwin distribution" -elif [ "${OS}" = "CYGWIN_NT-6.0" ]; then - echo "Tcl - Using the pre-built tcl library that is part of the standard Cygwin distribution" - echo "Tk - Using the pre-built tk library that is part of the standard Cygwin distribution" -elif [ "${AQUA}" = "yes" ]; then - echo "Tcl - Using the pre-built tcl library that is part of the standard Darwin distribution (with Aqua support)" - echo "Tk - Using the pre-built tk library that is part of the standard Darwin distributioni (with Aqua support)" -elif [ "${TK}" = "no" ]; then - echo "Tcl - Using the pre-built tcl library that is part of your system" - echo "Tk - Using the pre-built tk library that is part of your system" -else - echo "Building tcl" - ./tcl.sh $s >build/tcl.LOG 2>&1 - if [ $? -ne 0 ]; then - echo "Build of tcl failed." - exit 1 - fi - - echo "Building tk" - ./tk.sh $s >build/tk.LOG 2>&1 - if [ $? -ne 0 ]; then - echo "Build of tk failed." - exit 1 - fi -fi - -echo "Building python" -./python.sh $s >build/python.LOG 2>&1 -if [ $? -ne 0 ]; then - echo "Build of Python failed." - exit 1 -fi - -echo "Python built successfully." - diff --git a/pysrc/prolog.sh b/pysrc/prolog.sh deleted file mode 100755 index f989095939..0000000000 --- a/pysrc/prolog.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/sh -if (test "$1" = "--debug") then - D="--debug"; - OPT=-g; - shift -else - D=""; - OPT=${OPT:=-O} -fi -export OPT - -OS=`uname` - -if (test -z "$1") then - echo "Usage: $0 prefix"; - exit 1 -fi -version=`more ../version` - -if (test ! -d $1) then - echo -n "$1/${version} is not a directory; create it? 
(y/[n])"; - y='n' - read y; - if (test ${y} = 'y') then - mkdir -p $1/${version}/bin; mkdir $1/${version}/lib; mkdir $1/${version}/include ; mkdir -p $1/Externals/bin ; mkdir $1/Externals/lib ; mkdir $1/Externals/share ; mkdir $1/Externals/include - if (test ! -d $1) then - echo "Could not create $1, installation aborted."; - exit 1 - fi - else - echo 'Installation aborted.'; - exit 1 - fi -fi -echo "Created $1/${version} and $1/Externals directories" -echo "Python/CDAT built in $1/${version} and external dependencies binaries and libs are built to $1/Externals" - -prefix=`(cd $1;pwd)` - -if (test ! -d build) then - # Unpack everything into build - mkdir build - /bin/cp src/*gz build - cd build - OS=`uname` - if (test "${OS}" = "Linux" ) then - TAR=`which tar` - elif (test "${OS}" = "Darwin" ) then - TAR=`which tar` - elif (test "${OS}" = "CYGWIN_NT-5.1" ) then - TAR=`which tar` - elif (test "${OS}" = "CYGWIN_NT-6.0" ) then - TAR=`which tar` - elif (test "${OS}" = "AIX" ) then - TAR=`which tar` - else - echo "Building tar for non GNU OS to unpack Python, some error messages may be generated but can be ignored" - chmod +w tar*gz - for x in tar*gz; - do - gunzip -f $x; - tar xf `basename $x .gz`; - (cd tar-* ; ./configure --prefix=$1/Externals ; make ; make install; cd .. )> LOG.prolog; - TAR=$1/Externals/bin/tar - done - fi - #rm tar*gz - chmod +w *.gz - for x in *.gz; - do - echo "$x"; - gunzip -f $x; - ${TAR} xf `basename $x .gz`; - /bin/rm -f `basename $x .gz`; - done -# for x in *.tgz; -# do -# echo "$x"; -# ${TAR} xzf $x; -# /bin/rm -f $x -# done - cd .. -fi -cd build -echo "Installation to ${prefix}" diff --git a/pysrc/python.sh b/pysrc/python.sh deleted file mode 100755 index 3e0d844b76..0000000000 --- a/pysrc/python.sh +++ /dev/null @@ -1,76 +0,0 @@ -#!/bin/sh -. ./prolog.sh -# Python, idle -# This needs to be set or Python's installer will conclude _tkinter cannot -# be imported. 
-CCTEMP=${CC-gcc} -# Get command name WITHOUT the parameters -CCTEMP=`echo $CCTEMP | awk '{print $1}'` -if (test "${CCTEMP}" = "gcc") then -config_opt="--with-gcc --without-cxx" -else - config_opt="--without-gcc --without-cxx" -fi -OS=`uname` -if (test "${OS}" = "Darwin") then # MacIntosh OSX - CPPFLAGS="-I${prefix}/Externals/include"; export CPPFLAGS - LDFLAGS="-L${prefix}/Externals/lib"; export LDFLAGS - config_opt="" - OPT=""; export OPT -fi -getaddrbug="" -if (test "${OS}" = "OSF1") then - getaddrbug="--disable-ipv6" -fi -if (test "${OS}" = "AIX") then - getaddrbug="--disable-ipv6" -fi -cd Python* -/bin/rm -f setup.py -/bin/cp ../../src/setup.py setup.py -CDAT_PREFIX=${prefix}/Externals; export CDAT_PREFIX -if (test "${OS}" = "Linux") then # Linux -- needed for readline - export LDFLAGS="-L${prefix}/Externals/lib -Wl,-R${prefix}/Externals/lib" - if (test "${CCTEMP}" = "icc") then # zeus x86_64 with Intel compiler - if (test "${IC}" = "") then - echo "Run 'use ' to set environment variable IC to the location of libimf.a, libirc.a" - exit 1 - fi - export LDFLAGS="${LDFLAGS} -L${IC}/lib -limf -lirc" - fi -fi -./configure ${config_opt} --prefix=${prefix}/${version} ${getaddrbug} -if (test $? -ne 0) then - echo "Python configure failed."; exit 1; -fi - -make -if (test $? -ne 0) then - echo "Python make failed."; exit 1; -fi - -make install -if (test $? -ne 0) then - echo "Python install failed."; exit 1; -fi - -#cd Tools/idle -#${prefix}/bin/python setup.py install -#if (test $? -ne 0) then -# echo "Python idle install failed."; exit 1; -#fi -mkdir -p ${prefix}/Externals/share -if (test "${OS}" = "CYGWIN_NT-5.1" ) then - ln -s /usr/share/tcl* ${prefix}/Externals/share ; - ln -s /usr/share/tk* ${prefix}/Externals/share ; -fi -if (test "${OS}" = "CYGWIN_NT-6.0" ) then - ln -s /usr/share/tcl* ${prefix}/Externals/share ; - ln -s /usr/share/tk* ${prefix}/Externals/share ; -fi - -${prefix}/${version}/bin/python -c "import Tkinter" -if (test $? 
-ne 0) then - echo "Python Tkinter import failed."; exit 1; -fi -echo "Python built with Tkinter correctly." diff --git a/pysrc/readline.sh b/pysrc/readline.sh deleted file mode 100755 index 40f2d97d2d..0000000000 --- a/pysrc/readline.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/sh -. ./prolog.sh -cd readline-* -./configure --prefix=${prefix}/Externals -if (test $? -ne 0) then - echo "readline configuration failed."; - echo "Some platforms don't support readline, this doesn't matter."; - echo "Ignoring this error."; - exit 0; -fi -make -if (test $? -ne 0) then - echo "readline make failed."; - echo "Some platforms don't support readline, this doesn't matter."; - echo "Ignoring this error."; - exit 0; -fi -make install -if (test $? -ne 0) then - echo "readline install failed."; - echo "This is unexpected since it built ok."; - exit 1; -fi diff --git a/pysrc/src/setup-2.7.1.py b/pysrc/src/setup-2.7.1.py deleted file mode 100644 index c7d0590694..0000000000 --- a/pysrc/src/setup-2.7.1.py +++ /dev/null @@ -1,2067 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision: 86041 $" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') - -# This global variable is used to hold the list of modules to be disabled. 
-disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. - """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/') - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if sys.platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - platform = self.get_platform() - if platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if self.get_platform() == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if self.get_platform() == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def get_platform(self): - # Get value of sys.platform - for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: - if sys.platform.startswith(platform): - return platform - return sys.platform - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # end PCMDI change - - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). 
- for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. - env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- lib_dirs = self.compiler.library_dirs + [ - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu', - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - platform = self.get_platform() - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - if platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. - cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. 
- # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - 
depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) - - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). 
- if platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. - exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. 
- if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c'], - depends = ['socketmodule.h']) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if sys.platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (4, 8) - min_db_ver = (4, 1) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - db_incdir.replace("include", 'lib/x86_64-linux-gnu') - ] - - if sys.platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguoius dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - for d in inc_dirs + sqlite_inc_paths: - f = os.path.join(d, "sqlite3.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "sqlite3.h") - - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if sys.platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if sys.platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if sys.platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ 
- ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif curses_library == 'curses' and platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if sys.platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if sys.platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. 
The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. 
- if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if platform == 'darwin' and ("--disable-toolbox-glue" not in - 
sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. 
- return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. 
- platform = self.get_platform() - ## PCMDI changes look for AQUA_CDAT env variable to decide - if os.environ.get("AQUA_CDAT","no")=="yes" : - if (platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - ## End of pcmdi changes (we just added the if test - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. - tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in sys.platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if sys.platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [] - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. 
- cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if sys.platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif sys.platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. 
- extra_link_args.append('-mimpure-text') - - elif sys.platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if sys.platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. 
- - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. Ask around on comp.lang.python -- or just try compiling Python -yourself. 
-""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup-2.7.2.py b/pysrc/src/setup-2.7.2.py deleted file mode 100644 index 1f9c9b83df..0000000000 --- a/pysrc/src/setup-2.7.2.py +++ /dev/null @@ -1,2090 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/') - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if sys.platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - platform = self.get_platform() - if platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if self.get_platform() == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if self.get_platform() == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def get_platform(self): - # Get value of sys.platform - for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: - if sys.platform.startswith(platform): - return platform - return sys.platform - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - if not find_executable('dpkg-architecture'): - return - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - tmpfile) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # end PCMDI change - - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. 
- # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). - for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. 
- env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- lib_dirs = self.compiler.library_dirs + [ - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu', - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - platform = self.get_platform() - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - if platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. - cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. 
- # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - 
depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) - - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). 
- if platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. - exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. 
- if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c'], - depends = ['socketmodule.h']) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if sys.platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (4, 8) - min_db_ver = (4, 1) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - db_incdir.replace("include", 'lib/x86_64-linux-gnu') - ] - - if sys.platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - for d in inc_dirs + sqlite_inc_paths: - f = os.path.join(d, "sqlite3.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "sqlite3.h") - - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if sys.platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if sys.platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if sys.platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ 
- ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif curses_library == 'curses' and platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if sys.platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if sys.platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. 
The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. 
- if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if platform == 'darwin' and ("--disable-toolbox-glue" not in - 
sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. 
- return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. 
- platform = self.get_platform() - ## PCMDI changes look for AQUA_CDAT env variable to decide - if os.environ.get("AQUA_CDAT","no")=="yes" : - if (platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - ## End of pcmdi changes (we just added the if test - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. - tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in sys.platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if sys.platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [] - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. 
- cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if sys.platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif sys.platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. 
- extra_link_args.append('-mimpure-text') - - elif sys.platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if sys.platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. 
- - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. Ask around on comp.lang.python -- or just try compiling Python -yourself. 
-""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup-2.7.3.py b/pysrc/src/setup-2.7.3.py deleted file mode 100644 index 4026128ebd..0000000000 --- a/pysrc/src/setup-2.7.3.py +++ /dev/null @@ -1,2094 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/') - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if sys.platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - platform = self.get_platform() - if platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if self.get_platform() == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if self.get_platform() == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def get_platform(self): - # Get value of sys.platform - for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: - if sys.platform.startswith(platform): - return platform - return sys.platform - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - if not find_executable('dpkg-architecture'): - return - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - tmpfile) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # end PCMDI change - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. 
is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # end PCMDI change - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). - for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. 
- env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- lib_dirs = self.compiler.library_dirs + [ - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu', - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - platform = self.get_platform() - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - if platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. - cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. 
- # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - 
depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) - - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). 
- if platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. - exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. 
- if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c'], - depends = ['socketmodule.h']) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if sys.platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (4, 8) - min_db_ver = (4, 1) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - db_incdir.replace("include", 'lib/x86_64-linux-gnu') - ] - - if sys.platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - for d in inc_dirs + sqlite_inc_paths: - f = os.path.join(d, "sqlite3.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "sqlite3.h") - - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if sys.platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if sys.platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if sys.platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ 
- ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif curses_library == 'curses' and platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if sys.platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if sys.platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. 
The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. 
- if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if platform == 'darwin' and ("--disable-toolbox-glue" not in - 
sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. 
- return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. 
- platform = self.get_platform() - if (platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. - tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in sys.platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if sys.platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [] - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. 
- cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if sys.platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif sys.platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. 
- extra_link_args.append('-mimpure-text') - - elif sys.platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if sys.platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. 
- - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. Ask around on comp.lang.python -- or just try compiling Python -yourself. 
-""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup-2.7.4.py b/pysrc/src/setup-2.7.4.py deleted file mode 100644 index ea8a5f51e9..0000000000 --- a/pysrc/src/setup-2.7.4.py +++ /dev/null @@ -1,2186 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ - -def get_platform(): - # cross build - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - # Get value of sys.platform - if sys.platform.startswith('osf1'): - return 'osf1' - return sys.platform -host_platform = get_platform() - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS")) - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return ( (path.startswith('/usr/') and not path.startswith('/usr/local')) - or path.startswith('/System/') - or path.startswith('/Library/') ) - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if host_platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - - if host_platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('include'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if host_platform == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if host_platform == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - - # Don't try to load extensions for cross builds - if cross_compiling: - return - - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - cc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile)) - multiarch_path_component = '' - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - finally: - os.unlink(tmpfile) - - if multiarch_path_component != '': - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - return - - if not find_executable('dpkg-architecture'): - return - opt = '' - if cross_compiling: - opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - (opt, tmpfile)) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def add_gcc_paths(self): - gcc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'gccpaths') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = 
os.system('%s -E -v - %s 1>/dev/null' % (gcc, tmpfile)) - is_gcc = False - in_incdirs = False - inc_dirs = [] - lib_dirs = [] - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - for line in fp.readlines(): - if line.startswith("gcc version"): - is_gcc = True - elif line.startswith("#include <...>"): - in_incdirs = True - elif line.startswith("End of search list"): - in_incdirs = False - elif is_gcc and line.startswith("LIBRARY_PATH"): - for d in line.strip().split("=")[1].split(":"): - d = os.path.normpath(d) - if '/gcc/' not in d: - add_dir_to_list(self.compiler.library_dirs, - d) - elif is_gcc and in_incdirs and '/gcc/' not in line: - add_dir_to_list(self.compiler.include_dirs, - line.strip()) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). - for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! 
We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. - env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- inc_dirs = self.compiler.include_dirs[:] - lib_dirs = self.compiler.library_dirs[:] - if not cross_compiling: - for d in ( - '/usr/include', - ): - add_dir_to_list(inc_dirs, d) - for d in ( - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', - ): - add_dir_to_list(lib_dirs, d) - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if host_platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if host_platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - # HP-UX11iv3 keeps files in lib/hpux folders. - if host_platform == 'hp-ux11': - lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32'] - - if host_platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. 
- cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if host_platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. - # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - 
exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if host_platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) 
- - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). - if host_platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. 
- exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. - if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if host_platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. 
Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if host_platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'], - depends=['socketmodule.h'], - libraries=math_libs) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if host_platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (5, 3) - min_db_ver = (4, 3) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 5: - for x in range(max_db_ver[1]+1): - if allow_db_ver((5, x)): - yield x - elif major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - if cross_compiling: - db_inc_paths = [] - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if host_platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - ] - - if host_platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - if cross_compiling: - sqlite_inc_paths = [] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - for d_ in inc_dirs + sqlite_inc_paths: - d = d_ - if host_platform == 'darwin' and is_macosx_sdk_path(d): - d = os.path.join(sysroot, d[1:]) - - f = os.path.join(d, "sqlite3.h") - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if host_platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if host_platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if host_platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = host_platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if host_platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others have -lgdbm_compat, - # others don't have either - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - elif self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - ndbm_libs = ['gdbm_compat'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if 
find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if host_platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if host_platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif curses_library == 'curses' and host_platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if host_platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if host_platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. 
- # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - expat_depends = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - expat_depends = ['expat/ascii.h', - 'expat/asciitab.h', - 'expat/expat.h', - 'expat/expat_config.h', - 'expat/expat_external.h', - 'expat/internal.h', - 'expat/latin1tab.h', - 'expat/utf8tab.h', - 'expat/xmlrole.h', - 'expat/xmltok.h', - 'expat/xmltok_impl.h' - ] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources, - depends = expat_depends, - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - depends = ['pyexpat.c'] + expat_sources + - expat_depends, - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. 
- if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (host_platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if host_platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif host_platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif host_platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif host_platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif host_platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if host_platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if host_platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or host_platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if host_platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if host_platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if host_platform == 'darwin' and 
("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. 
- return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. 
- if (host_platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. - tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in host_platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if host_platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if host_platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if host_platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if host_platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if host_platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split() - if (('--host=' in arg) or ('--build=' in arg))] - if not 
self.verbose: - config_args.append("-q") - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. - cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if host_platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif host_platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. 
If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. - extra_link_args.append('-mimpure-text') - - elif host_platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if host_platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. 
All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. - - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. 
Ask around on comp.lang.python -- or just try compiling Python -yourself. -""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup-2.7.7.py b/pysrc/src/setup-2.7.7.py deleted file mode 100644 index 9a92bc3a79..0000000000 --- a/pysrc/src/setup-2.7.7.py +++ /dev/null @@ -1,2244 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ - -def get_platform(): - # cross build - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - # Get value of sys.platform - if sys.platform.startswith('osf1'): - return 'osf1' - return sys.platform -host_platform = get_platform() - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS")) - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return ( (path.startswith('/usr/') and not path.startswith('/usr/local')) - or path.startswith('/System/') - or path.startswith('/Library/') ) - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if host_platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - - if host_platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('include'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if host_platform == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if host_platform == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - - # Don't try to load extensions for cross builds - if cross_compiling: - return - - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - cc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile)) - multiarch_path_component = '' - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - finally: - os.unlink(tmpfile) - - if multiarch_path_component != '': - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - return - - if not find_executable('dpkg-architecture'): - return - opt = '' - if cross_compiling: - opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - (opt, tmpfile)) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def add_gcc_paths(self): - gcc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'gccpaths') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = 
os.system('%s -E -v - %s 1>/dev/null' % (gcc, tmpfile)) - is_gcc = False - in_incdirs = False - inc_dirs = [] - lib_dirs = [] - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - for line in fp.readlines(): - if line.startswith("gcc version"): - is_gcc = True - elif line.startswith("#include <...>"): - in_incdirs = True - elif line.startswith("End of search list"): - in_incdirs = False - elif is_gcc and line.startswith("LIBRARY_PATH"): - for d in line.strip().split("=")[1].split(":"): - d = os.path.normpath(d) - if '/gcc/' not in d: - add_dir_to_list(self.compiler.library_dirs, - d) - elif is_gcc and in_incdirs and '/gcc/' not in line: - add_dir_to_list(self.compiler.include_dirs, - line.strip()) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # End PCMDI Changes - # Ensure that /usr/local is always used - if not cross_compiling: - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - if cross_compiling: - self.add_gcc_paths() - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). 
- for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. - env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- inc_dirs = self.compiler.include_dirs[:] - lib_dirs = self.compiler.library_dirs[:] - if not cross_compiling: - for d in ( - '/usr/include', - ): - add_dir_to_list(inc_dirs, d) - for d in ( - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', - ): - add_dir_to_list(lib_dirs, d) - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if host_platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if host_platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - # HP-UX11iv3 keeps files in lib/hpux folders. - if host_platform == 'hp-ux11': - lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32'] - - if host_platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. 
- cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if host_platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. - # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - 
exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if host_platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) 
- - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). - if host_platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. 
- exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. - if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if host_platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. 
Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if host_platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'], - depends=['socketmodule.h'], - libraries=math_libs) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if host_platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (5, 3) - min_db_ver = (4, 3) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 5: - for x in range(max_db_ver[1]+1): - if allow_db_ver((5, x)): - yield x - elif major == 4: - for x in range(9): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - if cross_compiling: - db_inc_paths = [] - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if host_platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - ] - - if host_platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - if cross_compiling: - sqlite_inc_paths = [] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - for d_ in inc_dirs + sqlite_inc_paths: - d = d_ - if host_platform == 'darwin' and is_macosx_sdk_path(d): - d = os.path.join(sysroot, d[1:]) - - f = os.path.join(d, "sqlite3.h") - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if host_platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if host_platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if host_platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = host_platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if host_platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others have -lgdbm_compat, - # others don't have either - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - elif self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - ndbm_libs = ['gdbm_compat'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if 
find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if host_platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if host_platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - curses_incs = None - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - curses_incs = find_file('curses.h', inc_dirs, - [os.path.join(d, 'ncursesw') for d in inc_dirs]) - exts.append( Extension('_curses', ['_cursesmodule.c'], - include_dirs = curses_incs, - libraries = curses_libs) ) - elif curses_library == 'curses' and host_platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - include_dirs = curses_incs, - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h): - zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:]) - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if host_platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. 
- if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if host_platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - expat_depends = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - expat_depends = ['expat/ascii.h', - 'expat/asciitab.h', - 'expat/expat.h', - 'expat/expat_config.h', - 'expat/expat_external.h', - 'expat/internal.h', - 'expat/latin1tab.h', - 'expat/utf8tab.h', - 'expat/xmlrole.h', - 'expat/xmltok.h', - 'expat/xmltok_impl.h' - ] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources, - depends = expat_depends, - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). 
- - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - depends = ['pyexpat.c'] + expat_sources + - expat_depends, - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. - if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (host_platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if host_platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif host_platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif host_platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif host_platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif host_platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if host_platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if host_platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or host_platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if host_platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if host_platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if host_platform == 'darwin' and 
("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - return missing - - def detect_tkinter_explicitly(self): - # Build _tkinter using explicit locations for Tcl/Tk. - # - # This is enabled when both arguments are given to ./configure: - # - # --with-tcltk-includes="-I/path/to/tclincludes \ - # -I/path/to/tkincludes" - # --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \ - # -L/path/to/tklibs -ltkm.n" - # - # These values can also be specified or overriden via make: - # make TCLTK_INCLUDES="..." TCLTK_LIBS="..." - # - # This can be useful for building and testing tkinter with multiple - # versions of Tcl/Tk. Note that a build of Tk depends on a particular - # build of Tcl so you need to specify both arguments and use care when - # overriding. - - # The _TCLTK variables are created in the Makefile sharedmods target. 
- tcltk_includes = os.environ.get('_TCLTK_INCLUDES') - tcltk_libs = os.environ.get('_TCLTK_LIBS') - if not (tcltk_includes and tcltk_libs): - # Resume default configuration search. - return 0 - - extra_compile_args = tcltk_includes.split() - extra_link_args = tcltk_libs.split() - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - extra_compile_args = extra_compile_args, - extra_link_args = extra_link_args, - ) - self.extensions.append(ext) - return 1 - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. - return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... 
- include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Check whether --with-tcltk-includes and --with-tcltk-libs were - # configured or passed into the make target. If so, use these values - # to build tkinter and bypass the searches for Tcl and TK in standard - # locations. - if self.detect_tkinter_explicitly(): - return - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. - if (host_platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. 
- tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in host_platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if host_platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if host_platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if host_platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if host_platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if host_platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split() - if (('--host=' in arg) or ('--build=' in arg))] - if not 
self.verbose: - config_args.append("-q") - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. - cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if host_platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif host_platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. 
If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. - extra_link_args.append('-mimpure-text') - - elif host_platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if host_platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. 
All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. - - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. 
Ask around on comp.lang.python -- or just try compiling Python -yourself. -""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup.py b/pysrc/src/setup.py deleted file mode 100644 index 9a92bc3a79..0000000000 --- a/pysrc/src/setup.py +++ /dev/null @@ -1,2244 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ - -def get_platform(): - # cross build - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - # Get value of sys.platform - if sys.platform.startswith('osf1'): - return 'osf1' - return sys.platform -host_platform = get_platform() - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS")) - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return ( (path.startswith('/usr/') and not path.startswith('/usr/local')) - or path.startswith('/System/') - or path.startswith('/Library/') ) - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if host_platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - - if host_platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('include'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if host_platform == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if host_platform == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - - # Don't try to load extensions for cross builds - if cross_compiling: - return - - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - cc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile)) - multiarch_path_component = '' - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - finally: - os.unlink(tmpfile) - - if multiarch_path_component != '': - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - return - - if not find_executable('dpkg-architecture'): - return - opt = '' - if cross_compiling: - opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - (opt, tmpfile)) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def add_gcc_paths(self): - gcc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'gccpaths') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = 
os.system('%s -E -v - %s 1>/dev/null' % (gcc, tmpfile)) - is_gcc = False - in_incdirs = False - inc_dirs = [] - lib_dirs = [] - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - for line in fp.readlines(): - if line.startswith("gcc version"): - is_gcc = True - elif line.startswith("#include <...>"): - in_incdirs = True - elif line.startswith("End of search list"): - in_incdirs = False - elif is_gcc and line.startswith("LIBRARY_PATH"): - for d in line.strip().split("=")[1].split(":"): - d = os.path.normpath(d) - if '/gcc/' not in d: - add_dir_to_list(self.compiler.library_dirs, - d) - elif is_gcc and in_incdirs and '/gcc/' not in line: - add_dir_to_list(self.compiler.include_dirs, - line.strip()) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # End PCMDI Changes - # Ensure that /usr/local is always used - if not cross_compiling: - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - if cross_compiling: - self.add_gcc_paths() - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). 
- for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. - env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- inc_dirs = self.compiler.include_dirs[:] - lib_dirs = self.compiler.library_dirs[:] - if not cross_compiling: - for d in ( - '/usr/include', - ): - add_dir_to_list(inc_dirs, d) - for d in ( - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', - ): - add_dir_to_list(lib_dirs, d) - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if host_platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if host_platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - # HP-UX11iv3 keeps files in lib/hpux folders. - if host_platform == 'hp-ux11': - lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32'] - - if host_platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. 
- cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if host_platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. - # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - 
exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if host_platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) 
- - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). - if host_platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. 
- exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. - if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if host_platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. 
Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if host_platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'], - depends=['socketmodule.h'], - libraries=math_libs) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if host_platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (5, 3) - min_db_ver = (4, 3) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 5: - for x in range(max_db_ver[1]+1): - if allow_db_ver((5, x)): - yield x - elif major == 4: - for x in range(9): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - if cross_compiling: - db_inc_paths = [] - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if host_platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - ] - - if host_platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - if cross_compiling: - sqlite_inc_paths = [] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - for d_ in inc_dirs + sqlite_inc_paths: - d = d_ - if host_platform == 'darwin' and is_macosx_sdk_path(d): - d = os.path.join(sysroot, d[1:]) - - f = os.path.join(d, "sqlite3.h") - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if host_platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if host_platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if host_platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = host_platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if host_platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others have -lgdbm_compat, - # others don't have either - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - elif self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - ndbm_libs = ['gdbm_compat'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if 
find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if host_platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if host_platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - curses_incs = None - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - curses_incs = find_file('curses.h', inc_dirs, - [os.path.join(d, 'ncursesw') for d in inc_dirs]) - exts.append( Extension('_curses', ['_cursesmodule.c'], - include_dirs = curses_incs, - libraries = curses_libs) ) - elif curses_library == 'curses' and host_platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - include_dirs = curses_incs, - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h): - zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:]) - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if host_platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. 
- if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if host_platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - expat_depends = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - expat_depends = ['expat/ascii.h', - 'expat/asciitab.h', - 'expat/expat.h', - 'expat/expat_config.h', - 'expat/expat_external.h', - 'expat/internal.h', - 'expat/latin1tab.h', - 'expat/utf8tab.h', - 'expat/xmlrole.h', - 'expat/xmltok.h', - 'expat/xmltok_impl.h' - ] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources, - depends = expat_depends, - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). 
- - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - depends = ['pyexpat.c'] + expat_sources + - expat_depends, - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. - if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (host_platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if host_platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif host_platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif host_platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif host_platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif host_platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if host_platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if host_platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or host_platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if host_platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if host_platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if host_platform == 'darwin' and 
("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - return missing - - def detect_tkinter_explicitly(self): - # Build _tkinter using explicit locations for Tcl/Tk. - # - # This is enabled when both arguments are given to ./configure: - # - # --with-tcltk-includes="-I/path/to/tclincludes \ - # -I/path/to/tkincludes" - # --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \ - # -L/path/to/tklibs -ltkm.n" - # - # These values can also be specified or overriden via make: - # make TCLTK_INCLUDES="..." TCLTK_LIBS="..." - # - # This can be useful for building and testing tkinter with multiple - # versions of Tcl/Tk. Note that a build of Tk depends on a particular - # build of Tcl so you need to specify both arguments and use care when - # overriding. - - # The _TCLTK variables are created in the Makefile sharedmods target. 
- tcltk_includes = os.environ.get('_TCLTK_INCLUDES') - tcltk_libs = os.environ.get('_TCLTK_LIBS') - if not (tcltk_includes and tcltk_libs): - # Resume default configuration search. - return 0 - - extra_compile_args = tcltk_includes.split() - extra_link_args = tcltk_libs.split() - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - extra_compile_args = extra_compile_args, - extra_link_args = extra_link_args, - ) - self.extensions.append(ext) - return 1 - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. - return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... 
- include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Check whether --with-tcltk-includes and --with-tcltk-libs were - # configured or passed into the make target. If so, use these values - # to build tkinter and bypass the searches for Tcl and TK in standard - # locations. - if self.detect_tkinter_explicitly(): - return - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. - if (host_platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. 
- tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in host_platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if host_platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if host_platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if host_platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if host_platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if host_platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split() - if (('--host=' in arg) or ('--build=' in arg))] - if not 
self.verbose: - config_args.append("-q") - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. - cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if host_platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif host_platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. 
If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. - extra_link_args.append('-mimpure-text') - - elif host_platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if host_platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. 
All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. - - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. 
Ask around on comp.lang.python -- or just try compiling Python -yourself. -""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. - ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/site-2.7.7.py b/pysrc/src/site-2.7.7.py deleted file mode 100644 index c22c48ab5e..0000000000 --- a/pysrc/src/site-2.7.7.py +++ /dev/null @@ -1,602 +0,0 @@ -"""Append module search paths for third-party packages to sys.path. - -**************************************************************** -* This module is automatically imported during initialization. 
* -**************************************************************** - -In earlier versions of Python (up to 1.5a3), scripts or modules that -needed to use site-specific modules would place ``import site'' -somewhere near the top of their code. Because of the automatic -import, this is no longer necessary (but code that does it still -works). - -This will append site-specific paths to the module search path. On -Unix (including Mac OSX), it starts with sys.prefix and -sys.exec_prefix (if different) and appends -lib/python/site-packages as well as lib/site-python. -On other platforms (such as Windows), it tries each of the -prefixes directly, as well as with lib/site-packages appended. The -resulting directories, if they exist, are appended to sys.path, and -also inspected for path configuration files. - -A path configuration file is a file whose name has the form -.pth; its contents are additional directories (one per line) -to be added to sys.path. Non-existing directories (or -non-directories) are never added to sys.path; no directory is added to -sys.path more than once. Blank lines and lines beginning with -'#' are skipped. Lines starting with 'import' are executed. - -For example, suppose sys.prefix and sys.exec_prefix are set to -/usr/local and there is a directory /usr/local/lib/python2.5/site-packages -with three subdirectories, foo, bar and spam, and two path -configuration files, foo.pth and bar.pth. Assume foo.pth contains the -following: - - # foo package configuration - foo - bar - bletch - -and bar.pth contains: - - # bar package configuration - bar - -Then the following directories are added to sys.path, in this order: - - /usr/local/lib/python2.5/site-packages/bar - /usr/local/lib/python2.5/site-packages/foo - -Note that bletch is omitted because it doesn't exist; bar precedes foo -because bar.pth comes alphabetically before foo.pth; and spam is -omitted because it is not mentioned in either path configuration file. 
- -After these path manipulations, an attempt is made to import a module -named sitecustomize, which can perform arbitrary additional -site-specific customizations. If this import fails with an -ImportError exception, it is silently ignored. - -""" - -import sys -import os -import __builtin__ -import traceback - -# Prefixes for site-packages; add additional prefixes like /usr/local here -PREFIXES = [sys.prefix, sys.exec_prefix] -# Enable per user site-packages directory -# set it to False to disable the feature or True to force the feature -ENABLE_USER_SITE = None - -# for distutils.commands.install -# These values are initialized by the getuserbase() and getusersitepackages() -# functions, through the main() function when Python starts. -USER_SITE = None -USER_BASE = None - - -def makepath(*paths): - dir = os.path.join(*paths) - try: - dir = os.path.abspath(dir) - except OSError: - pass - return dir, os.path.normcase(dir) - - -def abs__file__(): - """Set all module' __file__ attribute to an absolute path""" - for m in sys.modules.values(): - if hasattr(m, '__loader__'): - continue # don't mess with a PEP 302-supplied __file__ - try: - m.__file__ = os.path.abspath(m.__file__) - except (AttributeError, OSError): - pass - - -def removeduppaths(): - """ Remove duplicate entries from sys.path along with making them - absolute""" - # This ensures that the initial path provided by the interpreter contains - # only absolute pathnames, even if we're running from the build directory. - L = [] - known_paths = set() - for dir in sys.path: - # Filter out duplicate paths (on case-insensitive file systems also - # if they only differ in case); turn relative paths into absolute - # paths. 
- dir, dircase = makepath(dir) - if not dircase in known_paths: - L.append(dir) - known_paths.add(dircase) - sys.path[:] = L - return known_paths - - -def _init_pathinfo(): - """Return a set containing all existing directory entries from sys.path""" - d = set() - for dir in sys.path: - try: - if os.path.isdir(dir): - dir, dircase = makepath(dir) - d.add(dircase) - except TypeError: - continue - return d - - -def addpackage(sitedir, name, known_paths): - """Process a .pth file within the site-packages directory: - For each line in the file, either combine it with sitedir to a path - and add that to known_paths, or execute it if it starts with 'import '. - """ - if known_paths is None: - _init_pathinfo() - reset = 1 - else: - reset = 0 - fullname = os.path.join(sitedir, name) - try: - f = open(fullname, "rU") - except IOError: - return - with f: - for n, line in enumerate(f): - if line.startswith("#"): - continue - try: - if line.startswith(("import ", "import\t")): - exec line - continue - line = line.rstrip() - dir, dircase = makepath(sitedir, line) - if not dircase in known_paths and os.path.exists(dir): - sys.path.append(dir) - known_paths.add(dircase) - except Exception as err: - print >>sys.stderr, "Error processing line {:d} of {}:\n".format( - n+1, fullname) - for record in traceback.format_exception(*sys.exc_info()): - for line in record.splitlines(): - print >>sys.stderr, ' '+line - print >>sys.stderr, "\nRemainder of file ignored" - break - if reset: - known_paths = None - return known_paths - - -def addsitedir(sitedir, known_paths=None): - """Add 'sitedir' argument to sys.path if missing and handle .pth files in - 'sitedir'""" - if known_paths is None: - known_paths = _init_pathinfo() - reset = 1 - else: - reset = 0 - sitedir, sitedircase = makepath(sitedir) - if not sitedircase in known_paths: - sys.path.append(sitedir) # Add path component - try: - names = os.listdir(sitedir) - except os.error: - return - dotpth = os.extsep + "pth" - names = [name for 
name in names if name.endswith(dotpth)] - for name in sorted(names): - addpackage(sitedir, name, known_paths) - if reset: - known_paths = None - return known_paths - - -def check_enableusersite(): - """Check if user site directory is safe for inclusion - - The function tests for the command line flag (including environment var), - process uid/gid equal to effective uid/gid. - - None: Disabled for security reasons - False: Disabled by user (command line option) - True: Safe and enabled - """ - if sys.flags.no_user_site: - return False - - if hasattr(os, "getuid") and hasattr(os, "geteuid"): - # check process uid == effective uid - if os.geteuid() != os.getuid(): - return None - if hasattr(os, "getgid") and hasattr(os, "getegid"): - # check process gid == effective gid - if os.getegid() != os.getgid(): - return None - - return True - -def getuserbase(): - """Returns the `user base` directory path. - - The `user base` directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. - """ - global USER_BASE - if USER_BASE is not None: - return USER_BASE - from sysconfig import get_config_var - USER_BASE = get_config_var('userbase') - return USER_BASE - -def getusersitepackages(): - """Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. 
- """ - global USER_SITE - user_base = getuserbase() # this will also set USER_BASE - - if USER_SITE is not None: - return USER_SITE - - from sysconfig import get_path - import os - - if sys.platform == 'darwin': - from sysconfig import get_config_var - if get_config_var('PYTHONFRAMEWORK'): - USER_SITE = get_path('purelib', 'osx_framework_user') - return USER_SITE - - USER_SITE = get_path('purelib', '%s_user' % os.name) - return USER_SITE - -def addusersitepackages(known_paths): - """Add a per user site-package to sys.path - - Each user has its own python directory with site-packages in the - home directory. - """ - # get the per user site-package path - # this call will also make sure USER_BASE and USER_SITE are set - user_site = getusersitepackages() - - if ENABLE_USER_SITE and os.path.isdir(user_site): - addsitedir(user_site, known_paths) - return known_paths - -def getsitepackages(): - """Returns a list containing all global site-packages directories - (and possibly site-python). - - For each directory present in the global ``PREFIXES``, this function - will find its `site-packages` subdirectory depending on the system - environment, and will return a list of full paths. - """ - sitepackages = [] - seen = set() - - for prefix in PREFIXES: - if not prefix or prefix in seen: - continue - seen.add(prefix) - - if sys.platform in ('os2emx', 'riscos'): - sitepackages.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitepackages.append(os.path.join(prefix, "lib", - "python" + sys.version[:3], - "site-packages")) - sitepackages.append(os.path.join(prefix, "lib", "site-python")) - else: - sitepackages.append(prefix) - sitepackages.append(os.path.join(prefix, "lib", "site-packages")) - if sys.platform == "darwin": - # for framework builds *only* we add the standard Apple - # locations. - # DISABLED FOR UV-CDAT! 
- pass - #from sysconfig import get_config_var - #framework = get_config_var("PYTHONFRAMEWORK") - #if framework: - # sitepackages.append( - # os.path.join("/Library", framework, - # sys.version[:3], "site-packages")) - return sitepackages - -def addsitepackages(known_paths): - """Add site-packages (and possibly site-python) to sys.path""" - for sitedir in getsitepackages(): - if os.path.isdir(sitedir): - addsitedir(sitedir, known_paths) - - return known_paths - -def setBEGINLIBPATH(): - """The OS/2 EMX port has optional extension modules that do double duty - as DLLs (and must use the .DLL file extension) for other extensions. - The library search path needs to be amended so these will be found - during module import. Use BEGINLIBPATH so that these are at the start - of the library search path. - - """ - dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") - libpath = os.environ['BEGINLIBPATH'].split(';') - if libpath[-1]: - libpath.append(dllpath) - else: - libpath[-1] = dllpath - os.environ['BEGINLIBPATH'] = ';'.join(libpath) - - -def setquit(): - """Define new builtins 'quit' and 'exit'. - - These are objects which make the interpreter exit when called. - The repr of each object contains a hint at how it works. - - """ - if os.sep == ':': - eof = 'Cmd-Q' - elif os.sep == '\\': - eof = 'Ctrl-Z plus Return' - else: - eof = 'Ctrl-D (i.e. EOF)' - - class Quitter(object): - def __init__(self, name): - self.name = name - def __repr__(self): - return 'Use %s() or %s to exit' % (self.name, eof) - def __call__(self, code=None): - # Shells like IDLE catch the SystemExit, but listen when their - # stdin wrapper is closed. 
- try: - sys.stdin.close() - except: - pass - raise SystemExit(code) - __builtin__.quit = Quitter('quit') - __builtin__.exit = Quitter('exit') - - -class _Printer(object): - """interactive prompt objects for printing the license text, a list of - contributors and the copyright notice.""" - - MAXLINES = 23 - - def __init__(self, name, data, files=(), dirs=()): - self.__name = name - self.__data = data - self.__files = files - self.__dirs = dirs - self.__lines = None - - def __setup(self): - if self.__lines: - return - data = None - for dir in self.__dirs: - for filename in self.__files: - filename = os.path.join(dir, filename) - try: - fp = file(filename, "rU") - data = fp.read() - fp.close() - break - except IOError: - pass - if data: - break - if not data: - data = self.__data - self.__lines = data.split('\n') - self.__linecnt = len(self.__lines) - - def __repr__(self): - self.__setup() - if len(self.__lines) <= self.MAXLINES: - return "\n".join(self.__lines) - else: - return "Type %s() to see the full %s text" % ((self.__name,)*2) - - def __call__(self): - self.__setup() - prompt = 'Hit Return for more, or q (and Return) to quit: ' - lineno = 0 - while 1: - try: - for i in range(lineno, lineno + self.MAXLINES): - print self.__lines[i] - except IndexError: - break - else: - lineno += self.MAXLINES - key = None - while key is None: - key = raw_input(prompt) - if key not in ('', 'q'): - key = None - if key == 'q': - break - -def setcopyright(): - """Set 'copyright' and 'credits' in __builtin__""" - __builtin__.copyright = _Printer("copyright", sys.copyright) - if sys.platform[:4] == 'java': - __builtin__.credits = _Printer( - "credits", - "Jython is maintained by the Jython developers (www.jython.org).") - else: - __builtin__.credits = _Printer("credits", """\ - Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands - for supporting Python development. 
See www.python.org for more information.""") - here = os.path.dirname(os.__file__) - __builtin__.license = _Printer( - "license", "See http://www.python.org/%.3s/license.html" % sys.version, - ["LICENSE.txt", "LICENSE"], - [os.path.join(here, os.pardir), here, os.curdir]) - - -class _Helper(object): - """Define the builtin 'help'. - This is a wrapper around pydoc.help (with a twist). - - """ - - def __repr__(self): - return "Type help() for interactive help, " \ - "or help(object) for help about object." - def __call__(self, *args, **kwds): - import pydoc - return pydoc.help(*args, **kwds) - -def sethelper(): - __builtin__.help = _Helper() - -def aliasmbcs(): - """On Windows, some default encodings are not provided by Python, - while they are always available as "mbcs" in each locale. Make - them usable by aliasing to "mbcs" in such a case.""" - if sys.platform == 'win32': - import locale, codecs - enc = locale.getdefaultlocale()[1] - if enc.startswith('cp'): # "cp***" ? - try: - codecs.lookup(enc) - except LookupError: - import encodings - encodings._cache[enc] = encodings._unknown - encodings.aliases.aliases[enc] = 'mbcs' - -def setencoding(): - """Set the string encoding used by the Unicode implementation. The - default is 'ascii', but if you're willing to experiment, you can - change this.""" - encoding = "ascii" # Default value set by _PyUnicode_Init() - if 0: - # Enable to support locale aware default string encodings. - import locale - loc = locale.getdefaultlocale() - if loc[1]: - encoding = loc[1] - if 0: - # Enable to switch off string to Unicode coercion and implicit - # Unicode to string conversion. - encoding = "undefined" - if encoding != "ascii": - # On Non-Unicode builds this will raise an AttributeError... - sys.setdefaultencoding(encoding) # Needs Python Unicode build ! 
- - -def execsitecustomize(): - """Run custom site specific code, if available.""" - try: - import sitecustomize - except ImportError: - pass - except Exception: - if sys.flags.verbose: - sys.excepthook(*sys.exc_info()) - else: - print >>sys.stderr, \ - "'import sitecustomize' failed; use -v for traceback" - - -def execusercustomize(): - """Run custom user specific code, if available.""" - try: - import usercustomize - except ImportError: - pass - except Exception: - if sys.flags.verbose: - sys.excepthook(*sys.exc_info()) - else: - print>>sys.stderr, \ - "'import usercustomize' failed; use -v for traceback" - - -def main(): - global ENABLE_USER_SITE - - abs__file__() - known_paths = removeduppaths() - if ENABLE_USER_SITE is None: - ENABLE_USER_SITE = check_enableusersite() - known_paths = addusersitepackages(known_paths) - known_paths = addsitepackages(known_paths) - if sys.platform == 'os2emx': - setBEGINLIBPATH() - setquit() - setcopyright() - sethelper() - aliasmbcs() - setencoding() - execsitecustomize() - if ENABLE_USER_SITE: - execusercustomize() - # Remove sys.setdefaultencoding() so that users cannot change the - # encoding after initialization. The test for presence is needed when - # this module is run as a script, because this code is executed twice. - if hasattr(sys, "setdefaultencoding"): - del sys.setdefaultencoding - -main() - -def _script(): - help = """\ - %s [--user-base] [--user-site] - - Without arguments print some useful information - With arguments print the value of USER_BASE and/or USER_SITE separated - by '%s'. 
- - Exit codes with --user-base or --user-site: - 0 - user site directory is enabled - 1 - user site directory is disabled by user - 2 - uses site directory is disabled by super user - or for security reasons - >2 - unknown error - """ - args = sys.argv[1:] - if not args: - print "sys.path = [" - for dir in sys.path: - print " %r," % (dir,) - print "]" - print "USER_BASE: %r (%s)" % (USER_BASE, - "exists" if os.path.isdir(USER_BASE) else "doesn't exist") - print "USER_SITE: %r (%s)" % (USER_SITE, - "exists" if os.path.isdir(USER_SITE) else "doesn't exist") - print "ENABLE_USER_SITE: %r" % ENABLE_USER_SITE - sys.exit(0) - - buffer = [] - if '--user-base' in args: - buffer.append(USER_BASE) - if '--user-site' in args: - buffer.append(USER_SITE) - - if buffer: - print os.pathsep.join(buffer) - if ENABLE_USER_SITE: - sys.exit(0) - elif ENABLE_USER_SITE is False: - sys.exit(1) - elif ENABLE_USER_SITE is None: - sys.exit(2) - else: - sys.exit(3) - else: - import textwrap - print textwrap.dedent(help % (sys.argv[0], os.pathsep)) - sys.exit(10) - -if __name__ == '__main__': - _script() diff --git a/pysrc/tcl.sh b/pysrc/tcl.sh deleted file mode 100755 index d8c7fbf6c2..0000000000 --- a/pysrc/tcl.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -. ./prolog.sh -# tcl -cd tcl8* -cd unix -if (test "${OS}" = "Darwin") then # MacIntosh OSX - ./configure --prefix=${prefix}/Externals -else - ./configure --disable-shared --prefix=${prefix}/Externals -fi - -if (test $? -ne 0) then - echo "tcl configuration failed."; - exit 1; -fi -make -if (test $? -ne 0) then - echo "tcl make failed."; - exit 1; -fi -make install -if (test $? -ne 0) then - echo "tcl install failed."; - exit 1; -fi diff --git a/pysrc/tk.sh b/pysrc/tk.sh deleted file mode 100755 index 8878bbe754..0000000000 --- a/pysrc/tk.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -. 
./prolog.sh -cd tk8* -cd unix -if (test "${OS}" = "Darwin") then # MacIntosh OSX - ./configure --prefix=${prefix}/Externals -else - ./configure --disable-shared --prefix=${prefix}/Externals -fi - -if (test $? -ne 0) then - echo "tk configuration failed."; - exit 1; -fi -make -if (test $? -ne 0) then - echo "tk make failed."; - exit 1; -fi -make install -if (test $? -ne 0) then - echo "tk installation failed."; - exit 1; -fi diff --git a/pysrc/zlib.sh b/pysrc/zlib.sh deleted file mode 100755 index c2497bc94f..0000000000 --- a/pysrc/zlib.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -. ./prolog.sh $* -cd zlib-* -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -./configure --prefix=${prefix}/Externals -if (test $? -ne 0) then - echo "zlib configuration failed."; - exit 1; -fi -make -if (test $? -ne 0) then - echo "zlib make failed."; - exit 1; -fi -make install -if (test $? -ne 0) then - echo "zlib installation failed."; - exit 1; -fi diff --git a/resources/uvcdat.icns b/resources/uvcdat.icns deleted file mode 100644 index 3d1efc0aa5..0000000000 Binary files a/resources/uvcdat.icns and /dev/null differ diff --git a/resources/uvcdat.jpg b/resources/uvcdat.jpg deleted file mode 100644 index 16835ba2c6..0000000000 Binary files a/resources/uvcdat.jpg and /dev/null differ diff --git a/scripts/clean_script b/scripts/clean_script deleted file mode 100755 index ebf3bfa963..0000000000 --- a/scripts/clean_script +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -if ( test "$1" = "all" ) then - (cd pysrc >/dev/null ; ./clean_script) - (cd exsrc >/dev/null; ./clean_script) - (cd logs >/dev/null ; /bin/rm *.LOG >/dev/null 2>&1) -fi - -(cd libcdms;./clean_script >cdms.LOG 2>&1; /bin/rm -f cdms.LOG rebuild.py rebuild.LOG >/dev/null 2>&1) -(cd esg; /bin/rm -fr build *.LOG rebuild.py *.log >/dev/null 2>&1) -(cd Packages; /bin/rm -fr vcs/cdatwrap */build */rebuild.py */*.LOG */Test/Good 
*/*.log >/dev/null 2>&1) -(cd Packages/visus/src/pyvisus ; /bin/rm -rf build >/dev/null 2>&1) -(cd contrib;/bin/rm -fr */build */*.o */*.a */*.pyc */Src/*.o */Src/*.a */rebuild.py */*.LOG */Test/Good */*.log >/dev/null 2>&1) -/bin/rm -fr build *.pyc installation/build installation/cdat_info.* *~ rebuild.py >/dev/null 2>&1 -find . -name 'config.cache' -print -exec rm {} \; diff --git a/scripts/get_git_version.sh b/scripts/get_git_version.sh deleted file mode 100755 index 7d27fa7fe1..0000000000 --- a/scripts/get_git_version.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env sh - -if [ "X"${CC} = "X" ] ; then - gcc show_git.c -o a.out ; ./a.out ; rm a.out -else - ${CC} show_git.c -o a.out ; ./a.out ; rm a.out -fi diff --git a/scripts/git_hooks/commit-msg b/scripts/git_hooks/commit-msg deleted file mode 100755 index 672bfaae2f..0000000000 --- a/scripts/git_hooks/commit-msg +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -# placeholder for custom commit-msg hooks diff --git a/scripts/git_hooks/pre-commit b/scripts/git_hooks/pre-commit deleted file mode 100755 index e7b50ac4ae..0000000000 --- a/scripts/git_hooks/pre-commit +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -# Reject commits directly to 'master' to encourage use of topic branches. -if test -z "$HOOKS_ALLOW_COMMIT_MASTER"; then - if git symbolic-ref HEAD | egrep -q '^refs/heads/master$'; then - echo 'Please do not commit directly to "master". Create a topic instead: - - git checkout -b my-topic - git commit -' - exit 1 - fi -fi diff --git a/scripts/git_hooks/pre-push b/scripts/git_hooks/pre-push deleted file mode 100755 index 424f890f9a..0000000000 --- a/scripts/git_hooks/pre-push +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -# Reject pushes directly to 'master' to encourage use of topic branches. 
-if test -z "$HOOKS_ALLOW_PUSH_MASTER"; then - while IFS=' ' read local_ref local_sha1 remote_ref remote_sha1; do - if test "x$remote_ref" = "xrefs/heads/master"; then - echo 'Please do not push directly to "master". Push to a topic instead: - - git push '"$1"' '"$local_ref"':my-topic -' - exit 1 - fi - done -fi diff --git a/scripts/git_hooks/prepare-commit-msg b/scripts/git_hooks/prepare-commit-msg deleted file mode 100755 index 1571a7d203..0000000000 --- a/scripts/git_hooks/prepare-commit-msg +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -# placeholder for custom prepare-commit-msg hooks diff --git a/scripts/git_setup/.gitattributes b/scripts/git_setup/.gitattributes deleted file mode 100644 index 3323f94b9b..0000000000 --- a/scripts/git_setup/.gitattributes +++ /dev/null @@ -1,9 +0,0 @@ -.git* export-ignore - -# Exclude from source archives files specific to Git work tree. -* export-ignore - -config* eol=lf whitespace=indent-with-non-tab -git-* eol=lf whitespace=indent-with-non-tab -tips eol=lf whitespace=indent-with-non-tab -setup-* eol=lf whitespace=indent-with-non-tab diff --git a/scripts/git_setup/LICENSE b/scripts/git_setup/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/scripts/git_setup/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/scripts/git_setup/NOTICE b/scripts/git_setup/NOTICE deleted file mode 100644 index 0d32c02eb6..0000000000 --- a/scripts/git_setup/NOTICE +++ /dev/null @@ -1,5 +0,0 @@ -Kitware Local Git Setup Scripts -Copyright 2010-2012 Kitware, Inc. - -This product includes software developed at Kitware, Inc. -(http://www.kitware.com/). diff --git a/scripts/git_setup/README b/scripts/git_setup/README deleted file mode 100644 index cf468fb68b..0000000000 --- a/scripts/git_setup/README +++ /dev/null @@ -1,80 +0,0 @@ -Kitware Local Git Setup Scripts - - -Introduction ------------- - -This is a collection of local Git development setup scripts meant for -inclusion in project source trees to aid their development workflow. -Project-specific information needed by the scripts may be configured -in a "config" file added next to them in the project. - - -Import ------- - -A project may import these scripts into their source tree by -initializing a subtree merge. Bring up a Git prompt and set the -current working directory inside a clone of the target project. -Fetch the "setup" branch from the GitSetup repository: - - $ git fetch ../GitSetup setup:setup - -Prepare to merge the branch but place the content in a subdirectory. 
-Any prefix (with trailing '/') may be chosen so long as it is used -consistently within a project through the rest of these instructions: - - $ git merge -s ours --no-commit setup - $ git read-tree -u --prefix=Utilities/GitSetup/ setup - -Commit the merge with an informative message: - - $ git commit - ------------------------------------------------------------------------ - Merge branch 'setup' - - Add Utilities/GitSetup/ directory using subtree merge from - the general GitSetup repository "setup" branch. - ------------------------------------------------------------------------ - - -Configuration -------------- - -Read the "Project configuration instructions" comment in each script. -Add a "config" file next to the scripts with desired configuration -(optionally copy and modify "config.sample"). For example, to -configure the "setup-hooks" script: - - $ git config -f Utilities/GitSetup/config hooks.url "$url" - -where "$url" is the project repository publishing the "hooks" branch. -When finished, add and commit the configuration file: - - $ git add Utilities/GitSetup/config - $ git commit - - -Update ------- - -A project may update these scripts from the GitSetup repository. -Bring up a Git prompt and set the current working directory inside a -clone of the target project. Fetch the "setup" branch from the -GitSetup repository: - - $ git fetch ../GitSetup setup:setup - -Merge the "setup" branch into the subtree: - - $ git merge -X subtree=Utilities/GitSetup setup - -where "Utilities/GitSetup" is the same prefix used during the import -setup, but without a trailing '/'. - - -License -------- - -Distributed under the Apache License 2.0. -See LICENSE and NOTICE for details. 
diff --git a/scripts/git_setup/config b/scripts/git_setup/config deleted file mode 100644 index 6fd06e8cc0..0000000000 --- a/scripts/git_setup/config +++ /dev/null @@ -1,2 +0,0 @@ -[hooks] - url = https://github.com/UV-CDAT/uvcdat diff --git a/scripts/git_setup/config.sample b/scripts/git_setup/config.sample deleted file mode 100644 index bba2382c3c..0000000000 --- a/scripts/git_setup/config.sample +++ /dev/null @@ -1,22 +0,0 @@ -# Kitware Local Git Setup Scripts - Sample Project Configuration -# -# Copy to "config" and edit as necessary. - -[hooks] - url = http://public.kitware.com/GitSetup.git - #branch = hooks - -[ssh] - host = public.kitware.com - key = id_git_public - request-url = https://www.kitware.com/Admin/SendPassword.cgi - -[stage] - #url = git://public.kitware.com/stage/Project.git - #pushurl = git@public.kitware.com:stage/Project.git - -[gerrit] - #project = Project - site = http://review.source.kitware.com - # pushurl placeholder "$username" is literal - pushurl = $username@review.source.kitware.com:Project diff --git a/scripts/git_setup/git-gerrit-push b/scripts/git_setup/git-gerrit-push deleted file mode 100755 index 2471490c25..0000000000 --- a/scripts/git_setup/git-gerrit-push +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#============================================================================= - -USAGE="[] [--no-topic] [--dry-run] [--]" -OPTIONS_SPEC= -SUBDIRECTORY_OK=Yes -. "$(git --exec-path)/git-sh-setup" - -#----------------------------------------------------------------------------- - -remote='' -refspecs='' -no_topic='' -dry_run='' - -# Parse the command line options. -while test $# != 0; do - case "$1" in - --no-topic) no_topic=1 ;; - --dry-run) dry_run=--dry-run ;; - --) shift; break ;; - -*) usage ;; - *) test -z "$remote" || usage ; remote="$1" ;; - esac - shift -done -test $# = 0 || usage - -# Default remote. -test -n "$remote" || remote="gerrit" - -if test -z "$no_topic"; then - # Identify and validate the topic branch name. - topic="$(git symbolic-ref HEAD | sed -e 's|^refs/heads/||')" - if test "$topic" = "master"; then - die 'Please name your topic: - git checkout -b descriptive-name' - fi - refspecs="HEAD:refs/for/master/$topic" -fi - -# Exit early if we have nothing to push. -if test -z "$refspecs"; then - echo "Nothing to push!" - exit 0 -fi - -# Fetch the current upstream master branch head. -# This helps the computation of a minimal pack to push. -echo "Fetching $remote master" -fetch_out=$(git fetch "$remote" master 2>&1) || die "$fetch_out" - -# Push. Save output and exit code. -echo "Pushing to $remote" -push_stdout=$(git push --porcelain $dry_run "$remote" $refspecs); push_exit=$? -echo "$push_stdout" - -# Reproduce the push exit code. -exit $push_exit diff --git a/scripts/git_setup/setup-gerrit b/scripts/git_setup/setup-gerrit deleted file mode 100755 index 6d46e3ccf5..0000000000 --- a/scripts/git_setup/setup-gerrit +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to set up the local Git repository to push to -# a Gerrit Code Review instance for this project. - -# Project configuration instructions: -# -# - Run a Gerrit Code Review server -# -# - Populate adjacent "config" file with: -# gerrit.site = Top Gerrit URL (not project-specific) -# gerrit.project = Name of project in Gerrit -# gerrit.pushurl = Review site push URL with "$username" placeholder -# gerrit.remote = Gerrit remote name, if not "gerrit" -# gerrit.url = Gerrit project URL, if not "$site/p/$project" -# optionally with "$username" placeholder - -die() { - echo 1>&2 "$@" ; exit 1 -} - -# Make sure we are inside the repository. -cd "${BASH_SOURCE%/*}" && - -# Load the project configuration. -site=$(git config -f config --get gerrit.site) && -project=$(git config -f config --get gerrit.project) && -remote=$(git config -f config --get gerrit.remote || - echo "gerrit") && -fetchurl_=$(git config -f config --get gerrit.url || - echo "$site/p/$project") && -pushurl_=$(git config -f config --get gerrit.pushurl || - git config -f config --get gerrit.url) || -die 'This project is not configured to use Gerrit.' - -# Get current gerrit push URL. -pushurl=$(git config --get remote."$remote".pushurl || - git config --get remote."$remote".url || echo '') && - -# Tell user about current configuration. -if test -n "$pushurl"; then - echo 'Remote "'"$remote"'" is currently configured to push to - - '"$pushurl"' -' && - read -ep 'Reconfigure Gerrit? 
[y/N]: ' ans && - if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then - setup=1 - else - setup='' - fi -else - echo 'Remote "'"$remote"'" is not yet configured. - -'"$project"' changes must be pushed to our Gerrit Code Review site: - - '"$site/p/$project"' - -Register a Gerrit account and select a username (used below). -You will need an OpenID: - - http://openid.net/get-an-openid/ -' && - read -ep 'Configure Gerrit? [Y/n]: ' ans && - if [ "$ans" == "n" ] || [ "$ans" == "N" ]; then - exit 0 - else - setup=1 - fi -fi && - -# Perform setup if necessary. -if test -n "$setup"; then - echo 'Sign-in to Gerrit to get/set your username at - - '"$site"'/#/settings - -Add your SSH public keys at - - '"$site"'/#/settings/ssh-keys -' && - read -ep "Gerrit username? [$USER]: " gu && - if test -z "$gu"; then - gu="$USER" - fi && - fetchurl="${fetchurl_/\$username/$gu}" && - if test -z "$pushurl"; then - git remote add "$remote" "$fetchurl" - else - git config remote."$remote".url "$fetchurl" - fi && - pushurl="${pushurl_/\$username/$gu}" && - if test "$pushurl" != "$fetchurl"; then - git config remote."$remote".pushurl "$pushurl" - fi && - echo 'Remote "'"$remote"'" is now configured to push to - - '"$pushurl"' -' -fi && - -# Optionally test Gerrit access. -if test -n "$pushurl"; then - read -ep 'Test access to Gerrit (SSH)? [y/N]: ' ans && - if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then - echo -n 'Testing Gerrit access by SSH...' - if git ls-remote --heads "$pushurl" >/dev/null; then - echo 'passed.' - else - echo 'failed.' && - die 'Could not access Gerrit. Add your SSH public keys at - - '"$site"'/#/settings/ssh-keys -' - fi - fi -fi && - -# Set up GerritId hook. -hook=$(git config --get hooks.GerritId || echo '') && -if test -z "$hook"; then - echo ' -Enabling GerritId hook to add a "Change-Id" footer to commit -messages for interaction with Gerrit. Run - - git config hooks.GerritId false - -to disable this feature (but you will be on your own).' 
&& - git config hooks.GerritId true -else - echo 'GerritId hook already configured to "'"$hook"'".' -fi diff --git a/scripts/git_setup/setup-hooks b/scripts/git_setup/setup-hooks deleted file mode 100755 index c07985ae56..0000000000 --- a/scripts/git_setup/setup-hooks +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to set up local Git hooks for this project. - -# Project configuration instructions: -# -# - Publish a "hooks" branch in the project repository such that -# clones will have "refs/remotes/origin/hooks". -# -# - Populate adjacent "config" file with: -# hooks.url = Repository URL publishing "hooks" branch -# hooks.branch = Repository branch instead of "hooks" - -egrep-q() { - egrep "$@" >/dev/null 2>/dev/null -} - -die() { - echo 1>&2 "$@" ; exit 1 -} - -# Make sure we are inside the repository. -cd "${BASH_SOURCE%/*}" && - -# Select a hooks branch. -if url=$(git config --get hooks.url); then - # Fetch hooks from locally configured repository. - branch=$(git config hooks.branch || echo hooks) -elif git for-each-ref refs/remotes/origin/hooks 2>/dev/null | - egrep-q 'refs/remotes/origin/hooks$'; then - # Use hooks cloned from origin. - url=.. 
&& branch=remotes/origin/hooks -elif url=$(git config -f config --get hooks.url); then - # Fetch hooks from project-configured repository. - branch=$(git config -f config hooks.branch || echo hooks) -else - die 'This project is not configured to install local hooks.' -fi && - -# Populate ".git/hooks". -echo 'Setting up git hooks...' && -git_dir=$(git rev-parse --git-dir) && -cd "$git_dir/hooks" && -if ! test -e .git; then - git init -q || die 'Could not run git init for hooks.' -fi && -git fetch -q "$url" "$branch" && -git reset -q --hard FETCH_HEAD || die 'Failed to install hooks' diff --git a/scripts/git_setup/setup-ssh b/scripts/git_setup/setup-ssh deleted file mode 100755 index 8920a5bd33..0000000000 --- a/scripts/git_setup/setup-ssh +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to set up ssh push access to the repository host. 
- -# Project configuration instructions: -# -# - Populate adjacent "config" file with: -# ssh.host = Repository host name -# ssh.user = Username on host, if not "git" -# ssh.key = Local ssh key name -# ssh.request-url = Web page URL to request ssh access - -egrep-q() { - egrep "$@" >/dev/null 2>/dev/null -} - -die() { - echo 1>&2 "$@" ; exit 1 -} - -# Make sure we are inside the repository. -cd "${BASH_SOURCE%/*}" && - -# Load the project configuration. -host=$(git config -f config --get ssh.host) && -user=$(git config -f config --get ssh.user || echo git) && -key=$(git config -f config --get ssh.key) && -request_url=$(git config -f config --get ssh.request-url) || -die 'This project is not configured for ssh push access.' - -# Check for existing configuration. -if test -r ~/.ssh/config && - egrep-q 'Host[= ]'"${host//\./\\.}" ~/.ssh/config; then - echo 'Host "'"$host"'" is already in ~/.ssh/config' && - setup= && - question='Test' -else - echo 'Host "'"$host"'" not found in ~/.ssh/config' && - setup=1 && - question='Setup and test' -fi && - -# Ask the user whether to make changes. -echo '' && -read -ep "${question} push access by ssh to $user@$host? [y/N]: " access && -if test "$access" != "y" -a "$access" != "Y"; then - exit 0 -fi && - -# Setup host configuration if necessary. -if test -n "$setup"; then - if ! test -d ~/.ssh; then - mkdir -p ~/.ssh && - chmod 700 ~/.ssh - fi && - if ! test -f ~/.ssh/config; then - touch ~/.ssh/config && - chmod 600 ~/.ssh/config - fi && - ssh_config='Host='"$host"' - IdentityFile ~/.ssh/'"$key" && - echo "Adding to ~/.ssh/config: - -$ssh_config -" && - echo "$ssh_config" >> ~/.ssh/config && - if ! test -e ~/.ssh/"$key"; then - if test -f ~/.ssh/id_rsa; then - # Take care of the common case. - ln -s id_rsa ~/.ssh/"$key" - echo ' -Assuming ~/.ssh/id_rsa is the private key corresponding to the public key for - - '"$user@$host"' - -If this is incorrect place private key at "~/.ssh/'"$key"'".' 
- else - echo ' -Place the private key corresponding to the public key registered for - - '"$user@$host"' - -at "~/.ssh/'"$key"'".' - fi - read -e -n 1 -p 'Press any key to continue...' - fi -fi || exit 1 - -# Test access configuration. -echo 'Testing ssh push access to "'"$user@$host"'"...' && -if ! ssh "$user@$host" info; then - die 'No ssh push access to "'"$user@$host"'". You may need to request access at - - '"$request_url"' -' -fi diff --git a/scripts/git_setup/setup-stage b/scripts/git_setup/setup-stage deleted file mode 100755 index ce6ec45748..0000000000 --- a/scripts/git_setup/setup-stage +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to set up the topic stage for pushing changes. - -# Project configuration instructions: -# -# - Run a Topic Stage repository next to the main project repository. -# -# - Populate adjacent "config" file with: -# stage.url = Topic Stage repository URL -# stage.pushurl = Topic Stage push URL if not "$url" - -egrep-q() { - egrep "$@" >/dev/null 2>/dev/null -} - -die() { - echo 1>&2 "$@" ; exit 1 -} - -# Make sure we are inside the repository. -cd "${BASH_SOURCE%/*}" && - -# Load the project configuration. 
-fetchurl_=$(git config -f config --get stage.url) && -pushurl_=$(git config -f config --get stage.pushurl || echo "$fetchurl_") && -remote=$(git config -f config --get stage.remote || echo 'stage') || -die 'This project is not configured to use a topic stage.' - -# Get current stage push URL. -pushurl=$(git config --get remote."$remote".pushurl || - git config --get remote."$remote".url || echo '') && - -# Tell user about current configuration. -if test -n "$pushurl"; then - echo 'Remote "'"$remote"'" is currently configured to push to - - '"$pushurl"' -' && - read -ep 'Reconfigure Topic Stage? [y/N]: ' ans && - if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then - setup=1 - else - setup='' - fi -else - setup=1 -fi - -# Perform setup if necessary. -if test -n "$setup"; then - echo 'Setting up the topic stage...' && - fetchurl="${fetchurl_}" && - if test -z "$pushurl"; then - git remote add "$remote" "$fetchurl" - else - git config remote."$remote".url "$fetchurl" - fi && - pushurl="${pushurl_}" && - if test "$pushurl" != "$fetchurl"; then - git config remote."$remote".pushurl "$pushurl" - fi && - echo 'Remote "'"$remote"'" is now configured to push to - - '"$pushurl"' -' -fi || die 'Could not configure the topic stage remote.' diff --git a/scripts/git_setup/setup-user b/scripts/git_setup/setup-user deleted file mode 100755 index 1af439c45e..0000000000 --- a/scripts/git_setup/setup-user +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to configure Git user info in this repository. - -# Project configuration instructions: NONE - -for (( ; ; )); do - user_name=$(git config user.name || echo '') && - user_email=$(git config user.email || echo '') && - if test -n "$user_name" -a -n "$user_email"; then - echo 'Your commits will record as Author: - - '"$user_name <$user_email>"' -' && - read -ep 'Is the author name and email address above correct? [Y/n] ' correct && - if test "$correct" != "n" -a "$correct" != "N"; then - break - fi - fi && - read -ep 'Enter your full name e.g. "John Doe": ' name && - read -ep 'Enter your email address e.g. "john@gmail.com": ' email && - git config user.name "$name" && - git config user.email "$email" -done diff --git a/scripts/git_setup/setup_aliases.sh b/scripts/git_setup/setup_aliases.sh deleted file mode 100755 index 9771708161..0000000000 --- a/scripts/git_setup/setup_aliases.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -echo "Setting up useful Git aliases..." && - -# General aliases that could be global -git config alias.prepush 'log --graph --stat origin/master..' && - -true diff --git a/scripts/git_setup/tips b/scripts/git_setup/tips deleted file mode 100755 index 784e1ed890..0000000000 --- a/scripts/git_setup/tips +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# This script makes optional suggestions for working with Git. - -# Project configuration instructions: NONE - -egrep-q() { - egrep "$@" >/dev/null 2>/dev/null -} - -# Suggest color configuration. -if test -z "$(git config --get color.ui)"; then - echo ' -One may enable color output from Git commands with - - git config --global color.ui auto -' -fi - -# Suggest bash completion. -if ! bash -i -c 'echo $PS1' | egrep-q '__git_ps1'; then - echo ' -A dynamic, informative Git shell prompt can be obtained by sourcing -the git bash-completion script in your "~/.bashrc". Set the PS1 -environmental variable as suggested in the comments at the top of the -bash-completion script. You may need to install the bash-completion -package from your distribution to obtain it. -' -fi - -# Suggest merge tool. -if test -z "$(git config --get merge.tool)"; then - echo ' -One may configure Git to load a merge tool with - - git config merge.tool - -See "git help mergetool" for more information. 
-' -fi diff --git a/scripts/last_update_time.py b/scripts/last_update_time.py deleted file mode 100644 index a0bd0aed4c..0000000000 --- a/scripts/last_update_time.py +++ /dev/null @@ -1,12 +0,0 @@ -import os - -ln=os.popen('p4 changes -m 1 //depot/main/...').readlines() - - -for l in ln: - sp=l.split() - date='_'.join(sp[3].split('/')) - date=sp[3] - print 'Last change on:',date, 'for more info run: p4 changes -m 1 //depot/main/...' - - diff --git a/scripts/nightly.sh b/scripts/nightly.sh deleted file mode 100755 index 29e23fa357..0000000000 --- a/scripts/nightly.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - - -## script to build automatically (UV-)CDAT - -INSTALL_PATH=$1 -GIT_BRANCH=$2 -GIT_PATH=$3 -QMAKE_EXE=$4 -XTRA_ARGS=$5 - -here=`pwd` - -: ${INSTALL_PATH:="/lgm/uvcdat/nightly"} -: ${GIT_BRANCH:="next"} -: ${GIT_PATH:="/git/uv-cdat"} -: ${QMAKE_EXE:="/usr/bin/qmake"} -#: ${XTRA_ARGS:="-DCDAT_USE_LIBXML2=ON -DCDAT_USE_SYSTEM_PNG=ON"} - -echo "XTRA_ARGS:"${XTRA_ARGS} - -cd ${GIT_PATH} ; \ -git checkout ${GIT_BRANCH} ; \ -git pull ; \ -/bin/rm -rf ${INSTALL_PATH} ; \ -cd ${here} ; \ -rm -rf build_nightly ;\ -mkdir build_nightly ;\ -cd build_nightly ;\ -cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} -DQT_QMAKE_EXECUTABLE=${QMAKE_EXE} ${XTRA_ARGS} ${GIT_PATH} ; \ -cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} -DQT_QMAKE_EXECUTABLE=${QMAKE_EXE} ${XTRA_ARGS} ${GIT_PATH} ; \ -pwd ; \ -make -j16 ; \ - - diff --git a/scripts/setup_for_development.sh b/scripts/setup_for_development.sh deleted file mode 100755 index 190f39a816..0000000000 --- a/scripts/setup_for_development.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash - -cd "${BASH_SOURCE%/*}/.." && -scripts/git_setup/setup-user && echo && -scripts/git_setup/setup-hooks && echo && -scripts/git_setup/setup_aliases.sh && echo && -scripts/git_setup/tips - -# Rebase master by default -git config branch.master.rebase true - -# Configure remote push URL. 
-if url="$(git config --get remote.origin.url)" && - echo "$url" | egrep -q '^(https?|git)://github.com/UV-CDAT/uvcdat(\.git)?$' && - ! pushurl="$(git config --get remote.origin.pushurl)"; then - pushurl='git@github.com:UV-CDAT/uvcdat.git' - echo 'Setting origin pushurl to '"$pushurl" - git config remote.origin.pushurl "$pushurl" -fi diff --git a/scripts/tarballit.sh b/scripts/tarballit.sh deleted file mode 100755 index 1217260c6e..0000000000 --- a/scripts/tarballit.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env sh - -svn export http://www-pcmdi.llnl.gov/svn/repository/cdat/trunk $1 -tar czvf $1-everything.tar.gz $1 -tar czvf $1-cdat.tar.gz --exclude $1/pysrc* --exclude $1/exsrc* $1 -tar czvf $1-pysrc.tar.gz $1/pysrc -tar czvf $1-exsrc.tar.gz $1/exsrc diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt index 2294881725..f890a8e22f 100644 --- a/testing/CMakeLists.txt +++ b/testing/CMakeLists.txt @@ -1,5 +1,5 @@ -# Disabling GUI tests as they don't work -#add_subdirectory(uvcdat) +set(PYTHON_EXECUTABLE python) +set(CDAT_DOWNLOAD_SAMPLE_DATA ON) # Helper macro that sets the environment correctly macro (cdat_add_test name) @@ -12,8 +12,10 @@ macro (cdat_add_test name) endif() endif() - add_test(${name} "${CMAKE_INSTALL_PREFIX}/bin/runtest" - ${ARGS}) + add_test(${name} ${cdat_CMAKE_BINARY_DIR}/runtest ${ARGS}) + if ( NOT (${name} STREQUAL download_sample_data )) + set_tests_properties(${name} PROPERTIES DEPENDS download_sample_data) + endif() if(DEFINED ENV{UVCDAT_ANONYMOUS_LOG}) set_tests_properties (${name} @@ -25,48 +27,20 @@ macro (cdat_add_test name) ) endif() endmacro() -if (CDAT_BUILD_GRAPHICS) - add_subdirectory(regrid) - add_subdirectory(vcs) - add_subdirectory(vcsaddons) - add_subdirectory(dv3d) -endif() + +#separate_arguments(DOWNLOAD_ARGS) +# make sure data is downloaded +cdat_add_test(download_sample_data + vcs_download_sample_data + ) + +add_subdirectory(regrid) +add_subdirectory(vcs) +add_subdirectory(vcsaddons) 
+add_subdirectory(dv3d) add_subdirectory(cdutil) add_subdirectory(Thermo) add_subdirectory(unidata) add_subdirectory(cdms2) add_subdirectory(xmgrace) -if (CDAT_BUILD_OCGIS) - add_subdirectory(ocgis) -endif() -if (CDAT_BUILD_UVCMETRICSPKG) - add_subdirectory(metrics) -endif() - -# Disabling ParaView tests -#if (CDAT_BUILD_PARAVIEW) -# add_subdirectory(paraview) -#endif() - -# Test RPY2 -if (CDAT_BUILD_RPY2) - add_subdirectory(rpy2) -endif() - -# Test Matplotlib -if (CDAT_BUILD_MATPLOTLIB) - add_subdirectory(matplotlib) -endif() - -# PCMDI Tools -if (CDAT_BUILD_PCMDI) - add_subdirectory(pcmdi) -endif() - -# CMake module tests: -# Test that out-of-source build detection is working: -add_test(cmake_checkBuildOutOfSource - "${CMAKE_COMMAND}" - -DTEST_check_build_out_of_source=ON - -P "${cdat_SOURCE_DIR}/CMake/cmake_modules/CheckBuildOutOfSource.cmake" -) +#add_subdirectory(pcmdi) diff --git a/testing/Thermo/CMakeLists.txt b/testing/Thermo/CMakeLists.txt index bae57cea12..c855dc9534 100644 --- a/testing/Thermo/CMakeLists.txt +++ b/testing/Thermo/CMakeLists.txt @@ -1,5 +1,5 @@ -add_test(flake8_Thermo - "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/Thermo/Lib/" +cdat_add_test(flake8_Thermo + flake8 "${cdat_SOURCE_DIR}/Packages/Thermo/Lib/" --show-source # Show context for detected errors --statistics # Show summary of errors at end of output --max-line-length=120 # Reasonable line length diff --git a/testing/regrid/testDistSrc.py b/testing/regrid/testDistSrc.py index 183efd4982..9da231af57 100644 --- a/testing/regrid/testDistSrc.py +++ b/testing/regrid/testDistSrc.py @@ -14,7 +14,7 @@ import unittest import ESMP from regrid2 import esmf -import scipy.io.netcdf +#import scipy.io.netcdf from regrid2 import ESMFRegrid import sys HAS_MPI = False @@ -75,7 +75,7 @@ def setUp(self): def Xtest0_ESMP(self): - import scipy.io.netcdf + #import scipy.io.netcdf # # 1. 
input @@ -85,7 +85,8 @@ def Xtest0_ESMP(self): inFile = cdat_info.get_sampledata_path() + \ '/so_Omon_ACCESS1-0_historical_r1i1p1_185001-185412_2timesteps.nc' - srcF = scipy.io.netcdf.netcdf_file(inFile) + #srcF = scipy.io.netcdf.netcdf_file(inFile) + srcF = cdms2.open(inFile) #so = srcF.variables['so'][0, 0,...] missing_value = 1.e20 srcGrd = [srcF.variables['lat'][:], srcF.variables['lon'][:]] diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index cc82bf0479..a3b3b155e0 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -1,10 +1,11 @@ set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs") -add_test(flake8_vcs - "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/vcs/vcs/" +cdat_add_test(flake8_vcs + flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/" --show-source # Show context for detected errors --statistics # Show summary of errors at end of output --max-line-length=120 # Reasonable line length + --ignore=F999,E121,E123,E126,E226,E24,E704 # recent version show zillions of errors if object come from an import * line ) cdat_add_test(test_vcs_bad_png_path diff --git a/testing/vcs/test_vcs_line_patterns.py b/testing/vcs/test_vcs_line_patterns.py index 7597403fc1..848ebb110f 100755 --- a/testing/vcs/test_vcs_line_patterns.py +++ b/testing/vcs/test_vcs_line_patterns.py @@ -8,8 +8,6 @@ pth = os.path.join(os.path.dirname(__file__), "..") sys.path.append(pth) -import checkimage - x = regression.init(bg=1, geometry=(1620, 1080)) f = cdms2.open(vcs.sample_data + "/clt.nc") diff --git a/testing/vcs/test_vcs_matplotlib_colormap.py b/testing/vcs/test_vcs_matplotlib_colormap.py index baf0e1bb07..b7ba251f5b 100644 --- a/testing/vcs/test_vcs_matplotlib_colormap.py +++ b/testing/vcs/test_vcs_matplotlib_colormap.py @@ -1,4 +1,9 @@ import os, sys, cdms2, vcs, testing.regression as regression +import matplotlib +sp = matplotlib.__version__.split(".") +if int(sp[0])*10+int(sp[1])<15: + # This only works with matplotlib 1.5 and greater + 
sys.exit() # Load the clt data: dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc")) @@ -10,4 +15,4 @@ canvas = regression.init() canvas.setcolormap(vcs.matplotlib2vcs("viridis")) canvas.plot(clt, bg=1) -regression.run(canvas, "test_matplotlib_colormap.png") \ No newline at end of file +regression.run(canvas, "test_matplotlib_colormap.png") diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row.py b/testing/vcsaddons/test_12_plot_one_leg_per_row.py index 0c02985277..9e8f25ab9f 100755 --- a/testing/vcsaddons/test_12_plot_one_leg_per_row.py +++ b/testing/vcsaddons/test_12_plot_one_leg_per_row.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x = vcs.init() diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py index b57b767245..8daf50456f 100644 --- a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py +++ b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py @@ -1,7 +1,8 @@ import os, sys, testing.regression as regression -import EzTemplate,vcs -import cdms,EzTemplate,vcs,sys +import vcs +from vcsaddons import EzTemplate +import cdms,vcs,sys ## 12 plots 1 legend per row on the right ## Initialize VCS x = vcs.init() diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py b/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py index 6a9d50284d..141d94e09b 100644 --- a/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py @@ -1,5 +1,5 @@ import vcs -import EzTemplate +from vcsaddons import EzTemplate M=EzTemplate.Multi(rows=2,columns=2) diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py index d46e7b9acd..e941fba161 100644 --- 
a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x = vcs.init() diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py index 73b7c8dbf7..60d3541273 100644 --- a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x = vcs.init() diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py index 6e9398fe80..2e9be25214 100644 --- a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x=vcs.init() x.drawlogooff() diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py index 5d4cd293b9..3b60a8d07a 100644 --- a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x = vcs.init() x.drawlogooff() diff --git a/testing/xmgrace/CMakeLists.txt b/testing/xmgrace/CMakeLists.txt index e1de5fd911..470aa056d0 100644 --- 
a/testing/xmgrace/CMakeLists.txt +++ b/testing/xmgrace/CMakeLists.txt @@ -1,5 +1,5 @@ -add_test(flake8_xmgrace - "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/xmgrace/Lib/" +cdat_add_test(flake8_xmgrace + flake8 "${cdat_SOURCE_DIR}/Packages/xmgrace/Lib/" --show-source # Show context for detected errors --statistics # Show summary of errors at end of output --max-line-length=128 # Max line 128 not 80 diff --git a/tests/cdat/test_cdat.py b/tests/cdat/test_cdat.py deleted file mode 100644 index 3c87d0c0bf..0000000000 --- a/tests/cdat/test_cdat.py +++ /dev/null @@ -1,500 +0,0 @@ -#!/usr/bin/env python -version='%prog 1.0' -usage = "usage: %prog [options] PACKAGE1, PACKAGE2, CONTRIB1, CONTRIB2, ..." -import subprocess,os,sys -import optparse -import time -import bz2,ftplib -ftp_site = "climate.llnl.gov" -ftp_dir = "Shadow" -ftp_user = "cdat" -ftp_password = "Changeme1" - -import cdat_info -default_time_format = "%Y-%m-%d %H:%M:%S" - -def get_shadow_name(test_dir,test): - fnm = os.path.join(test_dir,test)[:-3]+'.shadow.bz2' - path = list(os.path.split(fnm)) - while path[0]!='': - tmp = os.path.split(path.pop(0)) - path.insert(0,tmp[1]) - path.insert(0,tmp[0]) - fnm2 = '.'.join(path[1:]) - return fnm,fnm2 - -def get_shadow_ftp(test_dir,test): - fnm,ftpnm = get_shadow_name(test_dir,test) - f = open(fnm,"w") - try: - ftp=ftplib.FTP(ftp_site) - ftp.login(ftp_user,ftp_password) - ftp.cwd(ftp_dir) - ftp.retrbinary('RETR %s' % ftpnm, f.write) - ftp.close() - f.close() - f = open(fnm) - s=f.read() - f.close() - s = bz2.decompress(s) - f = open(fnm[:-4],"w") # open w/o bz2 ext - f.write(s) - f.close() - os.remove(fnm) - except Exception,err: - f.close() - os.remove(fnm) - pass - - -def get_shadow_local(test_dir,test): - fnm = os.path.join(test_dir,test)[:-3]+'.shadow' - if os.path.exists(fnm): - f=open(fnm,"r") - s=f.read() - f.close() - shadow_dict=eval(s) - else: - shadow_dict={} - return shadow_dict - -def get_shadow(test_dir,test): - # first try from ftp - 
get_shadow_ftp(test_dir,test) - return get_shadow_local(test_dir,test) - -def set_shadow_local(test_dir,test,dict): - try: - fnm = os.path.join(test_dir,test)[:-3]+'.shadow' - os.remove(fnm) - except: - pass - try: - fnm = os.path.join(test_dir,test)[:-3]+'.shadow.bz2' - f=open(fnm,"w") - s= bz2.compress(repr(dict)) - print >> f, s - f.close() - except Exception,err: - pass - return - -def put_shadow_ftp(test_dir,test): - fnm,ftpnm = get_shadow_name(test_dir,test) - try: - ftp=ftplib.FTP(ftp_site) - ftp.login(ftp_user,ftp_password) - ftp.cwd(ftp_dir) - f=open(fnm) - ftp.storbinary('STOR %s' % ftpnm, f) - ftp.close() - os.remove(fnm) - except Exception,err: - print 'Error putting ftp bz2',err - pass - -def set_shadow(test_dir,test,dict): - set_shadow_local(test_dir,test,dict) - if int(o.upload)>0: - put_shadow_ftp(test_dir,test) - return - -def make_tests_string_machine(machine,dict): - details="" - details = "\t\t\tlast successful run: %s" % dict.get("last","never") - if dict.has_key("time"): - details+="\n\t\t\tduration (min,avg,max) %i, %i, %i seconds" % (dict["fastest"],dict["time"],dict["slowest"]) - if dict.has_key("count") and o.verbose>1: - details+='\n\t\t\tSuccesfully tested %i times on at least : %i independent machines' % (dict["count"],len(dict["machines"])) - return details - -def make_tests_string(dict_all): - details="" - for os in dict_all.keys(): - details += "\n\t\tOS: %s" % os - dict_os = dict_all[os] - for v in dict_os.keys(): - details += "\n\t\t Version: %s" % v - dict_system = dict_os[v] - for m in dict_system.keys(): - details += "\n\t\t Machine: %s" % m - dict=dict_system[m] - details+='\n'+make_tests_string_machine(m,dict) - return details - -def run_dir(test_dir,lst): - lst.sort() - passed=True - output={} - for test in lst: - if test[-3:]=='.py' and (test.lower()[:4]=='test' or test.lower()[:6]=='cdtest'): - Dict_all = get_shadow(test_dir,test) - if o.query_mode: - output[(test_dir,test)]=Dict_all - try: - fnm = 
os.path.join(test_dir,test)[:-3]+'.shadow' - os.remove(fnm) - except: - pass - continue - myversion = ".".join(map(str,cdat_info.version())) - dict_all = Dict_all.get(myversion,{}) - myos = os.uname()[0] - system = os.uname()[2] - machine = os.uname()[4] - dict_os = dict_all.get(myos,{}) - dict_system = dict_os.get(system,{}) - dict = dict_system.get(machine,{}) - dict_system[machine] = dict - dict_os[system] = dict_system - dict_all[myos] = dict_os - details = "" - last = dict.get("last","1980-01-01 00:00:00") # ok ever ago! - format = dict.get("format",default_time_format) - tlast = time.strptime(last,format) - delta = time.mktime(tlast)-time.mktime(time.strptime(o.date,o.format)) - if delta>0: - if o.verbose>0: - print "\tRunning: %s" % (test) - print "\t\tSuccessful run newer than threshold %s vs %s " % (last,o.date) - continue - if o.verbose>0: - print "\tRunning: %s" % (test) - if o.verbose<3 or dict_all.keys()==[]: - details=make_tests_string_machine(machine,dict) - else: - details+=make_tests_string(dict_all) - print details - t = time.time() - out,err= run_test(os.path.join(test_dir,test)) - err2 = [] - for l in err: - if l.find("Warning")>-1: - pass - else: - err2.append(l) - err=err2 - t2 = time.time() - if err!=[]: - passed = False - if o.verbose>1: - for l in out: - st='\t\t%s' % l.strip() - print st - if o.verbose>0: - if err!=[]: - print '\t FAILED\n\n',err - if o.verbose>1: - for l in err: - st='\t\t%s' % l.strip() - print st - else: - print '\t PASSED\n\n' - runtime = int(t2-t)+1 - fastest = dict.get("fastest",runtime+1) - if fastest>runtime: - fastest = runtime - dict["fastest"]=fastest - slowest = dict.get("slowest",runtime-1) - if slowest1: - mymachine = os.uname()[1] - else: - mymachine = "private" - if not mymachine in machines: - machines.append(mymachine) - dict["machines"] = machines - - dict_system[machine] = dict - dict_os[system] = dict_system - dict_all[myos] = dict_os - Dict_all[myversion] = dict_all - output[(test_dir,test)]=dict - if 
out==[] or str(out[-1]).lower().find('skipped')==-1: - # ok the test havent been skipped - # we can replace stat file - set_shadow(test_dir,test,Dict_all) - - if o.skip is False and passed is False: - sys.exit() - return output - -def run_test(test): - wd, test = os.path.split(test) - cmd = 'cd %s ; %s %s' % (wd, sys.executable, test) - if o.full_testing: - cmd+=' --full --extended' - if o.extended_testing: - cmd += ' --extended' - #print cmd - P=subprocess.Popen(cmd,stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True,shell=True) - - out = P.stdout.readlines() - err = P.stderr.readlines() - rmv =[] - for l in err: - for e in o.error_strings: - if l.find(e)>-1: - rmv.append(l) - break - for l in rmv: - err.remove(l) - - return out,err - -format = default_time_format -date = time.strftime(format,time.localtime()) # Now! - -p=optparse.OptionParser(version=version,usage=usage) - -time_format_help_string = """format for time, default: %default -Format can be constructed from the following keys: -%a Locale's abbreviated weekday name. -%A Locale's full weekday name. -%b Locale's abbreviated month name. -%B Locale's full month name. -%c Locale's appropriate date and time representation. -%d Day of the month as a decimal number [01,31]. -%H Hour (24-hour clock) as a decimal number [00,23]. -%I Hour (12-hour clock) as a decimal number [01,12]. -%j Day of the year as a decimal number [001,366]. -%m Month as a decimal number [01,12]. -%M Minute as a decimal number [00,59]. -%p Locale's equivalent of either AM or PM. -%S Second as a decimal number [00,61]. -%U Week number of the year (Sunday as the first day of the week) as a decimal number [00,53]. All days in a new year preceding the first Sunday are considered to be in week 0. -%w Weekday as a decimal number [0(Sunday),6]. -%W Week number of the year (Monday as the first day of the week) as a decimal number [00,53]. 
All days in a new year preceding the first Monday are considered to be in week 0. -%x Locale's appropriate date representation. -%X Locale's appropriate time representation. -%y Year without century as a decimal number [00,99]. -%Y Year with century as a decimal number. -%Z Time zone name (no characters if no time zone exists). -%% A literal "%" character. -""" - -## Adds options to test utility -p.add_option("-a","--all","-A","--ALL",dest="all",help="Run test for ALL Packages and contributed Packages",action="store_true",default=False) -p.add_option("-P","--packages",dest="all_packages",help="Run test on all packages",action="store_true",default=False) -p.add_option("-C","--contribs",dest="all_contrib",help="Run test on all contributed packages",action="store_true",default=False) -p.add_option("-p","--package",dest="Packages",metavar="PACKAGE",help="Run test on this package",action="append",type="string",default=[]) -p.add_option("-c","--contrib","--contributed",dest="Contribs",metavar="CONTRIBUTED",help="Run test on this contributed package",action="append",type="string",default=[]) -p.add_option("-s","--stop","--noerror",dest="skip",help="Stop on errors (default: %default)",action="store_false",default=False) -p.add_option("-S","--nostop","--skip",dest="skip",help="Do not stop on errors",action="store_true",default=False) -p.add_option("-v","--verbose",metavar="LEVEL",dest="verbose",help="Level of verbosity (0, 1, 2 or 3), default is %default",type="choice",default="1",choices=("0","1","2","3")) -p.add_option("-u","--upload",metavar="LEVEL",dest="upload",help="Level of upload privacy (0, 1, or 2), 0 no data uploaded, 1 no private data uploaded, 2 uploads hostname, default is %default",type="choice",default="2",choices=("0","1","2")) -p.add_option("-e","--okerror",metavar="ERROR STRING",dest="error_strings",help="Identify 'none' error merror messages (removes lines in error messages containing 
this)",default=["ppmtogif","pnmcolormap","pnmremap","ppmtogif","ppmquant","pnmcrop","Definition of","DeprecationWarning","self.nxo"],action="append",type="string") -p.add_option("-d","--date",dest="date",type="string",help="Will run a test if last successfull time is older than 'date', default is now: %default See --timeformat option for date format",default=date) -p.add_option("-f","--timeformat",dest="format",type="string",help=time_format_help_string,default=default_time_format) -p.add_option("-q","--query_mode",dest="query_mode",help="Runs a query of successfully run test only, does not execute anything",action="store_true",default=False) -p.add_option("-F","--full",dest="full_testing",help="Full testing (more detailed testing) default is %default",default=False,action="store_true") - - -# short test is default -jd082007 -p.add_option("-E","--extended",dest="extended_testing",help="Extended testing (runs testing completely) default is %default",default=False,action="store_true") - - -(o,args) = p.parse_args() - -if int(o.upload)==2 and o.query_mode is False: - print 'Your upload level is set to 2\nThis means CDAT will recover your machine\'s name (only when running the test suite).\nTo turn this off use option: --upload=1 (no private data uploaded) or 0 (no data uploaded at all)' - print "Your machine's name (%s) will be stored for statistical purposes only" % os.uname()[1] - cont = raw_input("Do you wish to continue? (y/n) [y]") - if not cont.lower() in ['','y','yes']: - sys.exit() -try: - time.strptime(o.date,o.format) -except: - p.error('date must be in format: "%s", or specify format on command line (use --help)' % o.format) -# Handles case where nothing is passed! 
-if not (o.all_packages or o.all_contrib or o.all) and o.Packages==[] and o.Contribs==[] and args==[]: - (o,args) = p.parse_args(["-h"]) - -if o.all: - o.all_packages=True - o.all_contrib=True - -# Append all the Packages -packages=[] -pckgs = os.listdir("Packages") -pckgs.sort() -for pk in pckgs: - if pk in ['cmor','cdms','regrid','Properties']: - continue - if os.path.isdir(os.path.join("Packages",pk)): - lst=[] - try: - dr = os.path.join("Packages",pk,"Test") - lst = os.listdir(os.path.join("Packages",pk,"Test")) - except: - pass - try: - lst = os.listdir(os.path.join("Packages",pk,"test")) - except: - pass - if lst!=[]: - packages.append(pk) - -if o.all_packages: - for pk in packages: - if not pk in o.Packages: - o.Packages.append(pk) - -contribs=o.Contribs -if contribs==[]: - pckgs = os.listdir("contrib") - pckgs.sort() - for pk in pckgs: - if pk in ['spanlib']: - try: - import spanlib - except: - continue - if os.path.isdir(os.path.join("contrib",pk)): - lst=[] - try: - lst = os.listdir(os.path.join("contrib",pk,"Test")) - except: - pass - try: - lst = os.listdir(os.path.join("contrib",pk,"test")) - except: - pass - if lst!=[] and pk not in o.Contribs: - # first try to see if contrib has been built - contribs.append(pk) - -if o.all_contrib: - for pk in contribs: - if pk not in o.Contribs: - o.Contribs.append(pk) - -#Now adds the extra arguments -for pk in args: - ok=False - if pk in packages: - ok = True - if not pk in o.Packages: - o.Packages.append(pk) - if pk in contribs: - ok = True - if not pk in o.Contribs: - o.Contribs.append(pk) - if not ok: - if o.skip: - print 'Will skip Package:',pk - else: - print "Package %s does not exists or has not test suite" % pk - print 'type "%s --help" for help and usage' % sys.argv[0] - sys.exit() - - -# Ok now runs the test to see if packages are good -skipped=[] -for pk in o.Packages: - if not pk in packages: - if o.skip: - print 'Will skip Package:',pk - skipped.append(pk) - else: - print "Package %s does not exists 
or has no test suite" % pk - print 'type "%s --help" for help and usage' % sys.argv[0] - sys.exit() -for pk in skipped: - o.Packages.remove(pk) -# Ok now runs the test to see if contribs are good -skipped=[] -for pk in o.Contribs: - if not pk in contribs: - if o.skip: - print 'Will skip Contributed Package:',pk - skipped.append(pk) - else: - print "Contributed Package %s does not exists or has not test suite" % pk - print 'type "%s --help" for help and usage' % sys.argv[0] - print 'valid contributed packages: %s' % ' '.join(contribs) - sys.exit() -for pk in skipped: - o.Contribs.remove(pk) -o.verbose=int(o.verbose) -results ={} -for pk in o.Packages: - print "Running Test on Official Package: %s" % pk - test_dir = os.path.join("Packages",pk,"Test") - try: - lst = os.listdir(test_dir) - except: - test_dir = os.path.join("Packages",pk,"test") - lst = os.listdir(test_dir) - tmp = run_dir(test_dir,lst) - for k in tmp.keys(): - results[k]=tmp[k] -for pk in o.Contribs: - print "Running Test on Contributed Package: %s" % pk - test_dir = os.path.join("contrib",pk,"Test") - try: - lst = os.listdir(test_dir) - except: - test_dir = os.path.join("contrib",pk,"test") - lst = os.listdir(test_dir) - tmp = run_dir(test_dir,lst) - for k in tmp.keys(): - results[k]=tmp[k] - - - -import cdat_info -Packages=[] -OS=[] -Versions=[] -Machines=[] -CDATVersions=[] -#code to display nicely all the results -if o.query_mode: - for test in results.keys(): - pnm =test[0] - if not pnm in Packages: - Packages.append(pnm) - CDATVersions=results[test] - oses = CDATVersions.get(str(cdat_info.version()),{}) - for aos in oses.keys(): - if not aos in OS: - OS.append(aos) - versions = oses[aos] - for v in versions.keys(): - syst = versions[v] - for asys in syst: - full = "%s_%s_%s" % (aos,v,asys) - if not full in Versions: - Versions.append(full) - res = syst[asys] - machines = res["machines"] - for m in machines: - if not m in Machines: - Machines.append(m) - print 'Your version:',cdat_info.version() - 
print 'Total Test:',len(results.keys()) - print 'Total Packages:',len(Packages) - print 'Total OS:',len(OS),'---',', '.join(OS) - print 'Total OS Versions:',len(Versions) - print 'Total Independent Machines:',len(Machines) -## else: -## for test_dir,test in results.keys(): -## print '\n\n' -## fn = test_dir+test -## print fn,'--------------' -## tr = results[test_dir,test] -## for t in tr: -## print '\t',t,': ' ,tr[t] diff --git a/tests/cdat/test_exsrc_ok.py b/tests/cdat/test_exsrc_ok.py deleted file mode 100644 index 923dfc9d7a..0000000000 --- a/tests/cdat/test_exsrc_ok.py +++ /dev/null @@ -1,107 +0,0 @@ -""" Test external packages dependencies for CDAT -Prints out Packages that need to be installed and why -""" -import sys,os - -## Test 1: Pyfort -min_ver=8.5 -a=os.popen4(sys.prefix+'/bin/pyfort -V')[1].readlines() -sp=a[0].split() -if sp[0]!='Pyfort': - print 'Pyfort : Not Present in your python distribution' -elif float(sp[1])-1: - print 'gplot : Not present on your system' - -## Test 5: xgks -if not os.path.exists(sys.prefix+'/lib/xgksfonts'): - print 'xgks : xgksfonts directory not present in your python distribution' - -## Test 6: gifsicle -a=os.popen4('which gifsicle')[1].readlines()[0] -if a.find('not found')>-1: - print 'gifsicle : Not present on your system' - -## Test 7: ghostscript and fonts -a=os.popen4('which gs')[1].readlines()[0] -if a.find('not found')>-1: - print 'ghostscript : Not present on your system' -else: - jpeg=0 - png=0 - a=os.popen4('gs -h')[1].readlines() - while a.pop(0).find('Available devices:')<0: - continue - for l in a: - s=l.strip().split() - if 'jpeg' in s: - jpeg=1 - if 'png16' in s: - png=1 - - font=0 - a=os.popen4('gs -h')[1].readlines() - while a.pop(0).find('Search path:')<0: - continue - for l in a: - if l[0]==' ': # path lines starts with blank - s=l.strip().split(':') - for p in s: - #print os.listdir(p.strip()) - if os.path.exists(p.strip()+'/n022003l.afm'): - font=1 - else: - break - if jpeg==0 and png==0 and font==0: 
- print 'ghostscript : no jpeg nor png support built, missing extra fonts' - elif jpeg==0 and png==0: - print 'ghostscript : no jpeg nor png support built' - elif jpeg==0: - print 'ghostscript : no jpeg support built' - elif png==0: - print 'ghostscript : no png support built' - elif font==0: - print 'ghostscript : extra fonts not installed' - -## Test 8: Netpbm/pbmplus -a=os.popen4('which ppmtogif')[1].readlines()[0] -if a.find('not found')>-1: - if sys.platform in ['linux2','darwin','cygwin']: - print 'netpbm : Not present on your system' - else: - print 'pbmplus : Not present on your system' - - -## Test 9: R libraries (not python module) -a=os.popen4('which R')[1].readlines()[0] -if a.find('not found')>-1: - print 'R : Not present on your system' - -## Test 10: VTK -try: - import vtk -except: - print 'VTK : Not present on your Python' - diff --git a/tests/test_script b/tests/test_script deleted file mode 100755 index 883bb3b5b7..0000000000 --- a/tests/test_script +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/sh -# usage: ./test_script [-v] [targetdir] -# -v prevents run of vcdat -# -unset PYTHONPATH -unset PYTHONHOME -unset PYTHONSTARTUP -if (test "$1" = "-v") then - vopt=0; shift; -else - vopt=1 -fi -if (test $# -eq 0) then - p=`which python`; v=`which vcdat` -else - here=`pwd`; - cd $1/bin; - pdir=`pwd`; - p=$pdir/python; v=$pdir/vcdat; - cd $here -fi -prefix=`${p} -c "import sys; print sys.exec_prefix"` -echo "Testing $p" -(cd Packages/cdms/Test; $p cdtest.py) -if (test $vopt -eq 1) then - echo "Testing $v"; - $v -fi -echo "-------------------------------------------------------------------" -echo "Tests completed." 
- diff --git a/uvcdatspt/scripts/MHTScreenshots.py b/uvcdatspt/scripts/MHTScreenshots.py deleted file mode 100644 index 4396c60743..0000000000 --- a/uvcdatspt/scripts/MHTScreenshots.py +++ /dev/null @@ -1,170 +0,0 @@ -try: paraview.simple -except: from paraview.simple import * - -import sys -import os -import paraview - -if len(sys.argv) != 3: - print 'Usage: pvbatch --symmetric MHTScreenshots.py ""' - sys.exit(1) - -print 'input file names are: ', sys.argv[2] -print 'output file name is: ', sys.argv[1] - -# trying to import the library where I can specify the global and subcontrollers -try: - import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set -except ImportError: - import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH - -paraview.options.batch = True # this may not be necessary -paraview.simple._DisableFirstRenderCameraReset() - -def CreateTimeCompartments(globalController, timeCompartmentSize): - if globalController.GetNumberOfProcesses() == 1: - print 'single process' - return - elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0: - print 'number of processes must be an integer multiple of time compartment size' - return - elif timeCompartmentSize == globalController.GetNumberOfProcesses(): - return globalController - - gid = globalController.GetLocalProcessId() - timeCompartmentGroupId = int (gid / timeCompartmentSize ) - newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize) - # must unregister if the reference count is greater than 1 - if newController.GetReferenceCount() > 1: - newController.UnRegister(None) - - #print gid, timeCompartmentGroupId, gid % timeCompartmentSize - print gid, ' of global comm is ', newController.GetLocalProcessId() - globalController.SetGlobalController(newController) - return newController - -def CheckReader(reader): - if hasattr(reader, "FileName") 
== False: - print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName() - sys.exit(-1) - - if hasattr(reader, "TimestepValues") == False: - print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information" - sys.exit(-1) - -def CreateControllers(timeCompartmentSize): - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - globalController = pm.GetGlobalController() - if timeCompartmentSize > globalController.GetNumberOfProcesses(): - timeCompartmentSize = globalController.GetNumberOfProcesses() - - temporalController = CreateTimeCompartments(globalController, timeCompartmentSize) - return globalController, temporalController, timeCompartmentSize - -currentTimeStep = -1 -def UpdateCurrentTimeStep(globalController, timeCompartmentSize): - global currentTimeStep - if currentTimeStep == -1: - currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize - return currentTimeStep - - numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize - currentTimeStep = currentTimeStep + numTimeStepsPerIteration - return currentTimeStep - -def WriteImages(currentTimeStep, currentTime, views): - for view in views: - filename = view.tpFileName.replace("%t", str(currentTimeStep)) - view.ViewTime = currentTime - WriteImage(filename, view, Magnification=view.tpMagnification) - -def WriteFiles(currentTimeStep, currentTime, writers): - for writer in writers: - originalfilename = writer.FileName - fname = originalfilename.replace("%t", str(currentTimeStep)) - writer.FileName = fname - writer.UpdatePipeline(currentTime) - writer.FileName = originalfilename - -def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views): - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - while currentTimeStep < len(timeSteps): - print globalController.GetLocalProcessId(), " is working on ", currentTimeStep - WriteImages(currentTimeStep, 
timeSteps[currentTimeStep], views) - WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers) - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - -def CreateReader(ctor, args, fileInfo): - "Creates a reader, checks if it can be used, and sets the filenames" - reader = ctor() - CheckReader(reader) - import glob - files = glob.glob(fileInfo) - files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering - reader.FileName = files - for a in args: - s = "reader."+a - exec (s) - - return reader - -def CreateWriter(ctor, filename, tp_writers): - writer = ctor() - writer.FileName = filename - tp_writers.append(writer) - return writer - -def CreateView(proxy_ctor, filename, magnification, width, height, tp_views): - view = proxy_ctor() - view.add_attribute("tpFileName", filename) - view.add_attribute("tpMagnification", magnification) - tp_views.append(view) - view.ViewSize = [width, height] - return view - -tp_writers = [] -tp_views = [] -# ==================== end of specialized temporal parallelism sections ================== - -timeCompartmentSize = 8 -globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize) - -in_msf_moc = CreateReader( MHTFileSeriesReader, [], sys.argv[2] ) -timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0] - -XYChartView1 = CreateView( CreateXYPlotView, sys.argv[1], 1, 549, 583, tp_views ) -XYChartView1.ShowAxis = [1, 1, 0, 0] -XYChartView1.ShowAxisGrid = [1, 1, 0, 0] -XYChartView1.AxisLabelsBottom = [] -XYChartView1.LegendLocation = 1 -XYChartView1.AxisLabelsLeft = [] -XYChartView1.ViewTime = 0.0 -XYChartView1.ShowLegend = 1 -XYChartView1.AxisRange = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0] -XYChartView1.AxisTitleFont = ['Arial', '12', '1', '0', 'Arial', '12', '1', '0', 'Arial', '12', '1', '0', 'Arial', '12', '1', '0'] -XYChartView1.AxisLabelColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0] -XYChartView1.AxisTitleColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5] -XYChartView1.ChartTitleColor = [0.0, 0.0, 0.0] -XYChartView1.ChartTitleAlignment = 1 -XYChartView1.AxisColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] -XYChartView1.AxisLabelsTop = [] -XYChartView1.AxisLabelFont = ['Arial', '12', '0', '0', 'Arial', '12', '0', '0', 'Arial', '12', '0', '0', 'Arial', '12', '0', '0'] -XYChartView1.ShowAxisLabels = [1, 1, 1, 1] -XYChartView1.AxisLabelNotation = [0, 0, 0, 0] -XYChartView1.AxisLabelPrecision = [2, 2, 2, 2] -XYChartView1.AxisGridColor = [0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95] -XYChartView1.ChartTitle = '' -XYChartView1.AxisLabelsRight = [] -XYChartView1.AxisBehavior = [0, 0, 0, 0] -XYChartView1.AxisTitle = ['', '', '', ''] -XYChartView1.ChartTitleFont = ['Arial', '14', '0', '0'] -XYChartView1.AxisLogScale = [0, 0, 0, 0] - -DataRepresentation1 = Show() #GetDisplayProperties(in_msf_moc) -DataRepresentation1.XArrayName = 'reader_mht_global' -DataRepresentation1.SeriesVisibility = ['vtkOriginalIndices', '0'] -DataRepresentation1.SeriesVisibility = ['reader_mht_global', '1'] - -IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views) - -print 'ending' diff --git a/uvcdatspt/scripts/MHTTemporalStatistics.py b/uvcdatspt/scripts/MHTTemporalStatistics.py deleted file mode 100644 index 18cfca03c5..0000000000 --- a/uvcdatspt/scripts/MHTTemporalStatistics.py +++ /dev/null @@ -1,26 +0,0 @@ -print 'starting' -import sys -from paraview.simple import * - -if len(sys.argv) < 3: - print 'Usage: pvbatch MHTTemporalStatistics.py ' - sys.exit(1) - -paraview.simple._DisableFirstRenderCameraReset() -reader = MHTFileSeriesReader() -print 'input file names are: ', sys.argv[2:len(sys.argv)] -print 'output file name is: ', sys.argv[1] -reader.FileName = sys.argv[2:len(sys.argv)] - -MultiBlockTemporalStatistics1 = 
MultiBlockTemporalStatistics() -MultiBlockTemporalStatistics1.TimeStepType = 0 -MultiBlockTemporalStatistics1.SamplingMethod = 1 -MultiBlockTemporalStatistics1.TimeSpan = 0 -MultiBlockTemporalStatistics1.TimeStepLength = 1 -MultiBlockTemporalStatistics1.TimeCompartmentSize = 8 - -writer = XMLMultiBlockDataWriter() -writer.FileName = sys.argv[1] -writer.UpdatePipeline() - -print 'ending' diff --git a/uvcdatspt/scripts/MOCScreenshots.py b/uvcdatspt/scripts/MOCScreenshots.py deleted file mode 100644 index 1cb05ea8f0..0000000000 --- a/uvcdatspt/scripts/MOCScreenshots.py +++ /dev/null @@ -1,535 +0,0 @@ - -try: paraview.simple -except: from paraview.simple import * - -import sys -import os -import paraview - -if len(sys.argv) != 3: - print 'Usage: pvbatch --symmetric MOCScreenshots.py ""' - sys.exit(1) - -print 'input file names are: ', sys.argv[2] -print 'output file name is: ', sys.argv[1] - -# trying to import the library where I can specify the global and subcontrollers -try: - import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set -except ImportError: - import vtkParallelPython as vtkParallel # for a static build, i.e. 
jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH - -paraview.options.batch = True # this may not be necessary -paraview.simple._DisableFirstRenderCameraReset() - -def CreateTimeCompartments(globalController, timeCompartmentSize): - if globalController.GetNumberOfProcesses() == 1: - print 'single process' - return - elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0: - print 'number of processes must be an integer multiple of time compartment size' - return - elif timeCompartmentSize == globalController.GetNumberOfProcesses(): - return globalController - - gid = globalController.GetLocalProcessId() - timeCompartmentGroupId = int (gid / timeCompartmentSize ) - newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize) - # must unregister if the reference count is greater than 1 - if newController.GetReferenceCount() > 1: - newController.UnRegister(None) - - #print gid, timeCompartmentGroupId, gid % timeCompartmentSize - print gid, ' of global comm is ', newController.GetLocalProcessId() - globalController.SetGlobalController(newController) - return newController - -def CheckReader(reader): - if hasattr(reader, "FileName") == False: - print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName() - sys.exit(-1) - - if hasattr(reader, "TimestepValues") == False: - print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information" - sys.exit(-1) - -def CreateControllers(timeCompartmentSize): - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - globalController = pm.GetGlobalController() - if timeCompartmentSize > globalController.GetNumberOfProcesses(): - timeCompartmentSize = globalController.GetNumberOfProcesses() - - temporalController = CreateTimeCompartments(globalController, timeCompartmentSize) - return globalController, temporalController, timeCompartmentSize - -currentTimeStep = -1 -def UpdateCurrentTimeStep(globalController, 
timeCompartmentSize): - global currentTimeStep - if currentTimeStep == -1: - currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize - return currentTimeStep - - numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize - currentTimeStep = currentTimeStep + numTimeStepsPerIteration - return currentTimeStep - -def WriteImages(currentTimeStep, currentTime, views): - for view in views: - filename = view.tpFileName.replace("%t", str(currentTimeStep)) - view.ViewTime = currentTime - WriteImage(filename, view, Magnification=view.tpMagnification) - -def WriteFiles(currentTimeStep, currentTime, writers): - for writer in writers: - originalfilename = writer.FileName - fname = originalfilename.replace("%t", str(currentTimeStep)) - writer.FileName = fname - writer.UpdatePipeline(currentTime) - writer.FileName = originalfilename - -def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views): - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - while currentTimeStep < len(timeSteps): - print globalController.GetLocalProcessId(), " is working on ", currentTimeStep - WriteImages(currentTimeStep, timeSteps[currentTimeStep], views) - WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers) - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - -def CreateReader(ctor, args, fileInfo): - "Creates a reader, checks if it can be used, and sets the filenames" - reader = ctor() - CheckReader(reader) - import glob - files = glob.glob(fileInfo) - files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering - reader.FileName = files - for a in args: - s = "reader."+a - exec (s) - - return reader - -def CreateWriter(ctor, filename, tp_writers): - writer = ctor() - writer.FileName = filename - tp_writers.append(writer) - return writer - -def CreateView(proxy_ctor, filename, magnification, width, height, 
tp_views): - view = proxy_ctor() - view.add_attribute("tpFileName", filename) - view.add_attribute("tpMagnification", magnification) - tp_views.append(view) - view.ViewSize = [width, height] - return view - -tp_writers = [] -tp_views = [] -# ==================== end of specialized temporal parallelism sections ================== - -timeCompartmentSize = 16 -globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize) - -RenderView1 = CreateView( CreateRenderView, sys.argv[1], 1, 549, 583, tp_views ) -RenderView1.LightSpecularColor = [1.0, 1.0, 1.0] -RenderView1.InteractionMode = '3D' -RenderView1.UseTexturedBackground = 0 -RenderView1.UseLight = 1 -RenderView1.CameraPosition = [15.0, -2624.999755859375, 14496.62787197619] -RenderView1.FillLightKFRatio = 3.0 -RenderView1.Background2 = [0.0, 0.0, 0.16470588235294117] -RenderView1.FillLightAzimuth = -10.0 -RenderView1.LODResolution = 50.0 -RenderView1.BackgroundTexture = [] -RenderView1.KeyLightAzimuth = 10.0 -RenderView1.StencilCapable = 1 -RenderView1.LightIntensity = 1.0 -RenderView1.CameraFocalPoint = [15.0, -2624.999755859375, 0.0] -RenderView1.ImageReductionFactor = 2 -RenderView1.CameraViewAngle = 30.0 -RenderView1.CameraParallelScale = 3766.3151510583625 -RenderView1.EyeAngle = 2.0 -RenderView1.HeadLightKHRatio = 3.0 -RenderView1.StereoRender = 0 -RenderView1.KeyLightIntensity = 0.75 -RenderView1.BackLightAzimuth = 110.0 -RenderView1.OrientationAxesInteractivity = 0 -RenderView1.UseInteractiveRenderingForSceenshots = 0 -RenderView1.UseOffscreenRendering = 0 -RenderView1.Background = [1.0, 1.0, 1.0] -RenderView1.UseOffscreenRenderingForScreenshots = 1 -RenderView1.NonInteractiveRenderDelay = 2 -RenderView1.CenterOfRotation = [15.0, -2624.999755859375, 0.0] -RenderView1.CameraParallelProjection = 0 -RenderView1.CompressorConfig = 'vtkSquirtCompressor 0 3' -RenderView1.HeadLightWarmth = 0.5 -RenderView1.MaximumNumberOfPeels = 4 -RenderView1.LightDiffuseColor = [1.0, 1.0, 1.0] 
-RenderView1.StereoType = 'Red-Blue' -RenderView1.DepthPeeling = 1 -RenderView1.BackLightKBRatio = 3.5 -RenderView1.StereoCapableWindow = 1 -RenderView1.CameraViewUp = [0.0, 1.0, 0.0] -RenderView1.LightType = 'HeadLight' -RenderView1.LightAmbientColor = [1.0, 1.0, 1.0] -RenderView1.RemoteRenderThreshold = 3.0 -RenderView1.KeyLightElevation = 50.0 -RenderView1.CenterAxesVisibility = 0 -RenderView1.MaintainLuminance = 0 -RenderView1.StillRenderImageReductionFactor = 1 -RenderView1.BackLightWarmth = 0.5 -RenderView1.FillLightElevation = -75.0 -RenderView1.MultiSamples = 0 -RenderView1.FillLightWarmth = 0.4 -RenderView1.AlphaBitPlanes = 1 -RenderView1.LightSwitch = 0 -RenderView1.OrientationAxesVisibility = 0 -RenderView1.CameraClippingRange = [14351.66159325643, 14714.077290055833] -RenderView1.BackLightElevation = 0.0 -RenderView1.ViewTime = 0.0 -RenderView1.OrientationAxesOutlineColor = [1.0, 1.0, 1.0] -RenderView1.LODThreshold = 5.0 -RenderView1.CollectGeometryThreshold = 100.0 -RenderView1.UseGradientBackground = 0 -RenderView1.KeyLightWarmth = 0.6 -RenderView1.OrientationAxesLabelColor = [1.0, 1.0, 1.0] - -in_msf_moc = CreateReader( MOCFileSeriesReader, [], sys.argv[2]) -timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0] -Threshold1 = Threshold( guiName="Threshold1", Scalars=['POINTS', 'reader_moc_global'], ThresholdRange=[-1000.0, 592.3663330078125], AllScalars=1 ) - -Transform1 = Transform( guiName="Transform1", Transform="Transform" ) -Transform1.Transform.Scale = [40.0, -1.0, 1.0] -Transform1.Transform.Rotate = [0.0, 0.0, 0.0] -Transform1.Transform.Translate = [0.0, 0.0, 0.0] - -a1_reader_moc_global_PiecewiseFunction = CreatePiecewiseFunction( Points=[0.0, 0.0, 1.0, 1.0] ) - -a1_reader_moc_global_PVLookupTable = GetLookupTableForArray( "reader_moc_global", 1, Discretize=1, RGBPoints=[-151.5101776123047, 0.23, 0.299, 0.754, 592.3663330078125, 0.706, 0.016, 0.15], UseLogScale=0, VectorComponent=0, NanColor=[0.25, 
0.0, 0.0], NumberOfTableValues=256, ColorSpace='Diverging', VectorMode='Magnitude', HSVWrap=0, ScalarRangeInitialized=1.0, LockScalarRange=0 ) - -SetActiveSource(in_msf_moc) -DataRepresentation1 = Show() -DataRepresentation1.CubeAxesZAxisVisibility = 1 -DataRepresentation1.SelectionPointLabelColor = [0.5, 0.5, 0.5] -DataRepresentation1.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds' -DataRepresentation1.SuppressLOD = 0 -DataRepresentation1.CubeAxesXGridLines = 0 -DataRepresentation1.CubeAxesYAxisTickVisibility = 1 -DataRepresentation1.Position = [0.0, 0.0, 0.0] -DataRepresentation1.BackfaceRepresentation = 'Follow Frontface' -DataRepresentation1.SelectionOpacity = 1.0 -DataRepresentation1.SelectionPointLabelShadow = 0 -DataRepresentation1.CubeAxesYGridLines = 0 -DataRepresentation1.OrientationMode = 'Direction' -DataRepresentation1.Source.TipResolution = 6 -DataRepresentation1.ScaleMode = 'No Data Scaling Off' -DataRepresentation1.Diffuse = 1.0 -DataRepresentation1.SelectionUseOutline = 0 -DataRepresentation1.CubeAxesZTitle = 'Z-Axis' -DataRepresentation1.Specular = 0.1 -DataRepresentation1.SelectionVisibility = 1 -DataRepresentation1.InterpolateScalarsBeforeMapping = 1 -DataRepresentation1.CubeAxesZAxisTickVisibility = 1 -DataRepresentation1.Origin = [0.0, 0.0, 0.0] -DataRepresentation1.CubeAxesVisibility = 0 -DataRepresentation1.Scale = [1.0, 1.0, 1.0] -DataRepresentation1.SelectionCellLabelJustification = 'Left' -DataRepresentation1.DiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation1.SelectionCellLabelOpacity = 1.0 -DataRepresentation1.Source = "Arrow" -DataRepresentation1.Source.Invert = 0 -DataRepresentation1.Masking = 0 -DataRepresentation1.Opacity = 1.0 -DataRepresentation1.LineWidth = 1.0 -DataRepresentation1.MeshVisibility = 0 -DataRepresentation1.Visibility = 0 -DataRepresentation1.SelectionCellLabelFontSize = 18 -DataRepresentation1.CubeAxesCornerOffset = 0.0 -DataRepresentation1.SelectionPointLabelJustification = 'Left' 
-DataRepresentation1.Ambient = 0.0 -DataRepresentation1.SelectOrientationVectors = '' -DataRepresentation1.CubeAxesTickLocation = 'Inside' -DataRepresentation1.BackfaceDiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation1.CubeAxesYAxisVisibility = 1 -DataRepresentation1.SelectionPointLabelFontFamily = 'Arial' -DataRepresentation1.Source.ShaftResolution = 6 -DataRepresentation1.CubeAxesFlyMode = 'Closest Triad' -DataRepresentation1.SelectScaleArray = '' -DataRepresentation1.CubeAxesYTitle = 'Y-Axis' -DataRepresentation1.ColorAttributeType = 'POINT_DATA' -DataRepresentation1.SpecularPower = 100.0 -DataRepresentation1.Texture = [] -DataRepresentation1.SelectionCellLabelShadow = 0 -DataRepresentation1.AmbientColor = [1.0, 1.0, 1.0] -DataRepresentation1.MapScalars = 1 -DataRepresentation1.PointSize = 2.0 -DataRepresentation1.Source.TipLength = 0.35 -DataRepresentation1.SelectionCellLabelFormat = '' -DataRepresentation1.Scaling = 0 -DataRepresentation1.StaticMode = 0 -DataRepresentation1.SelectionCellLabelColor = [0.0, 1.0, 0.0] -DataRepresentation1.Source.TipRadius = 0.1 -DataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483] -DataRepresentation1.CubeAxesXAxisTickVisibility = 1 -DataRepresentation1.SelectionCellLabelVisibility = 0 -DataRepresentation1.NonlinearSubdivisionLevel = 1 -DataRepresentation1.CubeAxesColor = [1.0, 1.0, 1.0] -DataRepresentation1.Representation = 'Surface' -DataRepresentation1.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0] -DataRepresentation1.CubeAxesXAxisMinorTickVisibility = 1 -DataRepresentation1.Orientation = [0.0, 0.0, 0.0] -DataRepresentation1.CubeAxesXTitle = 'X-Axis' -DataRepresentation1.CubeAxesInertia = 1 -DataRepresentation1.BackfaceOpacity = 1.0 -DataRepresentation1.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds' -DataRepresentation1.SelectionColor = [1.0, 0.0, 1.0] -DataRepresentation1.SelectionPointLabelVisibility = 0 -DataRepresentation1.SelectionPointLabelFontSize = 18 -DataRepresentation1.ScaleFactor = 1.0 
-DataRepresentation1.BackfaceAmbientColor = [1.0, 1.0, 1.0] -DataRepresentation1.Source.ShaftRadius = 0.03 -DataRepresentation1.SelectMaskArray = '' -DataRepresentation1.SelectionLineWidth = 2.0 -DataRepresentation1.CubeAxesZAxisMinorTickVisibility = 1 -DataRepresentation1.CubeAxesXAxisVisibility = 1 -DataRepresentation1.Interpolation = 'Gouraud' -DataRepresentation1.SelectionCellLabelFontFamily = 'Arial' -DataRepresentation1.SelectionCellLabelItalic = 0 -DataRepresentation1.CubeAxesYAxisMinorTickVisibility = 1 -DataRepresentation1.CubeAxesZGridLines = 0 -DataRepresentation1.SelectionPointLabelFormat = '' -DataRepresentation1.SelectionPointLabelOpacity = 1.0 -DataRepresentation1.Pickable = 1 -DataRepresentation1.CustomBoundsActive = [0, 0, 0] -DataRepresentation1.SelectionRepresentation = 'Wireframe' -DataRepresentation1.SelectionPointLabelBold = 0 -DataRepresentation1.ColorArrayName = 'reader_moc_global' -DataRepresentation1.SelectionPointLabelItalic = 0 -DataRepresentation1.SpecularColor = [1.0, 1.0, 1.0] -DataRepresentation1.LookupTable = a1_reader_moc_global_PVLookupTable -DataRepresentation1.SelectionPointSize = 5.0 -DataRepresentation1.SelectionCellLabelBold = 0 -DataRepresentation1.Orient = 0 - -SetActiveSource(Threshold1) -DataRepresentation2 = Show() -DataRepresentation2.CubeAxesZAxisVisibility = 1 -DataRepresentation2.SelectionPointLabelColor = [0.5, 0.5, 0.5] -DataRepresentation2.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds' -DataRepresentation2.SuppressLOD = 0 -DataRepresentation2.CubeAxesXGridLines = 0 -DataRepresentation2.CubeAxesYAxisTickVisibility = 1 -DataRepresentation2.Position = [0.0, 0.0, 0.0] -DataRepresentation2.BackfaceRepresentation = 'Follow Frontface' -DataRepresentation2.SelectionOpacity = 1.0 -DataRepresentation2.SelectionPointLabelShadow = 0 -DataRepresentation2.CubeAxesYGridLines = 0 -DataRepresentation2.OrientationMode = 'Direction' -DataRepresentation2.Source.TipResolution = 6 -DataRepresentation2.ScaleMode = 'No Data 
Scaling Off' -DataRepresentation2.Diffuse = 1.0 -DataRepresentation2.SelectionUseOutline = 0 -DataRepresentation2.SelectionPointLabelFormat = '' -DataRepresentation2.CubeAxesZTitle = 'Z-Axis' -DataRepresentation2.Specular = 0.1 -DataRepresentation2.SelectionVisibility = 1 -DataRepresentation2.InterpolateScalarsBeforeMapping = 1 -DataRepresentation2.CubeAxesZAxisTickVisibility = 1 -DataRepresentation2.Origin = [0.0, 0.0, 0.0] -DataRepresentation2.CubeAxesVisibility = 0 -DataRepresentation2.Scale = [1.0, 1.0, 1.0] -DataRepresentation2.SelectionCellLabelJustification = 'Left' -DataRepresentation2.DiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation2.SelectionCellLabelOpacity = 1.0 -DataRepresentation2.CubeAxesInertia = 1 -DataRepresentation2.Source = "Arrow" -DataRepresentation2.Source.Invert = 0 -DataRepresentation2.Masking = 0 -DataRepresentation2.Opacity = 1.0 -DataRepresentation2.LineWidth = 1.0 -DataRepresentation2.MeshVisibility = 0 -DataRepresentation2.Visibility = 0 -DataRepresentation2.SelectionCellLabelFontSize = 18 -DataRepresentation2.CubeAxesCornerOffset = 0.0 -DataRepresentation2.SelectionPointLabelJustification = 'Left' -DataRepresentation2.SelectionPointLabelVisibility = 0 -DataRepresentation2.SelectOrientationVectors = '' -DataRepresentation2.CubeAxesTickLocation = 'Inside' -DataRepresentation2.BackfaceDiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation2.CubeAxesYAxisVisibility = 1 -DataRepresentation2.SelectionPointLabelFontFamily = 'Arial' -DataRepresentation2.Source.ShaftResolution = 6 -DataRepresentation2.CubeAxesFlyMode = 'Closest Triad' -DataRepresentation2.SelectScaleArray = '' -DataRepresentation2.CubeAxesYTitle = 'Y-Axis' -DataRepresentation2.ColorAttributeType = 'POINT_DATA' -DataRepresentation2.SpecularPower = 100.0 -DataRepresentation2.Texture = [] -DataRepresentation2.SelectionCellLabelShadow = 0 -DataRepresentation2.AmbientColor = [1.0, 1.0, 1.0] -DataRepresentation2.MapScalars = 1 -DataRepresentation2.PointSize = 2.0 
-DataRepresentation2.Source.TipLength = 0.35 -DataRepresentation2.SelectionCellLabelFormat = '' -DataRepresentation2.Scaling = 0 -DataRepresentation2.StaticMode = 0 -DataRepresentation2.SelectionCellLabelColor = [0.0, 1.0, 0.0] -DataRepresentation2.Source.TipRadius = 0.1 -DataRepresentation2.EdgeColor = [0.0, 0.0, 0.5000076295109483] -DataRepresentation2.CubeAxesXAxisTickVisibility = 1 -DataRepresentation2.SelectionCellLabelVisibility = 0 -DataRepresentation2.NonlinearSubdivisionLevel = 1 -DataRepresentation2.CubeAxesColor = [1.0, 1.0, 1.0] -DataRepresentation2.Representation = 'Surface' -DataRepresentation2.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0] -DataRepresentation2.CubeAxesXAxisMinorTickVisibility = 1 -DataRepresentation2.Orientation = [0.0, 0.0, 0.0] -DataRepresentation2.CubeAxesXTitle = 'X-Axis' -DataRepresentation2.ScalarOpacityUnitDistance = 287.4628538795667 -DataRepresentation2.BackfaceOpacity = 1.0 -DataRepresentation2.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds' -DataRepresentation2.SelectionColor = [1.0, 0.0, 1.0] -DataRepresentation2.Ambient = 0.0 -DataRepresentation2.SelectionPointLabelFontSize = 18 -DataRepresentation2.ScaleFactor = 1.0 -DataRepresentation2.BackfaceAmbientColor = [1.0, 1.0, 1.0] -DataRepresentation2.Source.ShaftRadius = 0.03 -DataRepresentation2.ScalarOpacityFunction = a1_reader_moc_global_PiecewiseFunction -DataRepresentation2.SelectMaskArray = '' -DataRepresentation2.SelectionLineWidth = 2.0 -DataRepresentation2.CubeAxesZAxisMinorTickVisibility = 1 -DataRepresentation2.CubeAxesXAxisVisibility = 1 -DataRepresentation2.Interpolation = 'Gouraud' -DataRepresentation2.SelectMapper = 'Projected tetra' -DataRepresentation2.SelectionCellLabelFontFamily = 'Arial' -DataRepresentation2.SelectionCellLabelItalic = 0 -DataRepresentation2.CubeAxesYAxisMinorTickVisibility = 1 -DataRepresentation2.CubeAxesZGridLines = 0 -DataRepresentation2.ExtractedBlockIndex = 0 -DataRepresentation2.SelectionPointLabelOpacity = 1.0 
-DataRepresentation2.Pickable = 1 -DataRepresentation2.CustomBoundsActive = [0, 0, 0] -DataRepresentation2.SelectionRepresentation = 'Wireframe' -DataRepresentation2.SelectionPointLabelBold = 0 -DataRepresentation2.ColorArrayName = 'reader_moc_global' -DataRepresentation2.SelectionPointLabelItalic = 0 -DataRepresentation2.SpecularColor = [1.0, 1.0, 1.0] -DataRepresentation2.LookupTable = a1_reader_moc_global_PVLookupTable -DataRepresentation2.SelectionPointSize = 5.0 -DataRepresentation2.SelectionCellLabelBold = 0 -DataRepresentation2.Orient = 0 - -SetActiveSource(Transform1) -DataRepresentation3 = Show() -DataRepresentation3.CubeAxesZAxisVisibility = 1 -DataRepresentation3.SelectionPointLabelColor = [0.5, 0.5, 0.5] -DataRepresentation3.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds' -DataRepresentation3.SuppressLOD = 0 -DataRepresentation3.CubeAxesXGridLines = 0 -DataRepresentation3.CubeAxesYAxisTickVisibility = 1 -DataRepresentation3.Position = [0.0, 0.0, 0.0] -DataRepresentation3.BackfaceRepresentation = 'Follow Frontface' -DataRepresentation3.SelectionOpacity = 1.0 -DataRepresentation3.SelectionPointLabelShadow = 0 -DataRepresentation3.CubeAxesYGridLines = 0 -DataRepresentation3.OrientationMode = 'Direction' -DataRepresentation3.Source.TipResolution = 6 -DataRepresentation3.ScaleMode = 'No Data Scaling Off' -DataRepresentation3.Diffuse = 1.0 -DataRepresentation3.SelectionUseOutline = 0 -DataRepresentation3.SelectionPointLabelFormat = '' -DataRepresentation3.CubeAxesZTitle = 'Z-Axis' -DataRepresentation3.Specular = 0.1 -DataRepresentation3.SelectionVisibility = 1 -DataRepresentation3.InterpolateScalarsBeforeMapping = 1 -DataRepresentation3.CubeAxesZAxisTickVisibility = 1 -DataRepresentation3.Origin = [0.0, 0.0, 0.0] -DataRepresentation3.CubeAxesVisibility = 0 -DataRepresentation3.Scale = [1.0, 1.0, 1.0] -DataRepresentation3.SelectionCellLabelJustification = 'Left' -DataRepresentation3.DiffuseColor = [1.0, 1.0, 1.0] 
-DataRepresentation3.SelectionCellLabelOpacity = 1.0 -DataRepresentation3.CubeAxesInertia = 1 -DataRepresentation3.Source = "Arrow" -DataRepresentation3.Source.Invert = 0 -DataRepresentation3.Masking = 0 -DataRepresentation3.Opacity = 1.0 -DataRepresentation3.LineWidth = 1.0 -DataRepresentation3.MeshVisibility = 0 -DataRepresentation3.Visibility = 1 -DataRepresentation3.SelectionCellLabelFontSize = 18 -DataRepresentation3.CubeAxesCornerOffset = 0.0 -DataRepresentation3.SelectionPointLabelJustification = 'Left' -DataRepresentation3.SelectionPointLabelVisibility = 0 -DataRepresentation3.SelectOrientationVectors = '' -DataRepresentation3.CubeAxesTickLocation = 'Inside' -DataRepresentation3.BackfaceDiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation3.CubeAxesYAxisVisibility = 1 -DataRepresentation3.SelectionPointLabelFontFamily = 'Arial' -DataRepresentation3.Source.ShaftResolution = 6 -DataRepresentation3.CubeAxesFlyMode = 'Closest Triad' -DataRepresentation3.SelectScaleArray = '' -DataRepresentation3.CubeAxesYTitle = 'Y-Axis' -DataRepresentation3.ColorAttributeType = 'POINT_DATA' -DataRepresentation3.SpecularPower = 100.0 -DataRepresentation3.Texture = [] -DataRepresentation3.SelectionCellLabelShadow = 0 -DataRepresentation3.AmbientColor = [1.0, 1.0, 1.0] -DataRepresentation3.MapScalars = 1 -DataRepresentation3.PointSize = 2.0 -DataRepresentation3.Source.TipLength = 0.35 -DataRepresentation3.SelectionCellLabelFormat = '' -DataRepresentation3.Scaling = 0 -DataRepresentation3.StaticMode = 0 -DataRepresentation3.SelectionCellLabelColor = [0.0, 1.0, 0.0] -DataRepresentation3.Source.TipRadius = 0.1 -DataRepresentation3.EdgeColor = [0.0, 0.0, 0.5000076295109483] -DataRepresentation3.CubeAxesXAxisTickVisibility = 1 -DataRepresentation3.SelectionCellLabelVisibility = 0 -DataRepresentation3.NonlinearSubdivisionLevel = 1 -DataRepresentation3.CubeAxesColor = [1.0, 1.0, 1.0] -DataRepresentation3.Representation = 'Surface' -DataRepresentation3.CustomBounds = [0.0, 1.0, 0.0, 1.0, 
0.0, 1.0] -DataRepresentation3.CubeAxesXAxisMinorTickVisibility = 1 -DataRepresentation3.Orientation = [0.0, 0.0, 0.0] -DataRepresentation3.CubeAxesXTitle = 'X-Axis' -DataRepresentation3.ScalarOpacityUnitDistance = 388.2163580108114 -DataRepresentation3.BackfaceOpacity = 1.0 -DataRepresentation3.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds' -DataRepresentation3.SelectionColor = [1.0, 0.0, 1.0] -DataRepresentation3.Ambient = 0.0 -DataRepresentation3.SelectionPointLabelFontSize = 18 -DataRepresentation3.ScaleFactor = 1.0 -DataRepresentation3.BackfaceAmbientColor = [1.0, 1.0, 1.0] -DataRepresentation3.Source.ShaftRadius = 0.03 -DataRepresentation3.ScalarOpacityFunction = a1_reader_moc_global_PiecewiseFunction -DataRepresentation3.SelectMaskArray = '' -DataRepresentation3.SelectionLineWidth = 2.0 -DataRepresentation3.CubeAxesZAxisMinorTickVisibility = 1 -DataRepresentation3.CubeAxesXAxisVisibility = 1 -DataRepresentation3.Interpolation = 'Gouraud' -DataRepresentation3.SelectMapper = 'Projected tetra' -DataRepresentation3.SelectionCellLabelFontFamily = 'Arial' -DataRepresentation3.SelectionCellLabelItalic = 0 -DataRepresentation3.CubeAxesYAxisMinorTickVisibility = 1 -DataRepresentation3.CubeAxesZGridLines = 0 -DataRepresentation3.ExtractedBlockIndex = 0 -DataRepresentation3.SelectionPointLabelOpacity = 1.0 -DataRepresentation3.Pickable = 1 -DataRepresentation3.CustomBoundsActive = [0, 0, 0] -DataRepresentation3.SelectionRepresentation = 'Wireframe' -DataRepresentation3.SelectionPointLabelBold = 0 -DataRepresentation3.ColorArrayName = 'reader_moc_global' -DataRepresentation3.SelectionPointLabelItalic = 0 -DataRepresentation3.SpecularColor = [1.0, 1.0, 1.0] -DataRepresentation3.LookupTable = a1_reader_moc_global_PVLookupTable -DataRepresentation3.SelectionPointSize = 5.0 -DataRepresentation3.SelectionCellLabelBold = 0 -DataRepresentation3.Orient = 0 - - - -IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views) - - -print 
'ending' diff --git a/uvcdatspt/scripts/MOCTemporalStatistics.py b/uvcdatspt/scripts/MOCTemporalStatistics.py deleted file mode 100644 index c6d51900bc..0000000000 --- a/uvcdatspt/scripts/MOCTemporalStatistics.py +++ /dev/null @@ -1,26 +0,0 @@ -print 'starting' -import sys -from paraview.simple import * - -if len(sys.argv) < 3: - print 'Usage: pvbatch MOCTemporalStatistics.py ' - sys.exit(1) - -paraview.simple._DisableFirstRenderCameraReset() -reader = MOCFileSeriesReader() -print 'input file names are: ', sys.argv[2:len(sys.argv)] -print 'output file name is: ', sys.argv[1] -reader.FileName = sys.argv[2:len(sys.argv)] - -MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics() -MultiBlockTemporalStatistics1.TimeStepType = 0 -MultiBlockTemporalStatistics1.SamplingMethod = 1 -MultiBlockTemporalStatistics1.TimeSpan = 0 -MultiBlockTemporalStatistics1.TimeStepLength = 1 -MultiBlockTemporalStatistics1.TimeCompartmentSize = 16 - -writer = XMLMultiBlockDataWriter() -writer.FileName = sys.argv[1] -writer.UpdatePipeline() - -print 'ending' diff --git a/uvcdatspt/scripts/MWehnerTemporalStatistics.py b/uvcdatspt/scripts/MWehnerTemporalStatistics.py deleted file mode 100644 index d9f2f4c1af..0000000000 --- a/uvcdatspt/scripts/MWehnerTemporalStatistics.py +++ /dev/null @@ -1,47 +0,0 @@ -# Script for computing temporal statistics (average, minimum, maximum -# and standard deviation) on hopper.nersc.gov. The input is a single -# file that contains multipe time steps. The time compartment size is -# a command line argument. 
- -import sys -import time -start = time.time() - -try: paraview.simple -except: from paraview.simple import * -paraview.simple._DisableFirstRenderCameraReset() - -import libvtkParallelPython -import paraview -pm = paraview.servermanager.vtkProcessModule.GetProcessModule() -globalController = pm.GetGlobalController() -pid = globalController.GetLocalProcessId() - -tcsize = sys.argv[1] - -fileName = "statsmwhenertwod.vtm" - -if pid == 0: - print 'starting script with tcsize of ', tcsize, ' and output filename using ', fileName - -V_cam5_1_amip_run2_cam2_h0_1994_nc = NetCDFReader( FileName=['/global/project/projectdirs/m1517/ACE/cam5.1/control/0.25_degre -e/monthly/run2/zg_Amon_CAM5.1_0.25degree_control_v1.0_run2_197901-200512.nc'] ) - -V_cam5_1_amip_run2_cam2_h0_1994_nc.Dimensions = '(plev, lat, lon)' -V_cam5_1_amip_run2_cam2_h0_1994_nc.SphericalCoordinates = 0 - -MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics() -MultiBlockTemporalStatistics1.TimeStepType = 'Months' -#MultiBlockTemporalStatistics1.SamplingMethod = 'Consecutive' -MultiBlockTemporalStatistics1.SamplingMethod = 'Climatology' -#MultiBlockTemporalStatistics1.TimeSpan = 'Year' -MultiBlockTemporalStatistics1.TimeSpan = 'AllTimeSteps' -MultiBlockTemporalStatistics1.TimeCompartmentSize = int(tcsize) - -writer = XMLMultiBlockDataWriter() -writer.FileName = fileName - -writer.UpdatePipeline() -if pid == 0: - print 'finished run in ', time.time()-start - diff --git a/uvcdatspt/scripts/POPGenerateImages.py b/uvcdatspt/scripts/POPGenerateImages.py deleted file mode 100644 index 86f61e47fd..0000000000 --- a/uvcdatspt/scripts/POPGenerateImages.py +++ /dev/null @@ -1,310 +0,0 @@ -# Spatio-temporal script for generating images for POP NetCDF -# output files. This one pseudo-colors by TEMP. It has a -# time compartment size of 4 so the number of processes -# also needs to be a multiple of 4. 
To run it, do: -# mpirun -np ./pvbatch --symmetric POPGenerateImages.py - -try: paraview.simple -except: from paraview.simple import * - -import sys -import os -import paraview - -# trying to import the library where I can specify the global and subcontrollers -try: - import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set -except ImportError: - import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH - -paraview.options.batch = True # this may not be necessary -paraview.simple._DisableFirstRenderCameraReset() - -def CreateTimeCompartments(globalController, timeCompartmentSize): - if globalController.GetNumberOfProcesses() == 1: - print 'single process' - return - elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0: - print 'number of processes must be an integer multiple of time compartment size' - return - elif timeCompartmentSize == globalController.GetNumberOfProcesses(): - return globalController - - gid = globalController.GetLocalProcessId() - timeCompartmentGroupId = int (gid / timeCompartmentSize ) - newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize) - # must unregister if the reference count is greater than 1 - if newController.GetReferenceCount() > 1: - newController.UnRegister(None) - - #print gid, timeCompartmentGroupId, gid % timeCompartmentSize - print gid, ' of global comm is ', newController.GetLocalProcessId() - globalController.SetGlobalController(newController) - return newController - -def CheckReader(reader): - if hasattr(reader, "FileName") == False: - print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName() - sys.exit(-1) - - if hasattr(reader, "TimestepValues") == False: - print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information" - sys.exit(-1) - -def CreateControllers(timeCompartmentSize): - pm = 
paraview.servermanager.vtkProcessModule.GetProcessModule() - globalController = pm.GetGlobalController() - if timeCompartmentSize > globalController.GetNumberOfProcesses(): - timeCompartmentSize = globalController.GetNumberOfProcesses() - - temporalController = CreateTimeCompartments(globalController, timeCompartmentSize) - return globalController, temporalController, timeCompartmentSize - -currentTimeStep = -1 -def UpdateCurrentTimeStep(globalController, timeCompartmentSize): - global currentTimeStep - if currentTimeStep == -1: - currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize - return currentTimeStep - - numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize - currentTimeStep = currentTimeStep + numTimeStepsPerIteration - return currentTimeStep - -def WriteImages(currentTimeStep, currentTime, views): - for view in views: - filename = view.tpFileName.replace("%t", str(currentTimeStep)) - view.ViewTime = currentTime - WriteImage(filename, view, Magnification=view.tpMagnification) - -def WriteFiles(currentTimeStep, currentTime, writers): - for writer in writers: - originalfilename = writer.FileName - fname = originalfilename.replace("%t", str(currentTimeStep)) - writer.FileName = fname - writer.UpdatePipeline(currentTime) - writer.FileName = originalfilename - -def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views): - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - while currentTimeStep < len(timeSteps): - print globalController.GetLocalProcessId(), " is working on ", currentTimeStep - WriteImages(currentTimeStep, timeSteps[currentTimeStep], views) - WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers) - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - -def CreateReader(ctor, args, fileInfo): - "Creates a reader, checks if it can be used, and sets the filenames" - reader = ctor() - 
CheckReader(reader) - import glob - files = glob.glob(fileInfo) - files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering - reader.FileName = files - for a in args: - s = "reader."+a - exec (s) - - return reader - -def CreateWriter(ctor, filename, tp_writers): - writer = ctor() - writer.FileName = filename - tp_writers.append(writer) - return writer - -def CreateView(proxy_ctor, filename, magnification, width, height, tp_views): - view = proxy_ctor() - view.add_attribute("tpFileName", filename) - view.add_attribute("tpMagnification", magnification) - tp_views.append(view) - view.ViewSize = [width, height] - return view - -tp_writers = [] -tp_views = [] -# ==================== end of specialized temporal parallelism sections ================== - -timeCompartmentSize = 4 -globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize) - -RenderView1 = CreateView( CreateRenderView, "POP_TEMP_%t.png", 1, 549, 583, tp_views ) -RenderView1.LightSpecularColor = [1.0, 1.0, 1.0] -RenderView1.InteractionMode = '3D' -RenderView1.UseTexturedBackground = 0 -RenderView1.UseLight = 1 -RenderView1.CameraPosition = [24413625.828416377, -24592716.541236263, 5758186.884780747] -RenderView1.FillLightKFRatio = 3.0 -RenderView1.Background2 = [0.0, 0.0, 0.165] -RenderView1.FillLightAzimuth = -10.0 -RenderView1.LODResolution = 50.0 -RenderView1.BackgroundTexture = [] -RenderView1.KeyLightAzimuth = 10.0 -RenderView1.StencilCapable = 1 -RenderView1.LightIntensity = 1.0 -RenderView1.CameraFocalPoint = [1.78529588937719e-12, 1.4505529101189668e-12, 64147.750000000015] -RenderView1.ImageReductionFactor = 2 -RenderView1.CameraViewAngle = 30.0 -RenderView1.CameraParallelScale = 30343845.664423227 -RenderView1.EyeAngle = 2.0 -RenderView1.HeadLightKHRatio = 3.0 -RenderView1.StereoRender = 0 -RenderView1.KeyLightIntensity = 0.75 -RenderView1.BackLightAzimuth = 110.0 -RenderView1.OrientationAxesInteractivity = 0 
-RenderView1.UseInteractiveRenderingForSceenshots = 0 -RenderView1.UseOffscreenRendering = 0 -RenderView1.Background = [0.31999694819562063, 0.3400015259021897, 0.4299992370489052] -RenderView1.UseOffscreenRenderingForScreenshots = 1 -RenderView1.NonInteractiveRenderDelay = 2 -RenderView1.CenterOfRotation = [0.0, 0.0, 64147.75] -RenderView1.CameraParallelProjection = 0 -RenderView1.CompressorConfig = 'vtkSquirtCompressor 0 3' -RenderView1.HeadLightWarmth = 0.5 -RenderView1.MaximumNumberOfPeels = 4 -RenderView1.LightDiffuseColor = [1.0, 1.0, 1.0] -RenderView1.StereoType = 'Red-Blue' -RenderView1.DepthPeeling = 1 -RenderView1.BackLightKBRatio = 3.5 -RenderView1.StereoCapableWindow = 1 -RenderView1.CameraViewUp = [0.0471859955443886, 0.2695389330828218, 0.9618327533293193] -RenderView1.LightType = 'HeadLight' -RenderView1.LightAmbientColor = [1.0, 1.0, 1.0] -RenderView1.RemoteRenderThreshold = 3.0 -RenderView1.KeyLightElevation = 50.0 -RenderView1.CenterAxesVisibility = 0 -RenderView1.MaintainLuminance = 0 -RenderView1.StillRenderImageReductionFactor = 1 -RenderView1.BackLightWarmth = 0.5 -RenderView1.FillLightElevation = -75.0 -RenderView1.MultiSamples = 0 -RenderView1.FillLightWarmth = 0.4 -RenderView1.AlphaBitPlanes = 1 -RenderView1.LightSwitch = 0 -RenderView1.OrientationAxesVisibility = 0 -RenderView1.CameraClippingRange = [15039199.876017962, 60476974.08593859] -RenderView1.BackLightElevation = 0.0 -RenderView1.ViewTime = 0.0 -RenderView1.OrientationAxesOutlineColor = [1.0, 1.0, 1.0] -RenderView1.LODThreshold = 5.0 -RenderView1.CollectGeometryThreshold = 100.0 -RenderView1.UseGradientBackground = 0 -RenderView1.KeyLightWarmth = 0.6 -RenderView1.OrientationAxesLabelColor = [1.0, 1.0, 1.0] - -TEMP_t_t0_1_42l_oilspill12c_00060101_pop_nc = CreateReader( UnstructuredNetCDFPOPreader, ['Stride=[10, 10, 10]', 'VerticalVelocity=0', 'VOI=[0, -1, 0, -1, 0, -1]'], "/home/acbauer/DATA/UVCDAT/TEMP.t.t0.1_42l_oilspill12c.*.pop.nc" ) -timeSteps = 
GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0] -a1_TEMP_PiecewiseFunction = CreatePiecewiseFunction( Points=[0.0, 0.0, 1.0, 1.0] ) - -a1_TEMP_PVLookupTable = GetLookupTableForArray( "TEMP", 1, Discretize=1, RGBPoints=[-20.0, 0.23, 0.299, 0.754, 31.338409423828125, 0.706, 0.016, 0.15], UseLogScale=0, VectorComponent=0, NanColor=[0.25, 0.0, 0.0], NumberOfTableValues=256, ColorSpace='Diverging', VectorMode='Magnitude', HSVWrap=0, ScalarRangeInitialized=1.0, LockScalarRange=0 ) - -DataRepresentation1 = Show() -DataRepresentation1.CubeAxesZAxisVisibility = 1 -DataRepresentation1.SelectionPointLabelColor = [0.5, 0.5, 0.5] -DataRepresentation1.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds' -DataRepresentation1.SuppressLOD = 0 -DataRepresentation1.CubeAxesXGridLines = 0 -DataRepresentation1.CubeAxesYAxisTickVisibility = 1 -DataRepresentation1.Position = [0.0, 0.0, 0.0] -DataRepresentation1.BackfaceRepresentation = 'Follow Frontface' -DataRepresentation1.SelectionOpacity = 1.0 -DataRepresentation1.SelectionPointLabelShadow = 0 -DataRepresentation1.CubeAxesYGridLines = 0 -DataRepresentation1.OrientationMode = 'Direction' -DataRepresentation1.Source.TipResolution = 6 -DataRepresentation1.ScaleMode = 'No Data Scaling Off' -DataRepresentation1.Diffuse = 1.0 -DataRepresentation1.SelectionUseOutline = 0 -DataRepresentation1.SelectionPointLabelFormat = '' -DataRepresentation1.CubeAxesZTitle = 'Z-Axis' -DataRepresentation1.Specular = 0.1 -DataRepresentation1.SelectionVisibility = 1 -DataRepresentation1.InterpolateScalarsBeforeMapping = 1 -DataRepresentation1.CubeAxesZAxisTickVisibility = 1 -DataRepresentation1.Origin = [0.0, 0.0, 0.0] -DataRepresentation1.CubeAxesVisibility = 0 -DataRepresentation1.Scale = [1.0, 1.0, 1.0] -DataRepresentation1.SelectionCellLabelJustification = 'Left' -DataRepresentation1.DiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation1.SelectionCellLabelOpacity = 1.0 -DataRepresentation1.CubeAxesInertia = 1 
-DataRepresentation1.Source = "Arrow" -DataRepresentation1.Source.Invert = 0 -DataRepresentation1.Masking = 0 -DataRepresentation1.Opacity = 1.0 -DataRepresentation1.LineWidth = 1.0 -DataRepresentation1.MeshVisibility = 0 -DataRepresentation1.Visibility = 1 -DataRepresentation1.SelectionCellLabelFontSize = 18 -DataRepresentation1.CubeAxesCornerOffset = 0.0 -DataRepresentation1.SelectionPointLabelJustification = 'Left' -DataRepresentation1.SelectionPointLabelVisibility = 0 -DataRepresentation1.SelectOrientationVectors = '' -DataRepresentation1.CubeAxesTickLocation = 'Inside' -DataRepresentation1.BackfaceDiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation1.CubeAxesYAxisVisibility = 1 -DataRepresentation1.SelectionPointLabelFontFamily = 'Arial' -DataRepresentation1.Source.ShaftResolution = 6 -DataRepresentation1.CubeAxesFlyMode = 'Closest Triad' -DataRepresentation1.SelectScaleArray = '' -DataRepresentation1.CubeAxesYTitle = 'Y-Axis' -DataRepresentation1.ColorAttributeType = 'POINT_DATA' -DataRepresentation1.SpecularPower = 100.0 -DataRepresentation1.Texture = [] -DataRepresentation1.SelectionCellLabelShadow = 0 -DataRepresentation1.AmbientColor = [1.0, 1.0, 1.0] -DataRepresentation1.MapScalars = 1 -DataRepresentation1.PointSize = 2.0 -DataRepresentation1.Source.TipLength = 0.35 -DataRepresentation1.SelectionCellLabelFormat = '' -DataRepresentation1.Scaling = 0 -DataRepresentation1.StaticMode = 0 -DataRepresentation1.SelectionCellLabelColor = [0.0, 1.0, 0.0] -DataRepresentation1.Source.TipRadius = 0.1 -DataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483] -DataRepresentation1.CubeAxesXAxisTickVisibility = 1 -DataRepresentation1.SelectionCellLabelVisibility = 0 -DataRepresentation1.NonlinearSubdivisionLevel = 1 -DataRepresentation1.CubeAxesColor = [1.0, 1.0, 1.0] -DataRepresentation1.Representation = 'Surface' -DataRepresentation1.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0] -DataRepresentation1.CubeAxesXAxisMinorTickVisibility = 1 
-DataRepresentation1.Orientation = [0.0, 0.0, 0.0] -DataRepresentation1.CubeAxesXTitle = 'X-Axis' -DataRepresentation1.ScalarOpacityUnitDistance = 313870.26193506655 -DataRepresentation1.BackfaceOpacity = 1.0 -DataRepresentation1.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds' -DataRepresentation1.SelectionColor = [1.0, 0.0, 1.0] -DataRepresentation1.Ambient = 0.0 -DataRepresentation1.SelectionPointLabelFontSize = 18 -DataRepresentation1.ScaleFactor = 1.0 -DataRepresentation1.BackfaceAmbientColor = [1.0, 1.0, 1.0] -DataRepresentation1.Source.ShaftRadius = 0.03 -DataRepresentation1.ScalarOpacityFunction = a1_TEMP_PiecewiseFunction -DataRepresentation1.SelectMaskArray = '' -DataRepresentation1.SelectionLineWidth = 2.0 -DataRepresentation1.CubeAxesZAxisMinorTickVisibility = 1 -DataRepresentation1.CubeAxesXAxisVisibility = 1 -DataRepresentation1.Interpolation = 'Gouraud' -DataRepresentation1.SelectMapper = 'Projected tetra' -DataRepresentation1.SelectionCellLabelFontFamily = 'Arial' -DataRepresentation1.SelectionCellLabelItalic = 0 -DataRepresentation1.CubeAxesYAxisMinorTickVisibility = 1 -DataRepresentation1.CubeAxesZGridLines = 0 -DataRepresentation1.ExtractedBlockIndex = 0 -DataRepresentation1.SelectionPointLabelOpacity = 1.0 -DataRepresentation1.Pickable = 1 -DataRepresentation1.CustomBoundsActive = [0, 0, 0] -DataRepresentation1.SelectionRepresentation = 'Wireframe' -DataRepresentation1.SelectionPointLabelBold = 0 -DataRepresentation1.ColorArrayName = 'TEMP' -DataRepresentation1.SelectionPointLabelItalic = 0 -DataRepresentation1.SpecularColor = [1.0, 1.0, 1.0] -DataRepresentation1.LookupTable = a1_TEMP_PVLookupTable -DataRepresentation1.SelectionPointSize = 5.0 -DataRepresentation1.SelectionCellLabelBold = 0 -DataRepresentation1.Orient = 0 - - - -IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views) diff --git a/uvcdatspt/scripts/benchmark.py b/uvcdatspt/scripts/benchmark.py deleted file mode 100644 index 
dca7f2ab87..0000000000 --- a/uvcdatspt/scripts/benchmark.py +++ /dev/null @@ -1,626 +0,0 @@ -""" -This module has utilities to benchmark paraview. - -First, when run standalone, this will do a simple rendering benchmark test. The -test renders a sphere with various rendering settings and reports the rendering -rate achieved in triangles/sec. run() is the entrypoint for that usage. - -Second, you can set up arbitrary pipelines and this module helps you obtain, -interpret and report the information recorded by ParaView's logs. -Do that like so: -1) optionally, call maximize logs first -2) setup and run your visualization pipeline (via GUI or script as you prefer) -3) either -- call print_logs() to print out the logs in raw format -or -- call parse_logs() to let the script identify and report on per frame and per -filter execution times - -WARNING: This was meant for server side rendering, but it could work -reasonably well when geometry is delivered to the client and rendered there -if the script were changed to recognize MPIMoveData as end of frame and did -something sensible on the server which has no other end of frame knowledge - -TODO: builtin mode shouldn't show server info, it is redundant -TODO: this doesn't handle split render/data server mode -TODO: the end of frame markers are heuristic, likely buggy, and have not -been tried since before 3.9's view restructuring -""" - -import time -import sys -from paraview.simple import * - -try: - import numpy - numpy_loaded = True -except ImportError: - numpy_loaded = False - -import re -import paraview -import copy -import pickle - -# a regular expression to parse filter execution time -match_filter = re.compile(" *Execute (\w+) id: +(\d+), +(\d*.*\d+) +seconds") -match_vfilter = re.compile(" *Execute (\w+) +, +(\d*.*\d+) +seconds") - -# a regular expression to parse overall rendering time -match_still_render = re.compile(" *(Still) Render, +(\d*.*\d+) +seconds") -match_interactive_render = \ -re.compile(" 
*(Interactive) Render, +(\d*.*\d+) +seconds") -match_render = re.compile(" *(\w+|\w+ Dev) Render, +(\d*.*\d+) +seconds") -match_icetrender = re.compile("(IceT Dev) Render, +(\d*.*\d+) +seconds") - -# more for parallel composite and delivery time -match_composite = re.compile(" *Compositing, +(\d*.*\d+) +seconds") -match_send = re.compile(" *Sending, +(\d*.*\d+) +seconds") -match_receive = re.compile(" *Receiving, +(\d*.*\d+) +seconds") - -match_comp_xmit = \ -re.compile(" *TreeComp (Send|Receive) (\d+) " + \ - "(to|from) (\d+) uchar (\d+), +(\d*.*\d+) +seconds") -match_comp_comp = re.compile(" *TreeComp composite, *(\d*.*\d+) +seconds") - -showparse = False - -#icet composite message comes after the render messages, -#where for bswap and manta it comes before so we have to treat icet differently -icetquirk = False - -start_frame = 0 -default_log_threshold = dict() -default_buffer_length = dict() - -class OneLog : - def __init__(self): - self.runmode = 'batch' - self.servertype = 'unified' - self.component = 0x10 - self.rank = 0 - self.lines = [] - - def componentString(self): - ret = "" - if self.component & 0x10: - ret = ret + " CLIENT " - if self.component & 0x4: - ret = ret + " RENDER " - if self.component & 0x1: - ret = ret + " DATA " - return ret - - def print_log(self, showlines=False): - print "#RunMode:", self.runmode, - print "ServerType:", self.servertype, - print "Component:", self.componentString(), - print "processor#:", self.rank - if showlines: - for i in self.lines: - print i - -logs = [] - -def maximize_logs () : - """ - Convenience method to ask paraview to produce logs with lots of space and - highest resolution. - """ - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - if pm == None: - return - - # Not used here... 
- default_buffer_length[str(0x01)] = 1000000 - default_buffer_length[str(0x04)] = 1000000 - default_buffer_length[str(0x10)] = 1000000 - - default_log_threshold[str(0x01)] = 0.0 - default_log_threshold[str(0x04)] = 0.0 - default_log_threshold[str(0x10)] = 0.0 - - -def dump_logs( filename ) : - """ - This saves off the logs we've gathered. - Ot allows you to run a benchmark somewhere, save off all of the details in - raw format, then load them somewhere else. You can then do a detailed - analysis and you always have the raw data to go back to. - """ - global logs - f = open(filename, "w") - pickle.dump(logs, f) - f.close() - -def import_logs( filename ) : - """ - This is for bringing in a saved log files and parse it after the fact. - TODO: add an option to load in raw parview logs in text format - """ - global logs - logs = [] - f = open(filename, "r") - logs = pickle.load(f) - f.close() - -def get_logs() : - """ - This is for bringing in logs at run time to parse while running. - """ - global logs - logs = [] - - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - if pm == None: - return - - connectionId = paraview.servermanager.ActiveConnection.ID - session = paraview.servermanager.ActiveConnection.Session - pmOptions = pm.GetOptions() - - """ - vtkPVOptions::ProcessTypeEnum - PARAVIEW = 0x2, - PVCLIENT = 0x4, - PVSERVER = 0x8, - PVRENDER_SERVER = 0x10, - PVDATA_SERVER = 0x20, - PVBATCH = 0x40, - """ - if pmOptions.GetProcessType() == 0x40: - runmode = 'batch' - else: - runmode = 'interactive' - - """ - vtkSMSession::RenderingMode - RENDERING_NOT_AVAILABLE = 0x00, - RENDERING_UNIFIED = 0x01, - RENDERING_SPLIT = 0x02 - """ - if session.GetRenderClientMode() == 0x01: - servertype = 'unified' - else: - servertype = 'split' - - """ - vtkProcessModule::SERVER_FLAGS - DATA_SERVER = 0x01, - DATA_SERVER_ROOT = 0x02, - RENDER_SERVER = 0x04, - RENDER_SERVER_ROOT = 0x08, - SERVERS = DATA_SERVER | RENDER_SERVER, - CLIENT = 0x10, - CLIENT_AND_SERVERS = 
DATA_SERVER | CLIENT | RENDER_SERVER - """ - if runmode == 'batch': - components = [0x04] - else: - if servertype == 'unified': - components = [0x10, 0x04] - else: - components = [0x10, 0x04, 0x01] - - for component in components: - timerInfo = paraview.servermanager.vtkPVTimerInformation() - if len(default_log_threshold) != 0: - timerInfo.SetLogThreshold(default_log_threshold[str(component)]) - session.GatherInformation(component, timerInfo, 0) - - for i in range(timerInfo.GetNumberOfLogs()): - alog = OneLog() - alog.runmode = runmode - alog.servertype = servertype - alog.component = component - alog.rank = i - for line in timerInfo.GetLog(i).split('\n'): - alog.lines.append(line) - logs.append(alog) - -def print_logs() : - global logs - - if len(logs) == 0: - get_logs() - - for i in logs: - i.print_log(True) - -def __process_frame() : - global filters - global current_frames_records - global frames - global start_frame - - max = len(current_frames_records) - - #determine ancestry of each record from order and indent - #subtract only immediate children from each record - - #TODO: Make this an option - for x in xrange(max): - indent = current_frames_records[x]['indent'] - minindent = 10000 - for y in xrange(x+1,max): - indent2 = current_frames_records[y]['indent'] - if indent2<=indent: - #found a record which is not a descendant - break - if indent2 < minindent: - minindent = indent2 - for y in xrange(x+1,max): - indent2 = current_frames_records[y]['indent'] - if indent2 == minindent: - current_frames_records[x]['local_duration'] = \ - current_frames_records[x]['local_duration'] -\ - current_frames_records[y]['duration'] - - for x in xrange(max): - #keep global statics per filter - record = current_frames_records[x] - id = record['id'] - if id in filters: - srecord = filters[id] - srecord['duration'] = srecord['duration'] + record['duration'] - srecord['local_duration'] = srecord['local_duration'] +\ - record['local_duration'] - srecord['count'] = srecord['count'] 
+ 1 - filters[id] = srecord - else: - filters[id] = copy.deepcopy(record) - - #save off this frame and begin the next - frames.append(current_frames_records) - current_frames_records = [] - -def __parse_line (line) : - """ - Examine one line from the logs. If it is a report about a filter's - execution time, parse the relevant information out of the line and - collect those statistics. We record each filter's average execution - time as well as the each filters contribution to the each rendered frame. - """ - global filters - global current_frames_records - global cnt - global show_input - global icetquirk - - found = False - - #find indent - cnt = 0 - for c in range(len(line)): - if line[c] == " ": - cnt = cnt + 1 - else: - break - - #determine if this log comes from icet so we can - #do special case treatement for frame markings - icetline = False - match = match_icetrender.match(line) - if match != None: - icetquirk = True - icetline = True - - match = match_filter.match(line) - if match != None: - found = True - if showparse: - print "FILT:", cnt, line - name = match.group(1) - id = match.group(2) - duration = match.group(3) - - match = match_vfilter.match(line) - if match != None: - found = True - if showparse: - print "LFLT:", cnt, line - name = match.group(1) - id = name - duration = match.group(2) - - match = match_comp_comp.match(line) - if match != None: - found = True - if showparse: - print "TCMP:", cnt, line - name = "tree comp" - id = name - duration = match.group(1) - - match = match_comp_xmit.match(line) - if match != None: - found = True - if showparse: - print "TXMT:", cnt, line - name = match.group(1) - id = name - duration = match.group(6) - - match = match_composite.match(line) - if match != None: - found = True - if showparse: - print "COMP:", cnt, line - name = 'composite' - id = 'comp' - duration = match.group(1) - - match = match_send.match(line) - if match != None: - found = True - if showparse: - print "SEND:", cnt, line - name = 'send' - 
id = 'send' - duration = match.group(1) - - match = match_receive.match(line) - if match != None: - found = True - if showparse: - print "RECV:", cnt, line - name = 'receive' - id = 'recv' - duration = match.group(1) - - match = match_still_render.match(line) - if match != None: - found = True - if showparse: - print "STILL:", cnt, line - name = match.group(1) - id = 'still' - duration = match.group(2) - - if match == None: - match = match_interactive_render.match(line) - if match != None: - found = True - if showparse: - print "INTER:", cnt, line - name = match.group(1) - id = 'inter' - duration = match.group(2) - - if match == None: - match = match_render.match(line) - if match != None: - found = True - if showparse: - print "REND:", cnt, line - name = match.group(1) - id = 'render' - duration = match.group(2) - - if found == False: - # we didn't find anything we recognized in this line, ignore it - if showparse: - print "????:", cnt, line - return - - record = dict() - record['id'] = id - record['name'] = name - record['duration'] = float(duration) - record['local_duration'] = float(duration) - record['count'] = 1 - record['indent'] = cnt - - #watch for the beginning of the next frame/end of previous frame - if cnt == 0: - if (id == 'still') or \ - (id == 'inter') or \ - (icetquirk == False and id == 'comp') or \ - (icetquirk == True and icetline == True) : - if showparse: - print "SOF" #start of frame - #decipher parent child information from records in the frame - #and save off newly gathered per filter and per frame statistics - __process_frame() - - #keep a record of this execution as part for the current frame - current_frames_records.append(record) - - return - -def parse_logs(show_parse = False, tabular = False) : - """ - Parse the collected paraview log information. - This prints out per frame, and aggregated per filter statistics. 
- - If show_parse is true, debugging information is shown about the parsing - process that allows you to verify that the derived stats are correct. - This includes each and echo of each log line collected, prepended by - the token type and indent scanned in, or ???? if the line is unrecognized - and ignored. Frame boundaries are denoted by SOF, indicating the preceeding - line was determined to be the start of the next frame. - """ - - global filters - global current_frames_records - global frames - global cnt - global showparse - global start_frame - - showparse = show_parse - - if len(logs) == 0: - get_logs() - - for i in logs: - # per filter records - filters = dict() - filters.clear() - # per frame records - frames = [] - # components of current frame - current_frames_records = [] - cnt = 0 - - runmode = i.runmode - servertype = i.servertype - component = i.component - rank = i.rank - i.print_log(False) - - for line in i.lines: - __parse_line(line) - - #collect stats for the current frame in process but not officially ended - __process_frame() - - #print out the gathered per frame information - if tabular: - frecs = dict() - line = "#framenum, " - for x in filters: - line += filters[x]['name'] + ":" + filters[x]['id'] + ", " - #print line - for cnt in xrange(start_frame, len(frames)): - line = "" - line += str(cnt) + ", " - printed = dict() - for x in filters: - id = filters[x]['id'] - name = filters[x]['name'] - found = False - for record in frames[cnt]: - if 'id' in record: - if record['id'] == id and \ - record['name'] == name and \ - not id in printed: - found = True - printed[id] = 1 - line += str(record['local_duration']) + ", " - if not id in frecs: - frecs[id] = [] - frecs[id].append(record['local_duration']) - if not found: - line += "0, " - #print line - #print - for x in frecs.keys(): - v = frecs[x] - print "# ", x, len(v), - if numpy_loaded: - print numpy.min(v), numpy.mean(v), numpy.max(v), - print numpy.std(v) - else: - print "#FRAME TIMINGS" - 
print "#filter id, filter type, inclusive duration, local duration" - for cnt in xrange(start_frame, len(frames)): - print "#Frame ", cnt - for record in frames[cnt]: - if 'id' in record: - print record['id'], ",", - print record['name'], ",", - print record['duration'], ",", - print record['local_duration'] - #print - #print - - if not tabular: - #print out the gathered per filter information - print "#FILTER TIMINGS" - print "#filter id, filter type, count, "+\ - "sum inclusive duration, sum local duration" - for x in filters: - record = filters[x] - print record['id'], ",", - print record['name'], ",", - print record['count'], ",", - print record['duration'], ",", - print record['local_duration'] - print - -def __render(ss, v, title, nframes): - print '============================================================' - print title - res = [] - res.append(title) - for phires in (500, 1000): - ss.PhiResolution = phires - c = v.GetActiveCamera() - v.CameraPosition = [-3, 0, 0] - v.CameraFocalPoint = [0, 0, 0] - v.CameraViewUp = [0, 0, 1] - Render() - c1 = time.time() - for i in range(nframes): - c.Elevation(0.5) - Render() - if not servermanager.fromGUI: - sys.stdout.write(".") - sys.stdout.flush() - if not servermanager.fromGUI: - sys.stdout.write("\n") - tpr = (time.time() - c1)/nframes - ncells = ss.GetDataInformation().GetNumberOfCells() - print tpr, " secs/frame" - print ncells, " polys" - print ncells/tpr, " polys/sec" - - res.append((ncells, ncells/tpr)) - return res - -def run(filename=None, nframes=60): - """ Runs the benchmark. If a filename is specified, it will write the - results to that file as csv. The number of frames controls how many times - a particular configuration is rendered. Higher numbers lead to more accurate - averages. 
""" - # Turn off progress printing - paraview.servermanager.SetProgressPrintingEnabled(0) - - # Create a sphere source to use in the benchmarks - ss = Sphere(ThetaResolution=1000, PhiResolution=500) - rep = Show() - v = Render() - results = [] - - # Start with these defaults - #v.RemoteRenderThreshold = 0 - obj = servermanager.misc.GlobalMapperProperties() - obj.GlobalImmediateModeRendering = 0 - - # Test different configurations - title = 'display lists, no triangle strips, solid color' - obj.GlobalImmediateModeRendering = 0 - results.append(__render(ss, v, title, nframes)) - - title = 'no display lists, no triangle strips, solid color' - obj.GlobalImmediateModeRendering = 1 - results.append(__render(ss, v, title, nframes)) - - # Color by normals - lt = servermanager.rendering.PVLookupTable() - rep.LookupTable = lt - rep.ColorAttributeType = 0 # point data - rep.ColorArrayName = "Normals" - lt.RGBPoints = [-1, 0, 0, 1, 0.0288, 1, 0, 0] - lt.ColorSpace = 'HSV' - lt.VectorComponent = 0 - - title = 'display lists, no triangle strips, color by array' - obj.GlobalImmediateModeRendering = 0 - results.append(__render(ss, v, title, nframes)) - - title = 'no display lists, no triangle strips, color by array' - obj.GlobalImmediateModeRendering = 1 - results.append(__render(ss, v, title, nframes)) - - if filename: - f = open(filename, "w") - else: - f = sys.stdout - print >>f, 'configuration, %d, %d' % (results[0][1][0], results[0][2][0]) - for i in results: - print >>f, '"%s", %g, %g' % (i[0], i[1][1], i[2][1]) - -if __name__ == "__main__": - run() diff --git a/uvcdatspt/scripts/ocean.py b/uvcdatspt/scripts/ocean.py deleted file mode 100644 index 932d4e2dea..0000000000 --- a/uvcdatspt/scripts/ocean.py +++ /dev/null @@ -1,187 +0,0 @@ - -try: paraview.simple -except: from paraview.simple import * - -import sys -import os -import paraview - -import benchmark - -# trying to import the library where I can specify the global and subcontrollers -try: - import libvtkParallelPython 
as vtkParallel # requires LD_LIBRARY_PATH being properly set -except ImportError: - import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH - - -# global variables -timeCompartmentSize = 4 -input_files = "/home/boonth/Desktop/spatio/ocean_4/SALT*" -iso_files = "/home/boonth/Desktop/spatio/ocean/salt_%i.pvtp" - -currentTimeStep = -1 -log_lines_per_file = 5 - - -# some initial setup -benchmark.maximize_logs() - -pm = paraview.servermanager.vtkProcessModule.GetProcessModule() -timer = paraview.vtk.vtkTimerLog() -if len(sys.argv) < 1: - print 'usage: ' -else: - num_files = int(sys.argv[1]) -numprocs = pm.GetGlobalController().GetNumberOfProcesses() -timer.SetMaxEntries(log_lines_per_file * num_files * numprocs + 2) -pm.GetGlobalController().Barrier() -timer.StartTimer() - -paraview.options.batch = True # this may not be necessary -paraview.simple._DisableFirstRenderCameraReset() - -def CreateTimeCompartments(globalController, timeCompartmentSize): - if globalController.GetNumberOfProcesses() == 1: - print 'single process' - return - elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0: - print 'number of processes must be an integer multiple of time compartment size' - return - elif timeCompartmentSize == globalController.GetNumberOfProcesses(): - return globalController - - gid = globalController.GetLocalProcessId() - timeCompartmentGroupId = int (gid / timeCompartmentSize ) - newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize) - # must unregister if the reference count is greater than 1 - if newController.GetReferenceCount() > 1: - newController.UnRegister(None) - - #print gid, timeCompartmentGroupId, gid % timeCompartmentSize - print gid, ' of global comm is ', newController.GetLocalProcessId() - globalController.SetGlobalController(newController) - return newController - -def CheckReader(reader): - if hasattr(reader, 
"FileName") == False: - print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName() - sys.exit(-1) - - if hasattr(reader, "TimestepValues") == False: - print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information" - sys.exit(-1) - -def CreateControllers(timeCompartmentSize): - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - globalController = pm.GetGlobalController() - if timeCompartmentSize > globalController.GetNumberOfProcesses(): - timeCompartmentSize = globalController.GetNumberOfProcesses() - - temporalController = CreateTimeCompartments(globalController, timeCompartmentSize) - return globalController, temporalController, timeCompartmentSize - -def UpdateCurrentTimeStep(globalController, timeCompartmentSize): - global currentTimeStep - if currentTimeStep == -1: - currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize - return currentTimeStep - - numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize - currentTimeStep = currentTimeStep + numTimeStepsPerIteration - return currentTimeStep - -def WriteImages(currentTimeStep, currentTime, views): - for view in views: - filename = view.tpFileName.replace("%t", str(currentTimeStep)) - view.ViewTime = currentTime - WriteImage(filename, view, Magnification=view.tpMagnification) - -def WriteFiles(currentTimeStep, currentTime, writers, reader, contour): - for writer in writers: - originalfilename = writer.FileName - fname = originalfilename.replace("%t", str(currentTimeStep)) - #folder = (currentTimeStep % 3) + 1 - #fname = originalfilename % (folder, currentTimeStep) - writer.FileName = fname - writer.UpdatePipeline(currentTime) - writer.FileName = originalfilename - -def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views, reader, contour): - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - while currentTimeStep < len(timeSteps): - print 
globalController.GetLocalProcessId(), " is working on ", currentTimeStep - sys.stdout.flush() - WriteImages(currentTimeStep, timeSteps[currentTimeStep], views) - WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers, reader, contour) - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - -def CreateReader(ctor, args, fileInfo): - "Creates a reader, checks if it can be used, and sets the filenames" - reader = ctor() - CheckReader(reader) - import glob - files = glob.glob(fileInfo) - files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering - reader.FileName = files - for a in args: - s = "reader."+a - exec (s) - - return reader - -def CreateWriter(ctor, filename, tp_writers): - writer = ctor() - writer.FileName = filename - tp_writers.append(writer) - return writer - -def CreateView(proxy_ctor, filename, magnification, width, height, tp_views): - view = proxy_ctor() - view.add_attribute("tpFileName", filename) - view.add_attribute("tpMagnification", magnification) - tp_views.append(view) - view.ViewSize = [width, height] - return view - -def main(): - - global timer - global timeCompartmentSize - - tp_writers = [] - tp_views = [] - - # ============ end of specialized temporal parallelism sections ========== - - globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize) - - reader = CreateReader( NetCDFReader, ["Dimensions='(depth_t, t_lat, t_lon)'", 'ReplaceFillValueWithNan=0', 'VerticalBias=0.0', "OutputType='Automatic'", 'SphericalCoordinates=1', 'VerticalScale=1.0'], input_files ) - timeSteps = GetActiveSource().TimestepValues - if len(timeSteps) == 0: - timeSteps = [0.0] - contour = Contour( guiName="contour", Isosurfaces=[0.03], ComputeNormals=1, ComputeGradients=0, ComputeScalars=0, ContourBy=['POINTS', 'SALT'], PointMergeMethod="Uniform Binning" ) - contour.PointMergeMethod.Numberofpointsperbucket = 8 - contour.PointMergeMethod.Divisions = 
[50, 50, 50] - - ParallelPolyDataWriter2 = CreateWriter(XMLPPolyDataWriter,iso_files,tp_writers) - - IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views, reader, contour) - - globalController.Barrier() - timer.StopTimer() - - gid = globalController.GetLocalProcessId() - if gid == 0: - print 'all done! -- total time is', timer.GetElapsedTime(), 'seconds' - - benchmark.get_logs() - if gid == 0: - benchmark.print_logs() - -if __name__ == '__main__': - if len(sys.argv) < 1: - print 'usage: ' - else: - main() -