Index: trunk/libtool
===================================================================
--- trunk/libtool (revision 579)
+++ trunk/libtool (revision 580)
@@ -1,11983 +1,11983 @@
#! /bin/bash
# Generated automatically by config.status (npstat) 4.9.0
# Libtool was configured on host dawn:
# NOTE: Changes made to this file will be lost: look at ltmain.sh.
# Provide generalized library-building support services.
# Written by Gordon Matzigkeit, 1996
# Copyright (C) 2014 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# GNU Libtool is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program or library that is built
# using GNU Libtool, you may include this file under the same
# distribution terms that you use for the rest of that program.
#
# GNU Libtool is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The names of the tagged configurations supported by this script.
available_tags='CXX F77 '
# Configured defaults for sys_lib_dlsearch_path munging.
: ${LT_SYS_LIBRARY_PATH=""}
# ### BEGIN LIBTOOL CONFIG
# Which release of libtool.m4 was used?
macro_version=2.4.6
macro_revision=2.4.6
# Whether or not to build shared libraries.
build_libtool_libs=yes
# Whether or not to build static libraries.
build_old_libs=yes
# What type of objects to build.
pic_mode=yes
# Whether or not to optimize for fast installation.
fast_install=needless
# Shared archive member basename, for filename-based shared library versioning on AIX.
shared_archive_member_spec=
# Shell to use when invoking shell scripts.
SHELL="/bin/bash"
# An echo program that protects backslashes.
ECHO="printf %s\\n"
# The PATH separator for the build system.
PATH_SEPARATOR=":"
# The host system.
host_alias=
host=x86_64-pc-linux-gnu
host_os=linux-gnu
# The build system.
build_alias=
build=x86_64-pc-linux-gnu
build_os=linux-gnu
# A sed program that does not truncate output.
SED="/bin/sed"
# Sed that helps us avoid accidentally triggering echo(1) options like -n.
Xsed="$SED -e 1s/^X//"
# A grep program that handles long lines.
GREP="/bin/grep"
# An ERE matcher.
EGREP="/bin/grep -E"
# A literal string matcher.
FGREP="/bin/grep -F"
# A BSD- or MS-compatible name lister.
NM="/usr/bin/nm -B"
# Whether we need soft or hard links.
LN_S="ln -s"
# What is the maximum length of a command?
max_cmd_len=1572864
# Object file suffix (normally "o").
objext=o
# Executable file suffix (normally "").
exeext=
# whether the shell understands "unset".
lt_unset=unset
# turn spaces into newlines.
SP2NL="tr \\040 \\012"
# turn newlines into spaces.
NL2SP="tr \\015\\012 \\040\\040"
# convert $build file names to $host format.
to_host_file_cmd=func_convert_file_noop
# convert $build files to toolchain format.
to_tool_file_cmd=func_convert_file_noop
# An object symbol dumper.
OBJDUMP="objdump"
# Method to check whether dependent libraries are shared objects.
deplibs_check_method="pass_all"
# Command to use when deplibs_check_method = "file_magic".
file_magic_cmd="\$MAGIC_CMD"
# How to find potential files when deplibs_check_method = "file_magic".
file_magic_glob=""
# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
want_nocaseglob="no"
# DLL creation program.
DLLTOOL="false"
# Command to associate shared and link libraries.
sharedlib_from_linklib_cmd="printf %s\\n"
# The archiver.
AR="ar"
# Flags to create an archive.
AR_FLAGS="cru"
# How to feed a file listing to the archiver.
archiver_list_spec="@"
# A symbol stripping program.
STRIP="strip"
# Commands used to install an old-style archive.
RANLIB="ranlib"
old_postinstall_cmds="chmod 644 \$oldlib~\$RANLIB \$tool_oldlib"
old_postuninstall_cmds=""
# Whether to use a lock for old archive extraction.
lock_old_archive_extraction=no
# A C compiler.
LTCC="gcc"
# LTCC compiler flags.
LTCFLAGS="-g -O2"
# Take the output of nm and produce a listing of raw symbols and C names.
global_symbol_pipe="sed -n -e 's/^.*[ ]\\([ABCDGIRSTW][ABCDGIRSTW]*\\)[ ][ ]*\\([_A-Za-z][_A-Za-z0-9]*\\)\$/\\1 \\2 \\2/p' | sed '/ __gnu_lto/d'"
# Transform the output of nm in a proper C declaration.
global_symbol_to_cdecl="sed -n -e 's/^T .* \\(.*\\)\$/extern int \\1();/p' -e 's/^[ABCDGIRSTW][ABCDGIRSTW]* .* \\(.*\\)\$/extern char \\1;/p'"
# Transform the output of nm into a list of symbols to manually relocate.
global_symbol_to_import=""
# Transform the output of nm in a C name address pair.
global_symbol_to_c_name_address="sed -n -e 's/^: \\(.*\\) .*\$/ {\"\\1\", (void *) 0},/p' -e 's/^[ABCDGIRSTW][ABCDGIRSTW]* .* \\(.*\\)\$/ {\"\\1\", (void *) \\&\\1},/p'"
# Transform the output of nm in a C name address pair when lib prefix is needed.
global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \\(.*\\) .*\$/ {\"\\1\", (void *) 0},/p' -e 's/^[ABCDGIRSTW][ABCDGIRSTW]* .* \\(lib.*\\)\$/ {\"\\1\", (void *) \\&\\1},/p' -e 's/^[ABCDGIRSTW][ABCDGIRSTW]* .* \\(.*\\)\$/ {\"lib\\1\", (void *) \\&\\1},/p'"
# The name lister interface.
nm_interface="BSD nm"
# Specify filename containing input files for $NM.
nm_file_list_spec="@"
# The root where to search for dependent libraries, and where our libraries should be installed.
lt_sysroot=
# Command to truncate a binary pipe.
lt_truncate_bin="/bin/dd bs=4096 count=1"
# The name of the directory that contains temporary libtool files.
objdir=.libs
# Used to examine libraries when file_magic_cmd begins with "file".
MAGIC_CMD=file
# Must we lock files when doing compilation?
need_locks="no"
# Manifest tool.
MANIFEST_TOOL=":"
# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
DSYMUTIL=""
# Tool to change global to local symbols on Mac OS X.
NMEDIT=""
# Tool to manipulate fat objects and archives on Mac OS X.
LIPO=""
# ldd/readelf like tool for Mach-O binaries on Mac OS X.
OTOOL=""
# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
OTOOL64=""
# Old archive suffix (normally "a").
libext=a
# Shared library suffix (normally ".so").
shrext_cmds=".so"
# The commands to extract the exported symbol list from a shared archive.
extract_expsyms_cmds=""
# Variables whose values should be saved in libtool wrapper scripts and
# restored at link time.
variables_saved_for_relink="PATH LD_LIBRARY_PATH LD_RUN_PATH GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
# Do we need the "lib" prefix for modules?
need_lib_prefix=no
# Do we need a version for libraries?
need_version=no
# Library versioning type.
version_type=linux
# Shared library runtime path variable.
runpath_var=LD_RUN_PATH
# Shared library path variable.
shlibpath_var=LD_LIBRARY_PATH
# Is shlibpath searched before the hard-coded library search path?
shlibpath_overrides_runpath=yes
# Format of library name prefix.
libname_spec="lib\$name"
# List of archive names. First name is the real one, the rest are links.
# The last name is the one that the linker finds with -lNAME
library_names_spec="\$libname\$release\$shared_ext\$versuffix \$libname\$release\$shared_ext\$major \$libname\$shared_ext"
# The coded name of the library, if different from the real name.
soname_spec="\$libname\$release\$shared_ext\$major"
# Permission mode override for installation of shared libraries.
install_override_mode=""
# Command to use after installation of a shared archive.
postinstall_cmds=""
# Command to use after uninstallation of a shared archive.
postuninstall_cmds=""
# Commands used to finish a libtool library installation in a directory.
finish_cmds="PATH=\\\"\\\$PATH:/sbin\\\" ldconfig -n \$libdir"
# As "finish_cmds", except a single script fragment to be evaled but
# not shown.
finish_eval=""
# Whether we should hardcode library paths into libraries.
hardcode_into_libs=yes
# Compile-time system search path for libraries.
sys_lib_search_path_spec="/usr/lib/gcc/x86_64-linux-gnu/7 /usr/lib/x86_64-linux-gnu /usr/lib /lib/x86_64-linux-gnu /lib "
# Detected run-time system search path for libraries.
sys_lib_dlsearch_path_spec="/lib /usr/lib /usr/lib/x86_64-linux-gnu/libfakeroot /usr/local/lib /usr/local/lib/x86_64-linux-gnu /lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu "
# Explicit LT_SYS_LIBRARY_PATH set during ./configure time.
configure_time_lt_sys_library_path=""
# Whether dlopen is supported.
dlopen_support=unknown
# Whether dlopen of programs is supported.
dlopen_self=unknown
# Whether dlopen of statically linked programs is supported.
dlopen_self_static=unknown
# Commands to strip libraries.
old_striplib="strip --strip-debug"
striplib="strip --strip-unneeded"
# The linker used to build libraries.
-LD="/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64"
+LD="/usr/bin/ld -m elf_x86_64"
# How to create reloadable object files.
reload_flag=" -r"
reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
# Commands used to build an old-style archive.
old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
# A language specific compiler.
CC="gcc"
# Is the compiler the GNU compiler?
with_gcc=yes
# Compiler flag to turn off builtin functions.
no_builtin_flag=" -fno-builtin"
# Additional compiler flags for building library objects.
pic_flag=" -fPIC -DPIC"
# How to pass a linker flag through the compiler.
wl="-Wl,"
# Compiler flag to prevent dynamic linking.
link_static_flag="-static"
# Does compiler simultaneously support -c and -o options?
compiler_c_o="yes"
# Whether or not to add -lc for building shared libraries.
build_libtool_need_lc=no
# Whether or not to disallow shared libs when runtime libs are static.
allow_libtool_libs_with_static_runtimes=no
# Compiler flag to allow reflexive dlopens.
export_dynamic_flag_spec="\$wl--export-dynamic"
# Compiler flag to generate shared objects directly from archives.
whole_archive_flag_spec="\$wl--whole-archive\$convenience \$wl--no-whole-archive"
# Whether the compiler copes with passing no objects directly.
compiler_needs_object="no"
# Create an old-style archive from a shared archive.
old_archive_from_new_cmds=""
# Create a temporary old-style archive to link instead of a shared archive.
old_archive_from_expsyms_cmds=""
# Commands used to build a shared archive.
archive_cmds="\$CC -shared \$pic_flag \$libobjs \$deplibs \$compiler_flags \$wl-soname \$wl\$soname -o \$lib"
archive_expsym_cmds="echo \\\"{ global:\\\" > \$output_objdir/\$libname.ver~
cat \$export_symbols | sed -e \\\"s/\\\\(.*\\\\)/\\\\1;/\\\" >> \$output_objdir/\$libname.ver~
echo \\\"local: *; };\\\" >> \$output_objdir/\$libname.ver~
\$CC -shared \$pic_flag \$libobjs \$deplibs \$compiler_flags \$wl-soname \$wl\$soname \$wl-version-script \$wl\$output_objdir/\$libname.ver -o \$lib"
# Commands used to build a loadable module if different from building
# a shared archive.
module_cmds=""
module_expsym_cmds=""
# Whether we are building with GNU ld or not.
with_gnu_ld="yes"
# Flag that allows shared libraries with undefined symbols to be built.
allow_undefined_flag=""
# Flag that enforces no undefined symbols.
no_undefined_flag=""
# Flag to hardcode $libdir into a binary during linking.
# This must work even if $libdir does not exist
hardcode_libdir_flag_spec="\$wl-rpath \$wl\$libdir"
# Whether we need a single "-rpath" flag with a separated argument.
hardcode_libdir_separator=""
# Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
# DIR into the resulting binary.
hardcode_direct=no
# Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
# DIR into the resulting binary and the resulting library dependency is
# "absolute", i.e. impossible to change by setting $shlibpath_var if the
# library is relocated.
hardcode_direct_absolute=no
# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
# into the resulting binary.
hardcode_minus_L=no
# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
# into the resulting binary.
hardcode_shlibpath_var=unsupported
# Set to "yes" if building a shared library automatically hardcodes DIR
# into the library and all subsequent libraries and executables linked
# against it.
hardcode_automatic=no
# Set to yes if linker adds runtime paths of dependent libraries
# to runtime path list.
inherit_rpath=no
# Whether libtool must link a program against all its dependency libraries.
link_all_deplibs=unknown
# Set to "yes" if exported symbols are required.
always_export_symbols=no
# The commands to list exported symbols.
export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
# Symbols that should not be listed in the preloaded symbols.
exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
# Symbols that must always be exported.
include_expsyms=""
# Commands necessary for linking programs (against libraries) with templates.
prelink_cmds=""
# Commands necessary for finishing linking programs.
postlink_cmds=""
# Specify filename containing input files.
file_list_spec=""
# How to hardcode a shared library path into an executable.
hardcode_action=immediate
# The directories searched by this compiler when creating a shared library.
compiler_lib_search_dirs=""
# Dependencies to place before and after the objects being linked to
# create a shared library.
predep_objects=""
postdep_objects=""
predeps=""
postdeps=""
# The library search path used internally by the compiler when linking
# a shared library.
compiler_lib_search_path=""
# ### END LIBTOOL CONFIG
# ### BEGIN FUNCTIONS SHARED WITH CONFIGURE
# func_munge_path_list VARIABLE PATH
# -----------------------------------
# VARIABLE is name of variable containing _space_ separated list of
# directories to be munged by the contents of PATH, which is string
# having a format:
# "DIR[:DIR]:"
# string "DIR[ DIR]" will be prepended to VARIABLE
# ":DIR[:DIR]"
# string "DIR[ DIR]" will be appended to VARIABLE
# "DIRP[:DIRP]::[DIRA:]DIRA"
# string "DIRP[ DIRP]" will be prepended to VARIABLE and string
# "DIRA[ DIRA]" will be appended to VARIABLE
# "DIR[:DIR]"
# VARIABLE will be replaced by "DIR[ DIR]"
func_munge_path_list ()
{
# Munge the space-separated directory list held in the variable named
# by $1, directed by the colon-separated spec in $2 (format described
# in the comment block above).
case x$2 in
# Empty spec: leave the variable untouched.
x)
;;
# "DIR[:DIR]:" (trailing colon): prepend the DIRs to the variable.
*:)
eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\"
;;
# ":DIR[:DIR]" (leading colon): append the DIRs to the variable.
x:*)
eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\"
;;
# "DIRP[:DIRP]::[DIRA:]DIRA": append the part after "::", then
# prepend the part before it.
*::*)
eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\"
eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\"
;;
# Plain "DIR[:DIR]": replace the variable's value entirely.
*)
eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\"
;;
esac
}
# Calculate cc_basename. Skip known compiler wrappers and cross-prefix.
func_cc_basename ()
{
# Scan the words of the compiler command line ($*), skipping known
# wrapper programs and option words; the first remaining word is
# taken as the real compiler.  The trailing "" guarantees the loop
# body runs at least once even for an empty argument list.
for cc_temp in $*""; do
case $cc_temp in
# Known compiler wrappers (bare name or any path ending in one).
compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
# Option words: keep scanning.
\-*) ;;
# First non-wrapper, non-option word: this is the compiler.
*) break;;
esac
done
# Strip any directory component and any $host_alias cross-prefix.
func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
}
# ### END FUNCTIONS SHARED WITH CONFIGURE
#! /bin/sh
## DO NOT EDIT - This file generated from ./build-aux/ltmain.in
## by inline-source v2014-01-03.01
# libtool (GNU libtool) 2.4.6
# Provide generalized library-building support services.
# Written by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
# Copyright (C) 1996-2015 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# GNU Libtool is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# As a special exception to the GNU General Public License,
# if you distribute this file as part of a program or library that
# is built using GNU Libtool, you may include this file under the
# same distribution terms that you use for the rest of that program.
#
# GNU Libtool is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
PROGRAM=libtool
PACKAGE=libtool
VERSION=2.4.6
package_revision=2.4.6
## ------ ##
## Usage. ##
## ------ ##
# Run './libtool --help' for help with using this script from the
# command line.
## ------------------------------- ##
## User overridable command paths. ##
## ------------------------------- ##
# After configure completes, it has a better idea of some of the
# shell tools we need than the defaults used by the functions shared
# with bootstrap, so set those here where they can still be over-
# ridden by the user, but otherwise take precedence.
: ${AUTOCONF="autoconf"}
: ${AUTOMAKE="automake"}
## -------------------------- ##
## Source external libraries. ##
## -------------------------- ##
# Much of our low-level functionality needs to be sourced from external
# libraries, which are installed to $pkgauxdir.
# Set a version string for this script.
scriptversion=2015-01-20.17; # UTC
# General shell script boiler plate, and helper functions.
# Written by Gary V. Vaughan, 2004
# Copyright (C) 2004-2015 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# As a special exception to the GNU General Public License, if you distribute
# this file as part of a program or library that is built using GNU Libtool,
# you may include this file under the same distribution terms that you use
# for the rest of that program.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Please report bugs or propose patches to gary@gnu.org.
## ------ ##
## Usage. ##
## ------ ##
# Evaluate this file near the top of your script to gain access to
# the functions and variables defined here:
#
# . `echo "$0" | ${SED-sed} 's|[^/]*$||'`/build-aux/funclib.sh
#
# If you need to override any of the default environment variable
# settings, do that before evaluating this file.
## -------------------- ##
## Shell normalisation. ##
## -------------------- ##
# Some shells need a little help to be as Bourne compatible as possible.
# Before doing anything else, make sure all that help has been provided!
DUALCASE=1; export DUALCASE # for MKS sh
# Zsh: switch into sh-emulation mode so word splitting and parameter
# expansion behave like a Bourne shell.
if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
# is contrary to our usage. Disable this feature.
alias -g '${1+"$@"}'='"$@"'
setopt NO_GLOB_SUBST
else
# Other shells: enable POSIX mode when the shell supports it.
case `(set -o) 2>/dev/null` in *posix*) set -o posix ;; esac
fi
# NLS nuisances: We save the old values in case they are required later.
_G_user_locale=
_G_safe_locale=
# For each locale variable that is set: remember its value, force it to
# "C", and accumulate shell fragments that restore the user's locale
# (_G_user_locale) or re-force the safe one (_G_safe_locale).
for _G_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
do
eval "if test set = \"\${$_G_var+set}\"; then
save_$_G_var=\$$_G_var
$_G_var=C
export $_G_var
_G_user_locale=\"$_G_var=\\\$save_\$_G_var; \$_G_user_locale\"
_G_safe_locale=\"$_G_var=C; \$_G_safe_locale\"
fi"
done
# CDPATH would make `cd` print directories and break captured output.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
# Make sure IFS has a sensible default
sp=' '
nl='
'
IFS="$sp $nl"
# Some systems use ';' as a PATH separator; detect that by probing
# whether a ';'-separated PATH works while a ':'-separated one fails.
if test "${PATH_SEPARATOR+set}" != set; then
PATH_SEPARATOR=:
(PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
(PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
PATH_SEPARATOR=';'
}
fi
## ------------------------- ##
## Locate command utilities. ##
## ------------------------- ##
# func_executable_p FILE
# ----------------------
# Check that FILE is an executable regular file.
func_executable_p ()
{
# True iff $1 is a regular file with the execute permission bit set
# for the current user (directories are rejected by the -f test).
if test -f "$1"; then
test -x "$1"
else
false
fi
}
# func_path_progs PROGS_LIST CHECK_FUNC [PATH]
# --------------------------------------------
# Search for either a program that responds to --version with output
# containing "GNU", or else returned by CHECK_FUNC otherwise, by
# trying all the directories in PATH with each of the elements of
# PROGS_LIST.
#
# CHECK_FUNC should accept the path to a candidate program, and
# set $func_check_prog_result if it truncates its output less than
# $_G_path_prog_max characters.
func_path_progs ()
{
# $1 = whitespace-separated list of candidate program names
# $2 = shell function used to score a non-GNU candidate
# $3 = optional search path (defaults to $PATH)
_G_progs_list=$1
_G_check_func=$2
_G_PATH=${3-"$PATH"}
_G_path_prog_max=0
_G_path_prog_found=false
# Split the search path on PATH_SEPARATOR; IFS is restored at the top
# of the loop body so the inner commands see normal word splitting.
_G_save_IFS=$IFS; IFS=${PATH_SEPARATOR-:}
for _G_dir in $_G_PATH; do
IFS=$_G_save_IFS
# An empty path element means the current directory.
test -z "$_G_dir" && _G_dir=.
for _G_prog_name in $_G_progs_list; do
# Also try a .EXE suffix for Windows-style hosts.
for _exeext in '' .EXE; do
_G_path_prog=$_G_dir/$_G_prog_name$_exeext
func_executable_p "$_G_path_prog" || continue
case `"$_G_path_prog" --version 2>&1` in
# A GNU implementation is always acceptable: take it and stop.
*GNU*) func_path_progs_result=$_G_path_prog _G_path_prog_found=: ;;
# Otherwise let CHECK_FUNC decide whether this candidate beats
# the best one seen so far.
*) $_G_check_func $_G_path_prog
func_path_progs_result=$func_check_prog_result
;;
esac
# Leave all three loops once a GNU program has been found.
$_G_path_prog_found && break 3
done
done
done
IFS=$_G_save_IFS
# Finding no candidate at all is fatal.
# NOTE(review): the message says "sed" even when searching for other
# tools (e.g. grep) — wording inherited from upstream.
test -z "$func_path_progs_result" && {
echo "no acceptable sed could be found in \$PATH" >&2
exit 1
}
}
# We want to be able to use the functions in this file before configure
# has figured out where the best binaries are kept, which means we have
# to search for them ourselves - except when the results are already set
# where we skip the searches.
# Unless the user overrides by setting SED, search the path for either GNU
# sed, or the sed that truncates its output the least.
test -z "$SED" && {
# Build a worst-case sed script (2^7 copies of a long substitution,
# capped at 99 lines) used to stress-test each candidate sed.
_G_sed_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
for _G_i in 1 2 3 4 5 6 7; do
_G_sed_script=$_G_sed_script$nl$_G_sed_script
done
echo "$_G_sed_script" 2>/dev/null | sed 99q >conftest.sed
_G_sed_script=
# func_check_prog_sed SED-PROGRAM
# ------------------------------
# Repeatedly double the test input and run the candidate sed over it;
# record in func_check_prog_result the candidate coping with the
# largest input (highest doubling count) without truncating output.
func_check_prog_sed ()
{
_G_path_prog=$1
_G_count=0
printf 0123456789 >conftest.in
while :
do
cat conftest.in conftest.in >conftest.tmp
mv conftest.tmp conftest.in
cp conftest.in conftest.nl
echo '' >> conftest.nl
# Fix: restore the I/O redirections lost from this copy of the
# script; without "<conftest.nl >conftest.out" the candidate sed
# has no input and conftest.out is never written, so every
# candidate fails the diff below.
"$_G_path_prog" -f conftest.sed <conftest.nl >conftest.out 2>/dev/null || break
diff conftest.out conftest.nl >/dev/null 2>&1 || break
_G_count=`expr $_G_count + 1`
if test "$_G_count" -gt "$_G_path_prog_max"; then
# Best one so far, save it but keep looking for a better one
func_check_prog_result=$_G_path_prog
_G_path_prog_max=$_G_count
fi
# 10*(2^10) chars as input seems more than enough
test 10 -lt "$_G_count" && break
done
rm -f conftest.in conftest.tmp conftest.nl conftest.out
}
func_path_progs "sed gsed" func_check_prog_sed $PATH:/usr/xpg4/bin
rm -f conftest.sed
SED=$func_path_progs_result
}
# Unless the user overrides by setting GREP, search the path for either GNU
# grep, or the grep that truncates its output the least.
test -z "$GREP" && {
# func_check_prog_grep GREP-PROGRAM
# --------------------------------
# Score a non-GNU grep candidate by how large an input it can process
# without truncating its output; the best candidate is remembered in
# func_check_prog_result.
func_check_prog_grep ()
{
_G_path_prog=$1
_G_count=0
_G_path_prog_max=0
printf 0123456789 >conftest.in
while :
do
# Double the input, then append a line grep is expected to match.
cat conftest.in conftest.in >conftest.tmp
mv conftest.tmp conftest.in
cp conftest.in conftest.nl
echo 'GREP' >> conftest.nl
# Fix: restore the I/O redirections lost from this copy of the
# script; without "<conftest.nl >conftest.out" the candidate grep
# reads the wrong file and conftest.out is never created.
"$_G_path_prog" -e 'GREP$' -e '-(cannot match)-' <conftest.nl >conftest.out 2>/dev/null || break
diff conftest.out conftest.nl >/dev/null 2>&1 || break
_G_count=`expr $_G_count + 1`
if test "$_G_count" -gt "$_G_path_prog_max"; then
# Best one so far, save it but keep looking for a better one
func_check_prog_result=$_G_path_prog
_G_path_prog_max=$_G_count
fi
# 10*(2^10) chars as input seems more than enough
test 10 -lt "$_G_count" && break
done
rm -f conftest.in conftest.tmp conftest.nl conftest.out
}
func_path_progs "grep ggrep" func_check_prog_grep $PATH:/usr/xpg4/bin
GREP=$func_path_progs_result
}
## ------------------------------- ##
## User overridable command paths. ##
## ------------------------------- ##
# All uppercase variable names are used for environment variables. These
# variables can be overridden by the user before calling a script that
# uses them if a suitable command of that name is not already available
# in the command search PATH.
: ${CP="cp -f"}
: ${ECHO="printf %s\n"}
: ${EGREP="$GREP -E"}
: ${FGREP="$GREP -F"}
: ${LN_S="ln -s"}
: ${MAKE="make"}
: ${MKDIR="mkdir"}
: ${MV="mv -f"}
: ${RM="rm -f"}
: ${SHELL="${CONFIG_SHELL-/bin/sh}"}
## -------------------- ##
## Useful sed snippets. ##
## -------------------- ##
sed_dirname='s|/[^/]*$||'
sed_basename='s|^.*/||'
# Sed substitution that helps us do robust quoting. It backslashifies
# metacharacters that are still active within double-quoted strings.
sed_quote_subst='s|\([`"$\\]\)|\\\1|g'
# Same as above, but do not quote variable references.
sed_double_quote_subst='s/\(["`\\]\)/\\\1/g'
# Sed substitution that turns a string into a regex matching for the
# string literally.
sed_make_literal_regex='s|[].[^$\\*\/]|\\&|g'
# Sed substitution that converts a w32 file name or path
# that contains forward slashes, into one that contains
# (escaped) backslashes. A very naive implementation.
sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g'
# Re-'\' parameter expansions in output of sed_double_quote_subst that
# were '\'-ed in input to the same. If an odd number of '\' preceded a
# '$' in input to sed_double_quote_subst, that '$' was protected from
# expansion. Since each input '\' is now two '\'s, look for any number
# of runs of four '\'s followed by two '\'s and then a '$'. '\' that '$'.
_G_bs='\\'
_G_bs2='\\\\'
_G_bs4='\\\\\\\\'
_G_dollar='\$'
sed_double_backslash="\
s/$_G_bs4/&\\
/g
s/^$_G_bs2$_G_dollar/$_G_bs&/
s/\\([^$_G_bs]\\)$_G_bs2$_G_dollar/\\1$_G_bs2$_G_bs$_G_dollar/g
s/\n//g"
## ----------------- ##
## Global variables. ##
## ----------------- ##
# Except for the global variables explicitly listed below, the following
# functions in the '^func_' namespace, and the '^require_' namespace
# variables initialised in the 'Resource management' section, sourcing
# this file will not pollute your global namespace with anything
# else. There's no portable way to scope variables in Bourne shell
# though, so actually running these functions will sometimes place
# results into a variable named after the function, and often use
# temporary variables in the '^_G_' namespace. If you are careful to
# avoid using those namespaces casually in your sourcing script, things
# should continue to work as you expect. And, of course, you can freely
# overwrite any of the functions or variables defined here before
# calling anything to customize them.
EXIT_SUCCESS=0
EXIT_FAILURE=1
EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing.
EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake.
# Allow overriding, eg assuming that you follow the convention of
# putting '$debug_cmd' at the start of all your functions, you can get
# bash to show function call trace with:
#
# debug_cmd='eval echo "${FUNCNAME[0]} $*" >&2' bash your-script-name
debug_cmd=${debug_cmd-":"}
exit_cmd=:
# By convention, finish your script with:
#
# exit $exit_status
#
# so that you can set exit_status to non-zero if you want to indicate
# something went wrong during execution without actually bailing out at
# the point of failure.
exit_status=$EXIT_SUCCESS
# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
# is ksh but when the shell is invoked as "sh" and the current value of
# the _XPG environment variable is not equal to 1 (one), the special
# positional parameter $0, within a function call, is the name of the
# function.
progpath=$0
# The name of this program.
progname=`$ECHO "$progpath" |$SED "$sed_basename"`
# Make sure we have an absolute progpath for reexecution:
case $progpath in
# Already absolute (Unix-style, or a DOS drive letter): keep as-is.
[\\/]*|[A-Za-z]:\\*) ;;
# Relative path containing a separator: resolve against the cwd.
*[\\/]*)
progdir=`$ECHO "$progpath" |$SED "$sed_dirname"`
progdir=`cd "$progdir" && pwd`
progpath=$progdir/$progname
;;
# Bare name: search $PATH for the directory that holds it,
# defaulting to the current directory if none is found.
*)
_G_IFS=$IFS
IFS=${PATH_SEPARATOR-:}
for progdir in $PATH; do
IFS=$_G_IFS
test -x "$progdir/$progname" && break
done
IFS=$_G_IFS
test -n "$progdir" || progdir=`pwd`
progpath=$progdir/$progname
;;
esac
## ----------------- ##
## Standard options. ##
## ----------------- ##
# The following options affect the operation of the functions defined
# below, and should be set appropriately depending on run-time para-
# meters passed on the command line.
opt_dry_run=false
opt_quiet=false
opt_verbose=false
# Categories 'all' and 'none' are always available. Append any others
# you will pass as the first argument to func_warning from your own
# code.
warning_categories=
# By default, display warnings according to 'opt_warning_types'. Set
# 'warning_func' to ':' to elide all warnings, or func_fatal_error to
# treat the next displayed warning as a fatal error.
warning_func=func_warn_and_continue
# Set to 'all' to display all warnings, 'none' to suppress all
# warnings, or a space delimited list of some subset of
# 'warning_categories' to display only the listed warnings.
opt_warning_types=all
## -------------------- ##
## Resource management. ##
## -------------------- ##
# This section contains definitions for functions that each ensure a
# particular resource (a file, or a non-empty configuration variable for
# example) is available, and if appropriate to extract default values
# from pertinent package files. Call them using their associated
# 'require_*' variable to ensure that they are executed, at most, once.
#
# It's entirely deliberate that calling these functions can set
# variables that don't obey the namespace limitations obeyed by the rest
# of this file, in order that that they be as useful as possible to
# callers.
# require_term_colors
# -------------------
# Allow display of bold text on terminals that support it.
require_term_colors=func_require_term_colors
func_require_term_colors ()
{
$debug_cmd
# Only set up color sequences when stdout is a terminal.
test -t 1 && {
# COLORTERM and USE_ANSI_COLORS environment variables take
# precedence, because most terminfo databases neglect to describe
# whether color sequences are supported.
test -n "${COLORTERM+set}" && : ${USE_ANSI_COLORS="1"}
if test 1 = "$USE_ANSI_COLORS"; then
# Standard ANSI escape sequences
# NOTE(review): each sequence should start with a literal ESC
# (0x1B) byte before the '['; it appears to have been lost in
# transcription of this copy — verify against the original file.
tc_reset='[0m'
tc_bold='[1m'; tc_standout='[7m'
tc_red='[31m'; tc_green='[32m'
tc_blue='[34m'; tc_cyan='[36m'
else
# Otherwise trust the terminfo database after all.
test -n "`tput sgr0 2>/dev/null`" && {
tc_reset=`tput sgr0`
test -n "`tput bold 2>/dev/null`" && tc_bold=`tput bold`
tc_standout=$tc_bold
test -n "`tput smso 2>/dev/null`" && tc_standout=`tput smso`
test -n "`tput setaf 1 2>/dev/null`" && tc_red=`tput setaf 1`
test -n "`tput setaf 2 2>/dev/null`" && tc_green=`tput setaf 2`
test -n "`tput setaf 4 2>/dev/null`" && tc_blue=`tput setaf 4`
test -n "`tput setaf 5 2>/dev/null`" && tc_cyan=`tput setaf 5`
}
fi
}
# Run at most once: later calls via $require_term_colors are no-ops.
require_term_colors=:
}
## ----------------- ##
## Function library. ##
## ----------------- ##
# This section contains a variety of useful functions to call in your
# scripts. Take note of the portable wrappers for features provided by
# some modern shells, which will fall back to slower equivalents on
# less featureful shells.
# func_append VAR VALUE
# ---------------------
# Append VALUE onto the existing contents of VAR.
# We should try to minimise forks, especially on Windows where they are
# unreasonably slow, so skip the feature probes when bash or zsh are
# being used:
if test set = "${BASH_VERSION+set}${ZSH_VERSION+set}"; then
# bash and zsh are known to support arithmetic and XSI operations,
# so skip the (slow) runtime probes for them.
: ${_G_HAVE_ARITH_OP="yes"}
: ${_G_HAVE_XSI_OPS="yes"}
# The += operator was introduced in bash 3.1
case $BASH_VERSION in
[12].* | 3.0 | 3.0*) ;;
*)
: ${_G_HAVE_PLUSEQ_OP="yes"}
;;
esac
fi
# _G_HAVE_PLUSEQ_OP
# Can be empty, in which case the shell is probed, "yes" if += is
# useable or anything else if it does not work.
test -z "$_G_HAVE_PLUSEQ_OP" \
&& (eval 'x=a; x+=" b"; test "a b" = "$x"') 2>/dev/null \
&& _G_HAVE_PLUSEQ_OP=yes
if test yes = "$_G_HAVE_PLUSEQ_OP"
then
# This is an XSI compatible shell, allowing a faster implementation...
# (the definition is eval'ed so non-XSI shells never parse '+=').
eval 'func_append ()
{
$debug_cmd
eval "$1+=\$2"
}'
else
# ...otherwise fall back to using expr, which is often a shell builtin.
func_append ()
{
$debug_cmd
eval "$1=\$$1\$2"
}
fi
# func_append_quoted VAR VALUE
# ----------------------------
# Quote VALUE and append to the end of shell variable VAR, separated
# by a space.
if test yes = "$_G_HAVE_PLUSEQ_OP"; then
# += available: append the quoted value in a single eval.
eval 'func_append_quoted ()
{
$debug_cmd
func_quote_for_eval "$2"
eval "$1+=\\ \$func_quote_for_eval_result"
}'
else
# Portable fallback: re-expand the variable and concatenate.
func_append_quoted ()
{
$debug_cmd
func_quote_for_eval "$2"
eval "$1=\$$1\\ \$func_quote_for_eval_result"
}
fi
# func_append_uniq VAR VALUE
# --------------------------
# Append unique VALUE onto the existing contents of VAR, assuming
# entries are delimited by the first character of VALUE. For example:
#
# func_append_uniq options " --another-option option-argument"
#
# will only append to $options if " --another-option option-argument "
# is not already present somewhere in $options already (note spaces at
# each end implied by leading space in second argument).
func_append_uniq ()
{
$debug_cmd
eval _G_current_value='`$ECHO $'$1'`'
# The delimiter is the first character of VALUE.
_G_delim=`expr "$2" : '\(.\)'`
# Wrap the current value in delimiters so the first and last entries
# match the *"$2$_G_delim"* pattern just like interior ones.
case $_G_delim$_G_current_value$_G_delim in
*"$2$_G_delim"*) ;;
*) func_append "$@" ;;
esac
}
# func_arith TERM...
# ------------------
# Set func_arith_result to the result of evaluating TERMs.
# Probe for POSIX $(( )) arithmetic unless already known.
test -z "$_G_HAVE_ARITH_OP" \
&& (eval 'test 2 = $(( 1 + 1 ))') 2>/dev/null \
&& _G_HAVE_ARITH_OP=yes
if test yes = "$_G_HAVE_ARITH_OP"; then
# Shell arithmetic avoids forking expr.
eval 'func_arith ()
{
$debug_cmd
func_arith_result=$(( $* ))
}'
else
# Fallback: expr, which is often a shell builtin anyway.
func_arith ()
{
$debug_cmd
func_arith_result=`expr "$@"`
}
fi
# func_basename FILE
# ------------------
# Set func_basename_result to FILE with everything up to and including
# the last / stripped.
if test yes = "$_G_HAVE_XSI_OPS"; then
# If this shell supports suffix pattern removal, then use it to avoid
# forking. Hide the definitions in single quotes in case the shell
# chokes on unsupported syntax...
_b='func_basename_result=${1##*/}'
_d='case $1 in
*/*) func_dirname_result=${1%/*}$2 ;;
* ) func_dirname_result=$3 ;;
esac'
else
# ...otherwise fall back to using sed.
_b='func_basename_result=`$ECHO "$1" |$SED "$sed_basename"`'
_d='func_dirname_result=`$ECHO "$1" |$SED "$sed_dirname"`
if test "X$func_dirname_result" = "X$1"; then
func_dirname_result=$3
else
func_append func_dirname_result "$2"
fi'
fi
# Splice the chosen implementation ($_b) into the function body.
eval 'func_basename ()
{
$debug_cmd
'"$_b"'
}'
# func_dirname FILE APPEND NONDIR_REPLACEMENT
# -------------------------------------------
# Compute the dirname of FILE. If nonempty, add APPEND to the result,
# otherwise set result to NONDIR_REPLACEMENT.
# Body comes from $_d, selected by the XSI/sed probe above.
eval 'func_dirname ()
{
$debug_cmd
'"$_d"'
}'
# func_dirname_and_basename FILE APPEND NONDIR_REPLACEMENT
# --------------------------------------------------------
# Perform func_basename and func_dirname in a single function
# call:
# dirname: Compute the dirname of FILE. If nonempty,
# add APPEND to the result, otherwise set result
# to NONDIR_REPLACEMENT.
# value returned in "$func_dirname_result"
# basename: Compute filename of FILE.
# value returned in "$func_basename_result"
# For efficiency, we do not delegate to the functions above but instead
# duplicate the functionality here.
# Inline both $_b and $_d rather than calling the functions above,
# saving one function-call round trip per use.
eval 'func_dirname_and_basename ()
{
$debug_cmd
'"$_b"'
'"$_d"'
}'
# func_echo ARG...
# ----------------
# Echo program name prefixed message.
func_echo ()
{
$debug_cmd
_G_message=$*
# Split the message on newlines so every line gets the program-name
# prefix; IFS is saved and restored around the word split.
func_echo_IFS=$IFS
IFS=$nl
for _G_line in $_G_message; do
IFS=$func_echo_IFS
$ECHO "$progname: $_G_line"
done
IFS=$func_echo_IFS
}
# func_echo_all ARG...
# --------------------
# Invoke $ECHO with all args, space-separated.
func_echo_all ()
{
# "$*" joins all arguments with a single space.
$ECHO "$*"
}
# func_echo_infix_1 INFIX ARG...
# ------------------------------
# Echo program name, followed by INFIX on the first line, with any
# additional lines not showing INFIX.
func_echo_infix_1 ()
{
$debug_cmd
$require_term_colors
_G_infix=$1; shift
_G_indent=$_G_infix
_G_prefix="$progname: $_G_infix: "
_G_message=$*
# Strip color escape sequences before counting printable length
for _G_tc in "$tc_reset" "$tc_bold" "$tc_standout" "$tc_red" "$tc_green" "$tc_blue" "$tc_cyan"
do
test -n "$_G_tc" && {
_G_esc_tc=`$ECHO "$_G_tc" | $SED "$sed_make_literal_regex"`
_G_indent=`$ECHO "$_G_indent" | $SED "s|$_G_esc_tc||g"`
}
done
# Continuation lines are indented with spaces to align under the infix.
_G_indent="$progname: "`echo "$_G_indent" | $SED 's|.| |g'`" " ## exclude from sc_prohibit_nested_quotes
func_echo_infix_1_IFS=$IFS
IFS=$nl
for _G_line in $_G_message; do
IFS=$func_echo_infix_1_IFS
$ECHO "$_G_prefix$tc_bold$_G_line$tc_reset" >&2
# After the first line, replace INFIX with blank indentation.
_G_prefix=$_G_indent
done
IFS=$func_echo_infix_1_IFS
}
# func_error ARG...
# -----------------
# Echo program name prefixed message to standard error.
func_error ()
{
$debug_cmd
$require_term_colors
# Delegate to the infix helper with a highlighted "error" tag.
func_echo_infix_1 " $tc_standout${tc_red}error$tc_reset" "$*" >&2
}
# func_fatal_error ARG...
# -----------------------
# Echo program name prefixed message to standard error, and exit.
func_fatal_error ()
{
$debug_cmd
func_error "$*"
# EXIT_FAILURE is set elsewhere in this script.
exit $EXIT_FAILURE
}
# func_grep EXPRESSION FILENAME
# -----------------------------
# Check whether EXPRESSION matches any line of FILENAME, without output.
func_grep ()
{
$debug_cmd
# Only the exit status matters; all output is discarded.
$GREP "$1" "$2" >/dev/null 2>&1
}
# func_len STRING
# ---------------
# Set func_len_result to the length of STRING. STRING may not
# start with a hyphen.
# Probe for XSI parameter expansion unless already known.
test -z "$_G_HAVE_XSI_OPS" \
&& (eval 'x=a/b/c;
test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \
&& _G_HAVE_XSI_OPS=yes
if test yes = "$_G_HAVE_XSI_OPS"; then
# ${#1} computes the length without forking.
eval 'func_len ()
{
$debug_cmd
func_len_result=${#1}
}'
else
# Fallback: expr; on failure report $max_cmd_len (a safe upper bound).
func_len ()
{
$debug_cmd
func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
}
fi
# func_mkdir_p DIRECTORY-PATH
# ---------------------------
# Make sure the entire path to DIRECTORY-PATH is available.
func_mkdir_p ()
{
$debug_cmd
_G_directory_path=$1
_G_dir_list=
if test -n "$_G_directory_path" && test : != "$opt_dry_run"; then
# Protect directory names starting with '-'
case $_G_directory_path in
-*) _G_directory_path=./$_G_directory_path ;;
esac
# While some portion of DIR does not yet exist...
while test ! -d "$_G_directory_path"; do
# ...make a list in topmost first order. Use a colon delimited
# list in case some portion of path contains whitespace.
_G_dir_list=$_G_directory_path:$_G_dir_list
# If the last portion added has no slash in it, the list is done
case $_G_directory_path in */*) ;; *) break ;; esac
# ...otherwise throw away the child directory and loop
_G_directory_path=`$ECHO "$_G_directory_path" | $SED -e "$sed_dirname"`
done
_G_dir_list=`$ECHO "$_G_dir_list" | $SED 's|:*$||'`
# Create each ancestor in turn, splitting the list on ':'.
func_mkdir_p_IFS=$IFS; IFS=:
for _G_dir in $_G_dir_list; do
IFS=$func_mkdir_p_IFS
# mkdir can fail with a 'File exists' error if two processes
# try to create one of the directories concurrently. Don't
# stop in that case!
$MKDIR "$_G_dir" 2>/dev/null || :
done
IFS=$func_mkdir_p_IFS
# Bail out if we (or some other process) failed to create a directory.
test -d "$_G_directory_path" || \
func_fatal_error "Failed to create '$1'"
fi
}
# func_mktempdir [BASENAME]
# -------------------------
# Make a temporary directory that won't clash with other running
# libtool processes, and avoids race conditions if possible. If
# given, BASENAME is the basename for that directory.
func_mktempdir ()
{
$debug_cmd
_G_template=${TMPDIR-/tmp}/${1-$progname}
if test : = "$opt_dry_run"; then
# Return a directory name, but don't create it in dry-run mode
_G_tmpdir=$_G_template-$$
else
# If mktemp works, use that first and foremost
_G_tmpdir=`mktemp -d "$_G_template-XXXXXXXX" 2>/dev/null`
if test ! -d "$_G_tmpdir"; then
# Failing that, at least try and use $RANDOM to avoid a race
_G_tmpdir=$_G_template-${RANDOM-0}$$
# Restrict permissions while creating, then restore the old umask.
func_mktempdir_umask=`umask`
umask 0077
$MKDIR "$_G_tmpdir"
umask $func_mktempdir_umask
fi
# If we're not in dry-run mode, bomb out on failure
test -d "$_G_tmpdir" || \
func_fatal_error "cannot create temporary directory '$_G_tmpdir'"
fi
# The only stdout output: the chosen directory name.
$ECHO "$_G_tmpdir"
}
# func_normal_abspath PATH
# ------------------------
# Remove doubled-up and trailing slashes, "." path components,
# and cancel out any ".." path components in PATH after making
# it an absolute path.
func_normal_abspath ()
{
$debug_cmd
# These SED scripts presuppose an absolute path with a trailing slash.
_G_pathcar='s|^/\([^/]*\).*$|\1|'
_G_pathcdr='s|^/[^/]*||'
_G_removedotparts=':dotsl
s|/\./|/|g
t dotsl
s|/\.$|/|'
_G_collapseslashes='s|/\{1,\}|/|g'
_G_finalslash='s|/*$|/|'
# Start from root dir and reassemble the path.
func_normal_abspath_result=
func_normal_abspath_tpath=$1
func_normal_abspath_altnamespace=
case $func_normal_abspath_tpath in
"")
# Empty path, that just means $cwd.
func_stripname '' '/' "`pwd`"
func_normal_abspath_result=$func_stripname_result
return
;;
# The next three entries are used to spot a run of precisely
# two leading slashes without using negated character classes;
# we take advantage of case's first-match behaviour.
///*)
# Unusual form of absolute path, do nothing.
;;
//*)
# Not necessarily an ordinary path; POSIX reserves leading '//'
# and for example Cygwin uses it to access remote file shares
# over CIFS/SMB, so we conserve a leading double slash if found.
func_normal_abspath_altnamespace=/
;;
/*)
# Absolute path, do nothing.
;;
*)
# Relative path, prepend $cwd.
func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
;;
esac
# Cancel out all the simple stuff to save iterations. We also want
# the path to end with a slash for ease of parsing, so make sure
# there is one (and only one) here.
func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
-e "$_G_removedotparts" -e "$_G_collapseslashes" -e "$_G_finalslash"`
while :; do
# Processed it all yet?
if test / = "$func_normal_abspath_tpath"; then
# If we ascended to the root using ".." the result may be empty now.
if test -z "$func_normal_abspath_result"; then
func_normal_abspath_result=/
fi
break
fi
# car: the first path component; cdr: the rest of the path.
func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
-e "$_G_pathcar"`
func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
-e "$_G_pathcdr"`
# Figure out what to do with it
case $func_normal_abspath_tcomponent in
"")
# Trailing empty path component, ignore it.
;;
..)
# Parent dir; strip last assembled component from result.
func_dirname "$func_normal_abspath_result"
func_normal_abspath_result=$func_dirname_result
;;
*)
# Actual path component, append it.
func_append func_normal_abspath_result "/$func_normal_abspath_tcomponent"
;;
esac
done
# Restore leading double-slash if one was found on entry.
func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
}
# func_notquiet ARG...
# --------------------
# Echo program name prefixed message only when not in quiet mode.
func_notquiet ()
{
$debug_cmd
$opt_quiet || func_echo ${1+"$@"}
# A bug in bash halts the script if the last line of a function
# fails when set -e is in force, so we need another command to
# work around that:
:
}
# func_relative_path SRCDIR DSTDIR
# --------------------------------
# Set func_relative_path_result to the relative path from SRCDIR to DSTDIR.
func_relative_path ()
{
$debug_cmd
func_relative_path_result=
# Canonicalise both endpoints before comparing them.
func_normal_abspath "$1"
func_relative_path_tlibdir=$func_normal_abspath_result
func_normal_abspath "$2"
func_relative_path_tbindir=$func_normal_abspath_result
# Ascend the tree starting from libdir
while :; do
# check if we have found a prefix of bindir
case $func_relative_path_tbindir in
$func_relative_path_tlibdir)
# found an exact match
func_relative_path_tcancelled=
break
;;
$func_relative_path_tlibdir*)
# found a matching prefix
func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
func_relative_path_tcancelled=$func_stripname_result
if test -z "$func_relative_path_result"; then
func_relative_path_result=.
fi
break
;;
*)
# No match yet: ascend one level, accumulating "../" per step.
func_dirname $func_relative_path_tlibdir
func_relative_path_tlibdir=$func_dirname_result
if test -z "$func_relative_path_tlibdir"; then
# Have to descend all the way to the root!
func_relative_path_result=../$func_relative_path_result
func_relative_path_tcancelled=$func_relative_path_tbindir
break
fi
func_relative_path_result=../$func_relative_path_result
;;
esac
done
# Now calculate path; take care to avoid doubling-up slashes.
func_stripname '' '/' "$func_relative_path_result"
func_relative_path_result=$func_stripname_result
func_stripname '/' '/' "$func_relative_path_tcancelled"
if test -n "$func_stripname_result"; then
func_append func_relative_path_result "/$func_stripname_result"
fi
# Normalisation. If bindir is libdir, return '.' else relative path.
if test -n "$func_relative_path_result"; then
func_stripname './' '' "$func_relative_path_result"
func_relative_path_result=$func_stripname_result
fi
test -n "$func_relative_path_result" || func_relative_path_result=.
:
}
# func_quote_for_eval ARG...
# --------------------------
# Aesthetically quote ARGs to be evaled later.
# This function returns two values:
# i) func_quote_for_eval_result
# double-quoted, suitable for a subsequent eval
# ii) func_quote_for_eval_unquoted_result
# has all characters that are still active within double
# quotes backslashified.
func_quote_for_eval ()
{
$debug_cmd
func_quote_for_eval_unquoted_result=
func_quote_for_eval_result=
while test 0 -lt $#; do
case $1 in
*[\\\`\"\$]*)
# Backslash-escape characters that stay active inside "..."
_G_unquoted_arg=`printf '%s\n' "$1" |$SED "$sed_quote_subst"` ;;
*)
_G_unquoted_arg=$1 ;;
esac
if test -n "$func_quote_for_eval_unquoted_result"; then
func_append func_quote_for_eval_unquoted_result " $_G_unquoted_arg"
else
func_append func_quote_for_eval_unquoted_result "$_G_unquoted_arg"
fi
case $_G_unquoted_arg in
# Double-quote args containing shell metacharacters to delay
# word splitting, command substitution and variable expansion
# for a subsequent eval.
# Many Bourne shells cannot handle close brackets correctly
# in scan sets, so we specify it separately.
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
_G_quoted_arg=\"$_G_unquoted_arg\"
;;
*)
_G_quoted_arg=$_G_unquoted_arg
;;
esac
if test -n "$func_quote_for_eval_result"; then
func_append func_quote_for_eval_result " $_G_quoted_arg"
else
func_append func_quote_for_eval_result "$_G_quoted_arg"
fi
shift
done
}
# func_quote_for_expand ARG
# -------------------------
# Aesthetically quote ARG to be evaled later; same as above,
# but do not quote variable references.
func_quote_for_expand ()
{
$debug_cmd
case $1 in
*[\\\`\"]*)
# Escape quoting characters but, unlike func_quote_for_eval,
# leave $ alone so variable references still expand.
_G_arg=`$ECHO "$1" | $SED \
-e "$sed_double_quote_subst" -e "$sed_double_backslash"` ;;
*)
_G_arg=$1 ;;
esac
case $_G_arg in
# Double-quote args containing shell metacharacters to delay
# word splitting and command substitution for a subsequent eval.
# Many Bourne shells cannot handle close brackets correctly
# in scan sets, so we specify it separately.
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
_G_arg=\"$_G_arg\"
;;
esac
func_quote_for_expand_result=$_G_arg
}
# func_stripname PREFIX SUFFIX NAME
# ---------------------------------
# strip PREFIX and SUFFIX from NAME, and store in func_stripname_result.
# PREFIX and SUFFIX must not contain globbing or regex special
# characters, hashes, percent signs, but SUFFIX may contain a leading
# dot (in which case that matches only a dot).
if test yes = "$_G_HAVE_XSI_OPS"; then
# XSI ${var#pat}/${var%pat} strip without forking sed.
eval 'func_stripname ()
{
$debug_cmd
# pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
# positional parameters, so assign one to ordinary variable first.
func_stripname_result=$3
func_stripname_result=${func_stripname_result#"$1"}
func_stripname_result=${func_stripname_result%"$2"}
}'
else
func_stripname ()
{
$debug_cmd
# A leading dot in SUFFIX must match literally, hence the extra escape.
case $2 in
.*) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%\\\\$2\$%%"`;;
*) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%$2\$%%"`;;
esac
}
fi
# func_show_eval CMD [FAIL_EXP]
# -----------------------------
# Unless opt_quiet is true, then output CMD. Then, if opt_dryrun is
# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP
# is given, then evaluate it.
func_show_eval ()
{
$debug_cmd
_G_cmd=$1
# Default failure action is the no-op ':'.
_G_fail_exp=${2-':'}
func_quote_for_expand "$_G_cmd"
eval "func_notquiet $func_quote_for_expand_result"
$opt_dry_run || {
eval "$_G_cmd"
_G_status=$?
if test 0 -ne "$_G_status"; then
# Run FAIL_EXP with $? primed to the command's exit status.
eval "(exit $_G_status); $_G_fail_exp"
fi
}
}
# func_show_eval_locale CMD [FAIL_EXP]
# ------------------------------------
# Unless opt_quiet is true, then output CMD. Then, if opt_dryrun is
# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP
# is given, then evaluate it. Use the saved locale for evaluation.
func_show_eval_locale ()
{
$debug_cmd
_G_cmd=$1
_G_fail_exp=${2-':'}
$opt_quiet || {
func_quote_for_expand "$_G_cmd"
eval "func_echo $func_quote_for_expand_result"
}
$opt_dry_run || {
# Run CMD under the user's original locale, then switch back to
# the script's safe locale before inspecting the result.
eval "$_G_user_locale
$_G_cmd"
_G_status=$?
eval "$_G_safe_locale"
if test 0 -ne "$_G_status"; then
eval "(exit $_G_status); $_G_fail_exp"
fi
}
}
# func_tr_sh
# ----------
# Turn $1 into a string suitable for a shell variable name.
# Result is stored in $func_tr_sh_result. All characters
# not in the set a-zA-Z0-9_ are replaced with '_'. Further,
# if $1 begins with a digit, a '_' is prepended as well.
func_tr_sh ()
{
$debug_cmd
case $1 in
[0-9]* | *[!a-zA-Z0-9_]*)
# Prepend '_' to a leading digit, then map every invalid char to '_'.
func_tr_sh_result=`$ECHO "$1" | $SED -e 's/^\([0-9]\)/_\1/' -e 's/[^a-zA-Z0-9_]/_/g'`
;;
* )
# Already a valid shell identifier: avoid the fork.
func_tr_sh_result=$1
;;
esac
}
# func_verbose ARG...
# -------------------
# Echo program name prefixed message in verbose mode only.
func_verbose ()
{
$debug_cmd
$opt_verbose && func_echo "$*"
# Trailing ':' guards against set -e when $opt_verbose is false.
:
}
# func_warn_and_continue ARG...
# -----------------------------
# Echo program name prefixed warning message to standard error.
func_warn_and_continue ()
{
$debug_cmd
$require_term_colors
# Non-fatal variant: print the warning and return.
func_echo_infix_1 "${tc_red}warning$tc_reset" "$*" >&2
}
# func_warning CATEGORY ARG...
# ----------------------------
# Echo program name prefixed warning message to standard error. Warning
# messages can be filtered according to CATEGORY, where this function
# elides messages where CATEGORY is not listed in the global variable
# 'opt_warning_types'.
func_warning ()
{
$debug_cmd
# CATEGORY must be in the warning_categories list!
case " $warning_categories " in
*" $1 "*) ;;
*) func_internal_error "invalid warning category '$1'" ;;
esac
_G_category=$1
shift
# Only emit if the user enabled this category; $warning_func is
# either a print function or func_fatal_error (for -W error).
case " $opt_warning_types " in
*" $_G_category "*) $warning_func ${1+"$@"} ;;
esac
}
# func_sort_ver VER1 VER2
# -----------------------
# 'sort -V' is not generally available.
# Note this deviates from the version comparison in automake
# in that it treats 1.5 < 1.5.0, and treats 1.4.4a < 1.4-p3a
# but this should suffice as we won't be specifying old
# version formats or redundant trailing .0 in bootstrap.conf.
# If we did want full compatibility then we should probably
# use m4_version_compare from autoconf.
func_sort_ver ()
{
$debug_cmd
# Numeric sort on up to nine dot-separated fields.
printf '%s\n%s\n' "$1" "$2" \
| sort -t. -k 1,1n -k 2,2n -k 3,3n -k 4,4n -k 5,5n -k 6,6n -k 7,7n -k 8,8n -k 9,9n
}
# func_lt_ver PREV CURR
# ---------------------
# Return true if PREV and CURR are in the correct order according to
# func_sort_ver, otherwise false. Use it like this:
#
# func_lt_ver "$prev_ver" "$proposed_ver" || func_fatal_error "..."
func_lt_ver ()
{
$debug_cmd
# PREV <= CURR iff PREV sorts first (or they are equal).
test "x$1" = x`func_sort_ver "$1" "$2" | $SED 1q`
}
# Local variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC"
# time-stamp-time-zone: "UTC"
# End:
#! /bin/sh
# Set a version string for this script.
scriptversion=2014-01-07.03; # UTC
# A portable, pluggable option parser for Bourne shell.
# Written by Gary V. Vaughan, 2010
# Copyright (C) 2010-2015 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
# Please report bugs or propose patches to gary@gnu.org.
## ------ ##
## Usage. ##
## ------ ##
# This file is a library for parsing options in your shell scripts along
# with assorted other useful supporting features that you can make use
# of too.
#
# For the simplest scripts you might need only:
#
# #!/bin/sh
# . relative/path/to/funclib.sh
# . relative/path/to/options-parser
# scriptversion=1.0
# func_options ${1+"$@"}
# eval set dummy "$func_options_result"; shift
# ...rest of your script...
#
# In order for the '--version' option to work, you will need to have a
# suitably formatted comment like the one at the top of this file
# starting with '# Written by ' and ending with '# warranty; '.
#
# For '-h' and '--help' to work, you will also need a one line
# description of your script's purpose in a comment directly above the
# '# Written by ' line, like the one at the top of this file.
#
# The default options also support '--debug', which will turn on shell
# execution tracing (see the comment above debug_cmd below for another
# use), and '--verbose' and the func_verbose function to allow your script
# to display verbose messages only when your user has specified
# '--verbose'.
#
# After sourcing this file, you can plug processing for additional
# options by amending the variables from the 'Configuration' section
# below, and following the instructions in the 'Option parsing'
# section further down.
## -------------- ##
## Configuration. ##
## -------------- ##
# You should override these variables in your script after sourcing this
# file so that they reflect the customisations you have added to the
# option parser.
# The usage line for option parsing errors and the start of '-h' and
# '--help' output messages. You can embed shell variables for delayed
# expansion at the time the message is displayed, but you will need to
# quote other shell meta-characters carefully to prevent them being
# expanded when the contents are evaled.
# Single-quoted so $progpath expands when the message is displayed.
usage='$progpath [OPTION]...'
# Short help message in response to '-h' and '--help'. Add to this or
# override it after sourcing this library to reflect the full set of
# options your script accepts.
usage_message="\
--debug enable verbose shell tracing
-W, --warnings=CATEGORY
report the warnings falling in CATEGORY [all]
-v, --verbose verbosely report processing
--version print version information and exit
-h, --help print short or long help message and exit
"
# Additional text appended to 'usage_message' in response to '--help'.
long_help_message="
Warning categories include:
'all' show all warnings
'none' turn off all the warnings
'error' warnings are treated as fatal errors"
# Help message printed before fatal option parsing errors.
# '\$progname' is escaped for delayed expansion at display time.
fatal_help="Try '\$progname --help' for more information."
## ------------------------- ##
## Hook function management. ##
## ------------------------- ##
# This section contains functions for adding, removing, and running hooks
# to the main code. A hook is just a named list of functions that can
# be run in order later on.
# func_hookable FUNC_NAME
# -----------------------
# Declare that FUNC_NAME will run hooks added with
# 'func_add_hook FUNC_NAME ...'.
func_hookable ()
{
$debug_cmd
# Register FUNC_NAME in the global whitelist checked by func_add_hook
# and func_run_hooks.
func_append hookable_fns " $1"
}
# func_add_hook FUNC_NAME HOOK_FUNC
# ---------------------------------
# Request that FUNC_NAME call HOOK_FUNC before it returns. FUNC_NAME must
# first have been declared "hookable" by a call to 'func_hookable'.
func_add_hook ()
{
$debug_cmd
case " $hookable_fns " in
*" $1 "*) ;;
*) func_fatal_error "'$1' does not accept hook functions." ;;
esac
# Hooks for FUNC_NAME accumulate in the variable ${FUNC_NAME}_hooks.
eval func_append ${1}_hooks '" $2"'
}
# func_remove_hook FUNC_NAME HOOK_FUNC
# ------------------------------------
# Remove HOOK_FUNC from the list of functions called by FUNC_NAME.
func_remove_hook ()
{
$debug_cmd
# Delete the ' HOOK_FUNC' entry from the ${FUNC_NAME}_hooks list.
eval ${1}_hooks='`$ECHO "\$'$1'_hooks" |$SED "s| '$2'||"`'
}
# func_run_hooks FUNC_NAME [ARG]...
# ---------------------------------
# Run all hook functions registered to FUNC_NAME.
# It is assumed that the list of hook functions contains nothing more
# than a whitespace-delimited list of legal shell function names, and
# no effort is wasted trying to catch shell meta-characters or preserve
# whitespace.
func_run_hooks ()
{
$debug_cmd
# FUNC_NAME must have been declared hookable with func_hookable;
# message wording matches the parallel guard in func_add_hook.
# (Fixed: previously read "does not support hook funcions.n".)
case " $hookable_fns " in
*" $1 "*) ;;
*) func_fatal_error "'$1' does not support hook functions." ;;
esac
eval _G_hook_fns=\$$1_hooks; shift
for _G_hook in $_G_hook_fns; do
# Run each hook with the current positional parameters...
eval $_G_hook '"$@"'
# ...then adopt the (possibly modified) list it returned in
# ${_G_hook}_result for the next hook in the chain.
eval _G_hook_result=\$${_G_hook}_result
eval set dummy "$_G_hook_result"; shift
done
# Re-quote the final parameter list for the caller to eval.
func_quote_for_eval ${1+"$@"}
func_run_hooks_result=$func_quote_for_eval_result
}
## --------------- ##
## Option parsing. ##
## --------------- ##
# In order to add your own option parsing hooks, you must accept the
# full positional parameter list in your hook function, remove any
# options that you action, and then pass back the remaining unprocessed
# options in '_result', escaped suitably for
# 'eval'. Like this:
#
# my_options_prep ()
# {
# $debug_cmd
#
# # Extend the existing usage message.
# usage_message=$usage_message'
# -s, --silent don'\''t print informational messages
# '
#
# func_quote_for_eval ${1+"$@"}
# my_options_prep_result=$func_quote_for_eval_result
# }
# func_add_hook func_options_prep my_options_prep
#
#
# my_silent_option ()
# {
# $debug_cmd
#
# # Note that for efficiency, we parse as many options as we can
# # recognise in a loop before passing the remainder back to the
# # caller on the first unrecognised argument we encounter.
# while test $# -gt 0; do
# opt=$1; shift
# case $opt in
# --silent|-s) opt_silent=: ;;
# # Separate non-argument short options:
# -s*) func_split_short_opt "$_G_opt"
# set dummy "$func_split_short_opt_name" \
# "-$func_split_short_opt_arg" ${1+"$@"}
# shift
# ;;
# *) set dummy "$_G_opt" "$*"; shift; break ;;
# esac
# done
#
# func_quote_for_eval ${1+"$@"}
# my_silent_option_result=$func_quote_for_eval_result
# }
# func_add_hook func_parse_options my_silent_option
#
#
# my_option_validation ()
# {
# $debug_cmd
#
# $opt_silent && $opt_verbose && func_fatal_help "\
# '--silent' and '--verbose' options are mutually exclusive."
#
# func_quote_for_eval ${1+"$@"}
# my_option_validation_result=$func_quote_for_eval_result
# }
# func_add_hook func_validate_options my_option_validation
#
# You'll also need to manually amend $usage_message to reflect the extra
# options you parse. It's preferable to append if you can, so that
# multiple option parsing hooks can be added safely.
# func_options [ARG]...
# ---------------------
# All the functions called inside func_options are hookable. See the
# individual implementations for details.
func_hookable func_options
func_options ()
{
$debug_cmd
# Three-phase parse: prep, parse, validate; each phase receives the
# (re-quoted) positional parameters the previous phase produced.
func_options_prep ${1+"$@"}
eval func_parse_options \
${func_options_prep_result+"$func_options_prep_result"}
eval func_validate_options \
${func_parse_options_result+"$func_parse_options_result"}
eval func_run_hooks func_options \
${func_validate_options_result+"$func_validate_options_result"}
# save modified positional parameters for caller
func_options_result=$func_run_hooks_result
}
# func_options_prep [ARG]...
# --------------------------
# All initialisations required before starting the option parse loop.
# Note that when calling hook functions, we pass through the list of
# positional parameters. If a hook function modifies that list, and
# needs to propagate that back to rest of this script, then the complete
# modified list must be put in 'func_run_hooks_result' before
# returning.
func_hookable func_options_prep
func_options_prep ()
{
$debug_cmd
# Option defaults:
opt_verbose=false
opt_warning_types=
func_run_hooks func_options_prep ${1+"$@"}
# save modified positional parameters for caller
func_options_prep_result=$func_run_hooks_result
}
# func_parse_options [ARG]...
# ---------------------------
# The main option parsing loop.
func_hookable func_parse_options
func_parse_options ()
{
$debug_cmd
func_parse_options_result=
# this just eases exit handling
while test $# -gt 0; do
# Defer to hook functions for initial option parsing, so they
# get priority in the event of reusing an option name.
func_run_hooks func_parse_options ${1+"$@"}
# Adjust func_parse_options positional parameters to match
eval set dummy "$func_run_hooks_result"; shift
# Break out of the loop if we already parsed every option.
test $# -gt 0 || break
_G_opt=$1
shift
case $_G_opt in
--debug|-x) debug_cmd='set -x'
func_echo "enabling shell trace mode"
$debug_cmd
;;
--no-warnings|--no-warning|--no-warn)
# Rewritten as '--warnings none' and re-processed below.
set dummy --warnings none ${1+"$@"}
shift
;;
--warnings|--warning|-W)
test $# = 0 && func_missing_arg $_G_opt && break
case " $warning_categories $1" in
*" $1 "*)
# trailing space prevents matching last $1 above
func_append_uniq opt_warning_types " $1"
;;
*all)
opt_warning_types=$warning_categories
;;
*none)
opt_warning_types=none
warning_func=:
;;
*error)
opt_warning_types=$warning_categories
warning_func=func_fatal_error
;;
*)
func_fatal_error \
"unsupported warning category: '$1'"
;;
esac
shift
;;
--verbose|-v) opt_verbose=: ;;
--version) func_version ;;
-\?|-h) func_usage ;;
--help) func_help ;;
# Separate optargs to long options (plugins may need this):
--*=*) func_split_equals "$_G_opt"
set dummy "$func_split_equals_lhs" \
"$func_split_equals_rhs" ${1+"$@"}
shift
;;
# Separate optargs to short options:
-W*)
func_split_short_opt "$_G_opt"
set dummy "$func_split_short_opt_name" \
"$func_split_short_opt_arg" ${1+"$@"}
shift
;;
# Separate non-argument short options:
-\?*|-h*|-v*|-x*)
func_split_short_opt "$_G_opt"
set dummy "$func_split_short_opt_name" \
"-$func_split_short_opt_arg" ${1+"$@"}
shift
;;
--) break ;;
-*) func_fatal_help "unrecognised option: '$_G_opt'" ;;
*) set dummy "$_G_opt" ${1+"$@"}; shift; break ;;
esac
done
# save modified positional parameters for caller
func_quote_for_eval ${1+"$@"}
func_parse_options_result=$func_quote_for_eval_result
}
# func_validate_options [ARG]...
# ------------------------------
# Perform any sanity checks on option settings and/or unconsumed
# arguments.
func_hookable func_validate_options
func_validate_options ()
{
$debug_cmd
# Display all warnings if -W was not given.
test -n "$opt_warning_types" || opt_warning_types=" $warning_categories"
func_run_hooks func_validate_options ${1+"$@"}
# Bail if the options were screwed!
# ($exit_cmd is ':' normally, 'exit' after func_missing_arg.)
$exit_cmd $EXIT_FAILURE
# save modified positional parameters for caller
func_validate_options_result=$func_run_hooks_result
}
## ----------------- ##
## Helper functions. ##
## ----------------- ##
# This section contains the helper functions used by the rest of the
# hookable option parser framework in ascii-betical order.
# func_fatal_help ARG...
# ----------------------
# Echo program name prefixed message to standard error, followed by
# a help hint, and exit.
func_fatal_help ()
{
$debug_cmd
# eval so variables embedded in $usage/$fatal_help expand now.
eval \$ECHO \""Usage: $usage"\"
eval \$ECHO \""$fatal_help"\"
func_error ${1+"$@"}
exit $EXIT_FAILURE
}
# func_help
# ---------
# Echo long help message to standard output and exit.
func_help ()
{
$debug_cmd
func_usage_message
$ECHO "$long_help_message"
exit 0
}
# func_missing_arg ARGNAME
# ------------------------
# Echo program name prefixed message to standard error and set global
# exit_cmd.
func_missing_arg ()
{
$debug_cmd
func_error "Missing argument for '$1'."
exit_cmd=exit
}
# func_split_equals STRING
# ------------------------
# Set func_split_equals_lhs and func_split_equals_rhs shell variables after
# splitting STRING at the '=' sign.  If STRING contains no '=', lhs is the
# whole string and rhs is empty.
# Probe once for XSI parameter expansion (${var%%pat} and friends) so the
# fork-free implementation can be used instead of spawning expr.
test -z "$_G_HAVE_XSI_OPS" \
&& (eval 'x=a/b/c;
test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \
&& _G_HAVE_XSI_OPS=yes
if test yes = "$_G_HAVE_XSI_OPS"
then
# This is an XSI compatible shell, allowing a faster implementation...
eval 'func_split_equals ()
{
$debug_cmd
func_split_equals_lhs=${1%%=*}
func_split_equals_rhs=${1#*=}
test "x$func_split_equals_lhs" = "x$1" \
&& func_split_equals_rhs=
}'
else
# ...otherwise fall back to using expr, which is often a shell builtin.
func_split_equals ()
{
$debug_cmd
func_split_equals_lhs=`expr "x$1" : 'x\([^=]*\)'`
func_split_equals_rhs=
# Only extract an rhs when STRING really contained an '=' sign.
test "x$func_split_equals_lhs" = "x$1" \
|| func_split_equals_rhs=`expr "x$1" : 'x[^=]*=\(.*\)$'`
}
fi #func_split_equals
# func_split_short_opt SHORTOPT
# -----------------------------
# Set func_split_short_opt_name and func_split_short_opt_arg shell
# variables after splitting SHORTOPT after the 2nd character.
# E.g. '-Wall' yields name='-W' and arg='all'.
if test yes = "$_G_HAVE_XSI_OPS"
then
# This is an XSI compatible shell, allowing a faster implementation...
eval 'func_split_short_opt ()
{
$debug_cmd
func_split_short_opt_arg=${1#??}
func_split_short_opt_name=${1%"$func_split_short_opt_arg"}
}'
else
# ...otherwise fall back to using expr, which is often a shell builtin.
func_split_short_opt ()
{
$debug_cmd
# Fix: capture the leading '-' together with the option letter (was
# 'x-\(.\)', which returned 'W' for '-Wall').  The XSI implementation
# above returns '-W', and callers in the option parser re-use the name
# verbatim as an option, so the two implementations must agree.
func_split_short_opt_name=`expr "x$1" : 'x\(-.\)'`
func_split_short_opt_arg=`expr "x$1" : 'x-.\(.*\)$'`
}
fi #func_split_short_opt
# func_usage
# ----------
# Echo short help message to standard output and exit.
func_usage ()
{
$debug_cmd
func_usage_message
$ECHO "Run '$progname --help |${PAGER-more}' for full usage"
exit 0
}
# func_usage_message
# ------------------
# Echo short help message to standard output.
func_usage_message ()
{
$debug_cmd
eval \$ECHO \""Usage: $usage"\"
echo
# Scan this script's own header comment ($progpath): strip the '# '
# prefixes and print the line held just before the 'Written by'
# attribution (the one-line package description), then stop.
$SED -n 's|^# ||
/^Written by/{
x;p;x
}
h
/^Written by/q' < "$progpath"
echo
eval \$ECHO \""$usage_message"\"
}
# func_version
# ------------
# Echo version message to standard output and exit.
func_version ()
{
$debug_cmd
printf '%s\n' "$progname $scriptversion"
# Extract the copyright/warranty notice from this script's own header
# ($progpath), joining wrapped copyright lines and trimming the year
# list after '(C)' down to a single starting year.
$SED -n '
/(C)/!b go
:more
/\./!{
N
s|\n# | |
b more
}
:go
/^# Written by /,/# warranty; / {
s|^# ||
s|^# *$||
s|\((C)\)[ 0-9,-]*[ ,-]\([1-9][0-9]* \)|\1 \2|
p
}
/^# Written by / {
s|^# ||
p
}
/^warranty; /q' < "$progpath"
# Propagate the status of the sed extraction.
exit $?
}
# Local variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC"
# time-stamp-time-zone: "UTC"
# End:
# Set a version string.
scriptversion='(GNU libtool) 2.4.6'
# func_echo ARG...
# ----------------
# Libtool also displays the current mode in messages, so override
# funclib.sh func_echo with this custom definition.
func_echo ()
{
$debug_cmd
_G_message=$*
# Emit one "progname[: mode]:"-prefixed line per newline-separated line
# of the message; IFS is restored around the loop body so only the
# split on $nl is affected.
func_echo_IFS=$IFS
IFS=$nl
for _G_line in $_G_message; do
IFS=$func_echo_IFS
$ECHO "$progname${opt_mode+: $opt_mode}: $_G_line"
done
IFS=$func_echo_IFS
}
# func_warning ARG...
# -------------------
# Libtool warnings are not categorized, so override funclib.sh
# func_warning with this simpler definition.
func_warning ()
{
$debug_cmd
# Delegate to whatever $warning_func currently names.
# NOTE(review): $warning_func is set elsewhere in this script --
# presumably to func_warn or ':' when warnings are suppressed; confirm.
$warning_func ${1+"$@"}
}
## ---------------- ##
## Options parsing. ##
## ---------------- ##
# Hook in the functions to make sure our own options are parsed during
# the option parsing loop.
usage='$progpath [OPTION]... [MODE-ARG]...'
# Short help message in response to '-h'.
usage_message="Options:
--config show all configuration variables
--debug enable verbose shell tracing
-n, --dry-run display commands without modifying any files
--features display basic configuration information and exit
--mode=MODE use operation mode MODE
--no-warnings equivalent to '-Wnone'
--preserve-dup-deps don't remove duplicate dependency libraries
--quiet, --silent don't print informational messages
--tag=TAG use configuration variables from tag TAG
-v, --verbose print more informational messages than default
--version print version information
-W, --warnings=CATEGORY report the warnings falling in CATEGORY [all]
-h, --help, --help-all print short, long, or detailed help message
"
# Additional text appended to 'usage_message' in response to '--help'.
# NOTE(review): this overrides the generic func_help defined earlier in
# this script, adding libtool's MODE table and build configuration facts
# (the automake/autoconf versions are captured via backquotes inside the
# string when it is printed).
func_help ()
{
$debug_cmd
func_usage_message
$ECHO "$long_help_message
MODE must be one of the following:
clean remove files from the build directory
compile compile a source file into a libtool object
execute automatically set library path, then run a program
finish complete the installation of libtool libraries
install install libraries or executables
link create a library or an executable
uninstall remove libraries from an installed directory
MODE-ARGS vary depending on the MODE. When passed as first option,
'--mode=MODE' may be abbreviated as 'MODE' or a unique abbreviation of that.
Try '$progname --help --mode=MODE' for a more detailed description of MODE.
When reporting a bug, please describe a test case to reproduce it and
include the following information:
host-triplet: $host
shell: $SHELL
compiler: $LTCC
compiler flags: $LTCFLAGS
linker: $LD (gnu? $with_gnu_ld)
version: $progname (GNU libtool) 2.4.6
automake: `($AUTOMAKE --version) 2>/dev/null |$SED 1q`
autoconf: `($AUTOCONF --version) 2>/dev/null |$SED 1q`
Report bugs to .
GNU libtool home page: .
General help using GNU software: ."
exit 0
}
# func_lo2o OBJECT-NAME
# ---------------------
# Transform OBJECT-NAME from a '.lo' suffix to the platform specific
# object suffix.
# sed scripts for the non-XSI fallbacks below.  NOTE(review): o2lo (the
# inverse mapping) is not used in this section; presumably consumed
# elsewhere in the script.
lo2o=s/\\.lo\$/.$objext/
o2lo=s/\\.$objext\$/.lo/
if test yes = "$_G_HAVE_XSI_OPS"; then
eval 'func_lo2o ()
{
case $1 in
*.lo) func_lo2o_result=${1%.lo}.$objext ;;
* ) func_lo2o_result=$1 ;;
esac
}'
# func_xform LIBOBJ-OR-SOURCE
# ---------------------------
# Transform LIBOBJ-OR-SOURCE from a '.o' or '.c' (or otherwise)
# suffix to a '.lo' libtool-object suffix.
eval 'func_xform ()
{
func_xform_result=${1%.*}.lo
}'
else
# ...otherwise fall back to using sed.
func_lo2o ()
{
func_lo2o_result=`$ECHO "$1" | $SED "$lo2o"`
}
func_xform ()
{
func_xform_result=`$ECHO "$1" | $SED 's|\.[^.]*$|.lo|'`
}
fi
# func_fatal_configuration ARG...
# -------------------------------
# Echo program name prefixed message to standard error, followed by
# a configuration failure hint, and exit.
func_fatal_configuration ()
{
  # Fix: this previously called 'func__fatal_error' (double underscore),
  # which is not defined anywhere -- the funclib helper is
  # 'func_fatal_error'.  With the typo, reporting a configuration error
  # would itself die with 'command not found' instead of printing the
  # diagnostic.  (Known libtool 2.4.6 typo, fixed upstream.)
  func_fatal_error ${1+"$@"} \
    "See the $PACKAGE documentation for more information." \
    "Fatal configuration error."
}
# func_config
# -----------
# Display the configuration for all the tags in this script.
func_config ()
{
re_begincf='^# ### BEGIN LIBTOOL'
re_endcf='^# ### END LIBTOOL'
# Default configuration.
# Print everything between the BEGIN/END LIBTOOL CONFIG markers of this
# very script ($progpath).
$SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath"
# Now print the configurations for the tags.
for tagname in $taglist; do
$SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath"
done
# Exit with the status of the last sed invocation.
exit $?
}
# func_features
# -------------
# Display the features supported by this script: the host triplet and
# whether shared and/or static libraries are enabled.  Exits with the
# status of the final echo.
func_features ()
{
  echo "host: $host"
  case $build_libtool_libs in
    yes) echo "enable shared libraries" ;;
    *)   echo "disable shared libraries" ;;
  esac
  case $build_old_libs in
    yes) echo "enable static libraries" ;;
    *)   echo "disable static libraries" ;;
  esac
  exit $?
}
# func_enable_tag TAGNAME
# -----------------------
# Verify that TAGNAME is valid, and either flag an error and exit, or
# enable the TAGNAME tag. We also add TAGNAME to the global $taglist
# variable here.
func_enable_tag ()
{
# Global variable:
tagname=$1
re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$"
re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$"
sed_extractcf=/$re_begincf/,/$re_endcf/p
# Validate tagname.
# Reject any character that would be unsafe to interpolate into the
# sed address patterns built above.
case $tagname in
*[!-_A-Za-z0-9,/]*)
func_fatal_error "invalid tag name: $tagname"
;;
esac
# Don't test for the "default" C tag, as we know it's
# there but not specially marked.
case $tagname in
CC) ;;
*)
if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then
taglist="$taglist $tagname"
# Evaluate the configuration. Be careful to quote the path
# and the sed script, to avoid splitting on whitespace, but
# also don't use non-portable quotes within backquotes within
# quotes we have to do it in 2 steps:
extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"`
eval "$extractedcf"
else
func_error "ignoring unknown tag $tagname"
fi
;;
esac
}
# func_check_version_match
# ------------------------
# Ensure that we are using m4 macros, and libtool script from the same
# release of libtool.
func_check_version_match ()
{
# Compare the revision recorded by LT_INIT ($macro_revision/$macro_version)
# against this script's own ($package_revision/$VERSION); a mismatch
# usually means a stale aclocal.m4.  Three diagnostics cover: no macro
# version at all, a different macro version, and a same-version but
# different-revision skew.
if test "$package_revision" != "$macro_revision"; then
if test "$VERSION" != "$macro_version"; then
if test -z "$macro_version"; then
cat >&2 <<_LT_EOF
$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
$progname: definition of this LT_INIT comes from an older release.
$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
$progname: and run autoconf again.
_LT_EOF
else
cat >&2 <<_LT_EOF
$progname: Version mismatch error. This is $PACKAGE $VERSION, but the
$progname: definition of this LT_INIT comes from $PACKAGE $macro_version.
$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION
$progname: and run autoconf again.
_LT_EOF
fi
else
cat >&2 <<_LT_EOF
$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision,
$progname: but the definition of this LT_INIT comes from revision $macro_revision.
$progname: You should recreate aclocal.m4 with macros from revision $package_revision
$progname: of $PACKAGE $VERSION and run autoconf again.
_LT_EOF
fi
exit $EXIT_MISMATCH
fi
}
# libtool_options_prep [ARG]...
# -----------------------------
# Preparation for options parsed by libtool: initialize the opt_*
# defaults and expand a leading MODE shorthand (e.g. 'link', or any
# unique abbreviation of a mode name) into '--mode MODE' before the
# generic option parser runs.  The (re-quoted) argument list is passed
# back in $libtool_options_prep_result.
libtool_options_prep ()
{
  # Fix: this read '$debug_mode', a variable never set anywhere in this
  # script; the tracing hook used by every other function is
  # '$debug_cmd' (known libtool 2.4.6 typo), so --debug tracing silently
  # skipped this function.
  $debug_cmd

  # Option defaults:
  opt_config=false
  opt_dlopen=
  opt_dry_run=false
  opt_help=false
  opt_mode=
  opt_preserve_dup_deps=false
  opt_quiet=false

  nonopt=
  preserve_args=

  # Shorthand for --mode=foo, only valid as the first argument
  case $1 in
  clean|clea|cle|cl)
    shift; set dummy --mode clean ${1+"$@"}; shift
    ;;
  compile|compil|compi|comp|com|co|c)
    shift; set dummy --mode compile ${1+"$@"}; shift
    ;;
  execute|execut|execu|exec|exe|ex|e)
    shift; set dummy --mode execute ${1+"$@"}; shift
    ;;
  finish|finis|fini|fin|fi|f)
    shift; set dummy --mode finish ${1+"$@"}; shift
    ;;
  install|instal|insta|inst|ins|in|i)
    shift; set dummy --mode install ${1+"$@"}; shift
    ;;
  link|lin|li|l)
    shift; set dummy --mode link ${1+"$@"}; shift
    ;;
  uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u)
    shift; set dummy --mode uninstall ${1+"$@"}; shift
    ;;
  esac

  # Pass back the list of options.
  func_quote_for_eval ${1+"$@"}
  libtool_options_prep_result=$func_quote_for_eval_result
}
func_add_hook func_options_prep libtool_options_prep
# libtool_parse_options [ARG]...
# ---------------------------------
# Provide handling for libtool specific options.
# Options this hook does not recognize are pushed back onto the argument
# list (the final '*)' branch) for the generic parser to handle; the
# surviving arguments are re-quoted into $libtool_parse_options_result.
libtool_parse_options ()
{
$debug_cmd
# Perform our own loop to consume as many options as possible in
# each iteration.
while test $# -gt 0; do
_G_opt=$1
shift
case $_G_opt in
--dry-run|--dryrun|-n)
opt_dry_run=:
;;
--config) func_config ;;
--dlopen|-dlopen)
# Accumulate repeated -dlopen arguments as a newline-separated list.
opt_dlopen="${opt_dlopen+$opt_dlopen
}$1"
shift
;;
--preserve-dup-deps)
opt_preserve_dup_deps=: ;;
--features) func_features ;;
--finish) set dummy --mode finish ${1+"$@"}; shift ;;
--help) opt_help=: ;;
--help-all) opt_help=': help-all' ;;
--mode) test $# = 0 && func_missing_arg $_G_opt && break
opt_mode=$1
case $1 in
# Valid mode arguments:
clean|compile|execute|finish|install|link|relink|uninstall) ;;
# Catch anything else as an error
*) func_error "invalid argument for $_G_opt"
exit_cmd=exit
break
;;
esac
shift
;;
--no-silent|--no-quiet)
opt_quiet=false
func_append preserve_args " $_G_opt"
;;
--no-warnings|--no-warning|--no-warn)
opt_warning=false
func_append preserve_args " $_G_opt"
;;
--no-verbose)
opt_verbose=false
func_append preserve_args " $_G_opt"
;;
--silent|--quiet)
opt_quiet=:
opt_verbose=false
func_append preserve_args " $_G_opt"
;;
--tag) test $# = 0 && func_missing_arg $_G_opt && break
opt_tag=$1
func_append preserve_args " $_G_opt $1"
func_enable_tag "$1"
shift
;;
--verbose|-v) opt_quiet=false
opt_verbose=:
func_append preserve_args " $_G_opt"
;;
# An option not handled by this hook function:
*) set dummy "$_G_opt" ${1+"$@"}; shift; break ;;
esac
done
# save modified positional parameters for caller
func_quote_for_eval ${1+"$@"}
libtool_parse_options_result=$func_quote_for_eval_result
}
func_add_hook func_parse_options libtool_parse_options
# libtool_validate_options [ARG]...
# ---------------------------------
# Perform any sanity checks on option settings and/or unconsumed
# arguments.  Also captures the first non-option argument in $nonopt
# and switches $help to a mode-specific hint.
libtool_validate_options ()
{
# save first non-option argument
if test 0 -lt $#; then
nonopt=$1
shift
fi
# preserve --debug
test : = "$debug_cmd" || func_append preserve_args " --debug"
case $host in
# Solaris2 added to fix http://debbugs.gnu.org/cgi/bugreport.cgi?bug=16452
# see also: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=59788
*cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* | *os2*)
# don't eliminate duplications in $postdeps and $predeps
opt_duplicate_compiler_generated_deps=:
;;
*)
opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps
;;
esac
$opt_help || {
# Sanity checks first:
func_check_version_match
test yes != "$build_libtool_libs" \
&& test yes != "$build_old_libs" \
&& func_fatal_configuration "not configured to build any kind of library"
# Darwin sucks
eval std_shrext=\"$shrext_cmds\"
# Only execute mode is allowed to have -dlopen flags.
if test -n "$opt_dlopen" && test execute != "$opt_mode"; then
func_error "unrecognized option '-dlopen'"
$ECHO "$help" 1>&2
exit $EXIT_FAILURE
fi
# Change the help message to a mode-specific one.
generic_help=$help
help="Try '$progname --help --mode=$opt_mode' for more information."
}
# Pass back the unparsed argument list
func_quote_for_eval ${1+"$@"}
libtool_validate_options_result=$func_quote_for_eval_result
}
func_add_hook func_validate_options libtool_validate_options
# Process options as early as possible so that --help and --version
# can return quickly.
func_options ${1+"$@"}
eval set dummy "$func_options_result"; shift
## ----------- ##
## Main. ##
## ----------- ##
magic='%%%MAGIC variable%%%'
magic_exe='%%%MAGIC EXE variable%%%'
# Global variables.
extracted_archives=
extracted_serial=0
# If this variable is set in any of the actions, the command in it
# will be execed at the end. This prevents here-documents from being
# left over by shells.
exec_cmd=
# A function that is used when there is no print builtin or printf.
# The eval'd here-document emits the single argument verbatim.
func_fallback_echo ()
{
eval 'cat <<_LTECHO_EOF
$1
_LTECHO_EOF'
}
# func_generated_by_libtool
# True iff stdin has been generated by Libtool. This function is only
# a basic sanity check; it will hardly flush out determined imposters.
# Reads STDIN and succeeds when a '# Generated by ...$PACKAGE' marker
# line is present.
func_generated_by_libtool_p ()
{
$GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1
}
# func_lalib_p file
# True iff FILE is a libtool '.la' library or '.lo' object file.
# This function is only a basic sanity check; it will hardly flush out
# determined imposters.
func_lalib_p ()
{
# Only the first four lines of FILE are examined for the marker.
test -f "$1" &&
$SED -e 4q "$1" 2>/dev/null | func_generated_by_libtool_p
}
# func_lalib_unsafe_p file
# True iff FILE is a libtool '.la' library or '.lo' object file.
# This function implements the same check as func_lalib_p without
# resorting to external programs. To this end, it redirects stdin and
# closes it afterwards, without saving the original file descriptor.
# As a safety measure, use it only where a negative result would be
# fatal anyway. Works if 'file' does not exist.
func_lalib_unsafe_p ()
{
lalib_p=no
# Park the current stdin on fd 5, read from FILE, restore stdin below.
if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then
# Same four-line window as func_lalib_p.
for lalib_p_l in 1 2 3 4
do
read lalib_p_line
case $lalib_p_line in
\#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;;
esac
done
exec 0<&5 5<&-
fi
test yes = "$lalib_p"
}
# func_ltwrapper_script_p file
# True iff FILE is a libtool wrapper script
# This function is only a basic sanity check; it will hardly flush out
# determined imposters.
func_ltwrapper_script_p ()
{
# NOTE(review): $lt_truncate_bin is configured elsewhere in this script;
# it bounds how much of FILE is piped into the marker check.
test -f "$1" &&
$lt_truncate_bin < "$1" 2>/dev/null | func_generated_by_libtool_p
}
# func_ltwrapper_executable_p file
# True iff FILE is a libtool wrapper executable
# This function is only a basic sanity check; it will hardly flush out
# determined imposters.
func_ltwrapper_executable_p ()
{
  # Append '.exe' unless FILE already carries it, then look for the
  # wrapper magic marker inside the (possibly suffixed) file.
  case $1 in
    *.exe) func_ltwrapper_exec_suffix= ;;
    *)     func_ltwrapper_exec_suffix=.exe ;;
  esac
  $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1
}
# func_ltwrapper_scriptname file
# Assumes file is an ltwrapper_executable
# uses $file to determine the appropriate filename for a
# temporary ltwrapper_script.
# Result shape: DIR/$objdir/BASENAME_ltshwrapper (BASENAME minus '.exe').
func_ltwrapper_scriptname ()
{
func_dirname_and_basename "$1" "" "."
func_stripname '' '.exe' "$func_basename_result"
func_ltwrapper_scriptname_result=$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper
}
# func_ltwrapper_p file
# True iff FILE is a libtool wrapper script or wrapper executable
# This function is only a basic sanity check; it will hardly flush out
# determined imposters.
func_ltwrapper_p ()
{
func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1"
}
# func_execute_cmds commands fail_cmd
# Execute tilde-delimited COMMANDS.
# If FAIL_CMD is given, eval that upon failure.
# FAIL_CMD may read-access the current command in variable CMD!
func_execute_cmds ()
{
$debug_cmd
# Split COMMANDS on '~' only; each command is then expanded (eval) and
# run under normal word splitting.
save_ifs=$IFS; IFS='~'
for cmd in $1; do
IFS=$sp$nl
eval cmd=\"$cmd\"
IFS=$save_ifs
func_show_eval "$cmd" "${2-:}"
done
IFS=$save_ifs
}
# func_source file
# Source FILE, adding directory component if necessary.
# Note that it is not necessary on cygwin/mingw to append a dot to
# FILE even if both FILE and FILE.exe exist: automatic-append-.exe
# behavior happens only for exec(3), not for open(2)! Also, sourcing
# 'FILE.' does not work on cygwin managed mounts.
func_source ()
{
$debug_cmd
# A bare file name gets an explicit './' so the shell does not search
# $PATH for the file to source.
case $1 in
*/* | *\\*) . "$1" ;;
*) . "./$1" ;;
esac
}
# func_resolve_sysroot PATH
# Replace a leading = in PATH with a sysroot. Store the result into
# func_resolve_sysroot_result
func_resolve_sysroot ()
{
  case $1 in
    =*)
      # A leading '=' stands for the configured sysroot prefix.
      func_stripname '=' '' "$1"
      func_resolve_sysroot_result=$lt_sysroot$func_stripname_result
      ;;
    *)
      func_resolve_sysroot_result=$1
      ;;
  esac
}
# func_replace_sysroot PATH
# If PATH begins with the sysroot, replace it with = and
# store the result into func_replace_sysroot_result.
func_replace_sysroot ()
{
# The '?*:' guard ensures an empty $lt_sysroot can never match.
case $lt_sysroot:$1 in
?*:"$lt_sysroot"*)
func_stripname "$lt_sysroot" '' "$1"
func_replace_sysroot_result='='$func_stripname_result
;;
*)
# Including no sysroot.
func_replace_sysroot_result=$1
;;
esac
}
# func_infer_tag arg
# Infer tagged configuration to use if any are available and
# if one wasn't chosen via the "--tag" command line option.
# Only attempt this if the compiler in the base compile
# command doesn't match the default compiler.
# arg is usually of the form 'gcc ...'
func_infer_tag ()
{
$debug_cmd
# Nothing to do if the user already forced a tag or no tags exist.
if test -n "$available_tags" && test -z "$tagname"; then
CC_quoted=
for arg in $CC; do
func_append_quoted CC_quoted "$arg"
done
CC_expanded=`func_echo_all $CC`
CC_quoted_expanded=`func_echo_all $CC_quoted`
case $@ in
# Blanks in the command may have been stripped by the calling shell,
# but not from the CC environment variable when configure was run.
" $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
" $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;;
# Blanks at the start of $base_compile will cause this to fail
# if we don't check for them as well.
*)
# Try each tagged configuration embedded in this script: load it
# (which redefines $CC) and see if its compiler matches the command.
for z in $available_tags; do
if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
# Evaluate the configuration.
eval "`$SED -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
CC_quoted=
for arg in $CC; do
# Double-quote args containing other shell metacharacters.
func_append_quoted CC_quoted "$arg"
done
CC_expanded=`func_echo_all $CC`
CC_quoted_expanded=`func_echo_all $CC_quoted`
case "$@ " in
" $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \
" $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*)
# The compiler in the base compile command matches
# the one in the tagged configuration.
# Assume this is the tagged configuration we want.
tagname=$z
break
;;
esac
fi
done
# If $tagname still isn't set, then no tagged configuration
# was found and let the user know that the "--tag" command
# line option must be used.
if test -z "$tagname"; then
func_echo "unable to infer tagged configuration"
func_fatal_error "specify a tag with '--tag'"
# else
# func_verbose "using $tagname tagged configuration"
fi
;;
esac
fi
}
# func_write_libtool_object output_name pic_name nonpic_name
# Create a libtool object file (analogous to a ".la" file),
# but don't create it if we're doing a dry run.
func_write_libtool_object ()
{
write_libobj=$1
if test yes = "$build_libtool_libs"; then
write_lobj=\'$2\'
else
write_lobj=none
fi
if test yes = "$build_old_libs"; then
write_oldobj=\'$3\'
else
write_oldobj=none
fi
$opt_dry_run || {
cat >${write_libobj}T </dev/null`
if test "$?" -eq 0 && test -n "$func_convert_core_file_wine_to_w32_tmp"; then
func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" |
$SED -e "$sed_naive_backslashify"`
else
func_convert_core_file_wine_to_w32_result=
fi
fi
}
# end: func_convert_core_file_wine_to_w32
# func_convert_core_path_wine_to_w32 ARG
# Helper function used by path conversion functions when $build is *nix, and
# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly
# configured wine environment available, with the winepath program in $build's
# $PATH. Assumes ARG has no leading or trailing path separator characters.
#
# ARG is path to be converted from $build format to win32.
# Result is available in $func_convert_core_path_wine_to_w32_result.
# Unconvertible file (directory) names in ARG are skipped; if no directory names
# are convertible, then the result may be empty.
func_convert_core_path_wine_to_w32 ()
{
$debug_cmd
# unfortunately, winepath doesn't convert paths, only file names
# So: split ARG on ':', convert each component individually, and join
# the successful conversions with ';' (the w32 path separator).
func_convert_core_path_wine_to_w32_result=
if test -n "$1"; then
oldIFS=$IFS
IFS=:
for func_convert_core_path_wine_to_w32_f in $1; do
IFS=$oldIFS
func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f"
if test -n "$func_convert_core_file_wine_to_w32_result"; then
if test -z "$func_convert_core_path_wine_to_w32_result"; then
func_convert_core_path_wine_to_w32_result=$func_convert_core_file_wine_to_w32_result
else
func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result"
fi
fi
done
IFS=$oldIFS
fi
}
# end: func_convert_core_path_wine_to_w32
# func_cygpath ARGS...
# Invoke the cygpath program named by $LT_CYGPATH, passing ARGS straight
# through (the last argument is the file name or path to be converted).
# Used when (1) $build is *nix and Cygwin is hosted via a wine
# environment; or (2) $build is MSYS and $host is Cygwin; or (3) $build
# is Cygwin -- converting between w32 and Cygwin forms of a name
# depending on the direction cygpath is asked for.  On any failure,
# including an unset or non-existent $LT_CYGPATH, func_cygpath_result is
# left empty so callers can detect the error.
#
# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH
# environment variable; do not put it in $PATH.
func_cygpath ()
{
  $debug_cmd

  func_cygpath_result=
  if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then
    # Keep cygpath's output only when it exits successfully.
    func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null` || func_cygpath_result=
  else
    func_error "LT_CYGPATH is empty or specifies non-existent file: '$LT_CYGPATH'"
  fi
}
#end: func_cygpath
# func_convert_core_msys_to_w32 ARG
# Convert file name or path ARG from MSYS format to w32 format. Return
# result in func_convert_core_msys_to_w32_result.
# Relies on the MSYS runtime rewriting the argument of 'cmd //c echo'.
func_convert_core_msys_to_w32 ()
{
$debug_cmd
# awkward: cmd appends spaces to result
# ...so strip trailing whitespace, then flip '/' to '\' with the
# $sed_naive_backslashify script.
func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null |
$SED -e 's/[ ]*$//' -e "$sed_naive_backslashify"`
}
#end: func_convert_core_msys_to_w32
# func_convert_file_check ARG1 ARG2
# Verify that ARG1 (a file name in $build format) was converted to $host
# format in ARG2. Otherwise, emit an error message, but continue (resetting
# func_to_host_file_result to ARG1).
func_convert_file_check ()
{
$debug_cmd
# A non-empty input that produced an empty conversion signals failure.
if test -z "$2" && test -n "$1"; then
func_error "Could not determine host file name corresponding to"
func_error " '$1'"
func_error "Continuing, but uninstalled executables may not work."
# Fallback:
func_to_host_file_result=$1
fi
}
# end func_convert_file_check
# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH
# Verify that FROM_PATH (a path in $build format) was converted to $host
# format in TO_PATH. Otherwise, emit an error message, but continue, resetting
# func_to_host_file_result to a simplistic fallback value (see below).
func_convert_path_check ()
{
$debug_cmd
if test -z "$4" && test -n "$3"; then
func_error "Could not determine the host path corresponding to"
func_error " '$3'"
func_error "Continuing, but uninstalled executables may not work."
# Fallback. This is a deliberately simplistic "conversion" and
# should not be "improved". See libtool.info.
# When the separators differ, just swap FROM_PATHSEP for TO_PATHSEP.
if test "x$1" != "x$2"; then
lt_replace_pathsep_chars="s|$1|$2|g"
func_to_host_path_result=`echo "$3" |
$SED -e "$lt_replace_pathsep_chars"`
else
func_to_host_path_result=$3
fi
fi
}
# end func_convert_path_check
# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG
# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT
# and appending REPL if ORIG matches BACKPAT.
func_convert_path_front_back_pathsep ()
{
$debug_cmd
# $1 and $2 are deliberately left unquoted so they act as shell glob
# patterns (callers pass e.g. ':*' and '*:').
case $4 in
$1 ) func_to_host_path_result=$3$func_to_host_path_result
;;
esac
case $4 in
$2 ) func_append func_to_host_path_result "$3"
;;
esac
}
# end func_convert_path_front_back_pathsep
##################################################
# $build to $host FILE NAME CONVERSION FUNCTIONS #
##################################################
# invoked via '$to_host_file_cmd ARG'
#
# In each case, ARG is the path to be converted from $build to $host format.
# Result will be available in $func_to_host_file_result.
# func_to_host_file ARG
# Converts the file name ARG from $build format to $host format. Return result
# in func_to_host_file_result.
func_to_host_file ()
{
$debug_cmd
# NOTE(review): $to_host_file_cmd is a function "pointer" naming one of
# the func_convert_file_* implementations below; it is set elsewhere.
$to_host_file_cmd "$1"
}
# end func_to_host_file
# func_to_tool_file ARG LAZY
# converts the file name ARG from $build format to toolchain format. Return
# result in func_to_tool_file_result. If the conversion in use is listed
# in (the comma separated) LAZY, no conversion takes place.
func_to_tool_file ()
{
$debug_cmd
# Wrap LAZY in commas so a whole-word membership test is a simple glob.
case ,$2, in
*,"$to_tool_file_cmd",*)
func_to_tool_file_result=$1
;;
*)
$to_tool_file_cmd "$1"
func_to_tool_file_result=$func_to_host_file_result
;;
esac
}
# end func_to_tool_file
# func_convert_file_noop ARG
# Copy ARG to func_to_host_file_result.
func_convert_file_noop ()
{
func_to_host_file_result=$1
}
# end func_convert_file_noop
# func_convert_file_msys_to_w32 ARG
# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic
# conversion to w32 is not available inside the cwrapper. Returns result in
# func_to_host_file_result.
func_convert_file_msys_to_w32 ()
{
$debug_cmd
func_to_host_file_result=$1
if test -n "$1"; then
# Delegate to the MSYS core helper, then validate the conversion.
func_convert_core_msys_to_w32 "$1"
func_to_host_file_result=$func_convert_core_msys_to_w32_result
fi
func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_msys_to_w32
# func_convert_file_cygwin_to_w32 ARG
# Convert file name ARG from Cygwin to w32 format. Returns result in
# func_to_host_file_result.
func_convert_file_cygwin_to_w32 ()
{
$debug_cmd
func_to_host_file_result=$1
if test -n "$1"; then
# because $build is cygwin, we call "the" cygpath in $PATH; no need to use
# LT_CYGPATH in this case.
func_to_host_file_result=`cygpath -m "$1"`
fi
func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_cygwin_to_w32
# func_convert_file_nix_to_w32 ARG
# Convert file name ARG from *nix to w32 format. Requires a wine environment
# and a working winepath. Returns result in func_to_host_file_result.
func_convert_file_nix_to_w32 ()
{
$debug_cmd
func_to_host_file_result=$1
if test -n "$1"; then
# Delegate to the wine core helper, then validate the conversion.
func_convert_core_file_wine_to_w32 "$1"
func_to_host_file_result=$func_convert_core_file_wine_to_w32_result
fi
func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_nix_to_w32
# func_convert_file_msys_to_cygwin ARG
# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
# Returns result in func_to_host_file_result.
func_convert_file_msys_to_cygwin ()
{
$debug_cmd
func_to_host_file_result=$1
if test -n "$1"; then
# Two steps: MSYS -> w32 via the core helper, then w32 -> Cygwin via
# cygpath ('-u' asks for the unix-style Cygwin form).
func_convert_core_msys_to_w32 "$1"
func_cygpath -u "$func_convert_core_msys_to_w32_result"
func_to_host_file_result=$func_cygpath_result
fi
func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_msys_to_cygwin
# func_convert_file_nix_to_cygwin ARG
# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed
# in a wine environment, working winepath, and LT_CYGPATH set. Returns result
# in func_to_host_file_result.
func_convert_file_nix_to_cygwin ()
{
$debug_cmd
func_to_host_file_result=$1
if test -n "$1"; then
# convert from *nix to w32, then use cygpath to convert from w32 to cygwin.
func_convert_core_file_wine_to_w32 "$1"
func_cygpath -u "$func_convert_core_file_wine_to_w32_result"
func_to_host_file_result=$func_cygpath_result
fi
func_convert_file_check "$1" "$func_to_host_file_result"
}
# end func_convert_file_nix_to_cygwin
#############################################
# $build to $host PATH CONVERSION FUNCTIONS #
#############################################
# invoked via '$to_host_path_cmd ARG'
#
# In each case, ARG is the path to be converted from $build to $host format.
# The result will be available in $func_to_host_path_result.
#
# Path separators are also converted from $build format to $host format. If
# ARG begins or ends with a path separator character, it is preserved (but
# converted to $host format) on output.
#
# All path conversion functions are named using the following convention:
# file name conversion function : func_convert_file_X_to_Y ()
# path conversion function : func_convert_path_X_to_Y ()
# where, for any given $build/$host combination the 'X_to_Y' value is the
# same. If conversion functions are added for new $build/$host combinations,
# the two new functions must follow this pattern, or func_init_to_host_path_cmd
# will break.
# func_init_to_host_path_cmd
# Ensures that function "pointer" variable $to_host_path_cmd is set to the
# appropriate value, based on the value of $to_host_file_cmd.
to_host_path_cmd=
func_init_to_host_path_cmd ()
{
$debug_cmd
# Derive func_convert_path_X_to_Y from func_convert_file_X_to_Y by
# swapping the name prefix; the result is memoized in $to_host_path_cmd.
if test -z "$to_host_path_cmd"; then
func_stripname 'func_convert_file_' '' "$to_host_file_cmd"
to_host_path_cmd=func_convert_path_$func_stripname_result
fi
}
# func_to_host_path ARG
# Converts the path ARG from $build format to $host format. Return result
# in func_to_host_path_result.
func_to_host_path ()
{
$debug_cmd
func_init_to_host_path_cmd
$to_host_path_cmd "$1"
}
# end func_to_host_path
# func_convert_path_noop ARG
# Copy ARG to func_to_host_path_result.
func_convert_path_noop ()
{
func_to_host_path_result=$1
}
# end func_convert_path_noop
# func_convert_path_msys_to_w32 ARG
# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic
# conversion to w32 is not available inside the cwrapper. Returns result in
# func_to_host_path_result.
func_convert_path_msys_to_w32 ()
{
$debug_cmd
# For empty ARG the result is the (empty) input itself.
func_to_host_path_result=$1
if test -n "$1"; then
# Remove leading and trailing path separator characters from ARG. MSYS
# behavior is inconsistent here; cygpath turns them into '.;' and ';.';
# and winepath ignores them completely.
func_stripname : : "$1"
func_to_host_path_tmp1=$func_stripname_result
func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
func_to_host_path_result=$func_convert_core_msys_to_w32_result
# Diagnose a conversion that emptied a non-empty path; ':' is the
# $build separator, ';' the $host (w32) separator.
func_convert_path_check : ";" \
"$func_to_host_path_tmp1" "$func_to_host_path_result"
# Re-attach any separator ARG originally began/ended with, in ';' form.
func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
fi
}
# end func_convert_path_msys_to_w32
# func_convert_path_cygwin_to_w32 ARG
# Convert path ARG from Cygwin to w32 format. Returns result in
# func_to_host_path_result.
func_convert_path_cygwin_to_w32 ()
{
$debug_cmd
# For empty ARG the result is the (empty) input itself.
func_to_host_path_result=$1
if test -n "$1"; then
# See func_convert_path_msys_to_w32:
func_stripname : : "$1"
func_to_host_path_tmp1=$func_stripname_result
# 'cygpath -m -p' converts the whole ':'-separated list to w32
# (forward-slash) form with ';' separators in one call.
func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"`
func_convert_path_check : ";" \
"$func_to_host_path_tmp1" "$func_to_host_path_result"
func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
fi
}
# end func_convert_path_cygwin_to_w32
# func_convert_path_nix_to_w32 ARG
# Convert path ARG from *nix to w32 format. Requires a wine environment and
# a working winepath. Returns result in func_to_host_path_result.
func_convert_path_nix_to_w32 ()
{
$debug_cmd
# For empty ARG the result is the (empty) input itself.
func_to_host_path_result=$1
if test -n "$1"; then
# See func_convert_path_msys_to_w32:
func_stripname : : "$1"
func_to_host_path_tmp1=$func_stripname_result
# Convert the ':'-separated list to w32 form via winepath.
func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
func_to_host_path_result=$func_convert_core_path_wine_to_w32_result
func_convert_path_check : ";" \
"$func_to_host_path_tmp1" "$func_to_host_path_result"
func_convert_path_front_back_pathsep ":*" "*:" ";" "$1"
fi
}
# end func_convert_path_nix_to_w32
# func_convert_path_msys_to_cygwin ARG
# Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set.
# Returns result in func_to_host_path_result.
func_convert_path_msys_to_cygwin ()
{
$debug_cmd
# For empty ARG the result is the (empty) input itself.
func_to_host_path_result=$1
if test -n "$1"; then
# See func_convert_path_msys_to_w32:
func_stripname : : "$1"
func_to_host_path_tmp1=$func_stripname_result
# Two-step conversion: MSYS -> w32, then w32 -> Cygwin via the
# cygpath helper ('-p' converts a whole path list).
func_convert_core_msys_to_w32 "$func_to_host_path_tmp1"
func_cygpath -u -p "$func_convert_core_msys_to_w32_result"
func_to_host_path_result=$func_cygpath_result
# Both $build and $host use ':' as the path separator here.
func_convert_path_check : : \
"$func_to_host_path_tmp1" "$func_to_host_path_result"
func_convert_path_front_back_pathsep ":*" "*:" : "$1"
fi
}
# end func_convert_path_msys_to_cygwin
# func_convert_path_nix_to_cygwin ARG
# Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in
# a wine environment, working winepath, and LT_CYGPATH set. Returns result in
# func_to_host_path_result.
func_convert_path_nix_to_cygwin ()
{
$debug_cmd
# For empty ARG the result is the (empty) input itself.
func_to_host_path_result=$1
if test -n "$1"; then
# Remove leading and trailing path separator characters from
# ARG. msys behavior is inconsistent here, cygpath turns them
# into '.;' and ';.', and winepath ignores them completely.
func_stripname : : "$1"
func_to_host_path_tmp1=$func_stripname_result
# Two-step conversion: *nix -> w32 via winepath, then
# w32 -> Cygwin via the cygpath helper.
func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1"
func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result"
func_to_host_path_result=$func_cygpath_result
# Both $build and $host use ':' as the path separator here.
func_convert_path_check : : \
"$func_to_host_path_tmp1" "$func_to_host_path_result"
func_convert_path_front_back_pathsep ":*" "*:" : "$1"
fi
}
# end func_convert_path_nix_to_cygwin
# func_dll_def_p FILE
# True iff FILE is a Windows DLL '.def' file.
# Keep in sync with _LT_DLL_DEF_P in libtool.m4
func_dll_def_p ()
{
$debug_cmd
# Examine the first significant line of FILE with sed:
#   1. strip leading whitespace,
#   2. delete blank lines and ';' comment lines,
#   3. print 'DEF' if the line begins with EXPORTS or LIBRARY,
#   4. quit after the first line examined.
func_dll_def_p_tmp=`$SED -n \
-e 's/^[ ]*//' \
-e '/^\(;.*\)*$/d' \
-e 's/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p' \
-e q \
"$1"`
# Succeed iff the sed program printed DEF.
test DEF = "$func_dll_def_p_tmp"
}
# func_mode_compile arg...
func_mode_compile ()
{
$debug_cmd
# Compile mode: wrap one compiler invocation so that a PIC object
# (placed in $objdir, for shared libraries) and/or a non-PIC object
# (for static libraries) are produced from a single SOURCEFILE, and a
# '.lo' libtool object recording both is written.
# Get the compilation command and the source file.
base_compile=
srcfile=$nonopt # always keep a non-empty value in "srcfile"
suppress_opt=yes
suppress_output=
arg_mode=normal
libobj=
later=
pie_flag=
for arg
do
case $arg_mode in
arg )
# do not "continue". Instead, add this to base_compile
lastarg=$arg
arg_mode=normal
;;
target )
libobj=$arg
arg_mode=normal
continue
;;
normal )
# Accept any command-line options.
case $arg in
-o)
test -n "$libobj" && \
func_fatal_error "you cannot specify '-o' more than once"
arg_mode=target
continue
;;
-pie | -fpie | -fPIE)
func_append pie_flag " $arg"
continue
;;
-shared | -static | -prefer-pic | -prefer-non-pic)
func_append later " $arg"
continue
;;
-no-suppress)
suppress_opt=no
continue
;;
-Xcompiler)
arg_mode=arg # the next one goes into the "base_compile" arg list
continue # The current "srcfile" will either be retained or
;; # replaced later. I would guess that would be a bug.
-Wc,*)
func_stripname '-Wc,' '' "$arg"
args=$func_stripname_result
lastarg=
save_ifs=$IFS; IFS=,
for arg in $args; do
IFS=$save_ifs
func_append_quoted lastarg "$arg"
done
IFS=$save_ifs
func_stripname ' ' '' "$lastarg"
lastarg=$func_stripname_result
# Add the arguments to base_compile.
func_append base_compile " $lastarg"
continue
;;
*)
# Accept the current argument as the source file.
# The previous "srcfile" becomes the current argument.
#
lastarg=$srcfile
srcfile=$arg
;;
esac # case $arg
;;
esac # case $arg_mode
# Aesthetically quote the previous argument.
func_append_quoted base_compile "$lastarg"
done # for arg
case $arg_mode in
arg)
# Fix: the user-visible option is '-Xcompiler' (parsed above); the
# message previously named a nonexistent '-Xcompile' option.
func_fatal_error "you must specify an argument for -Xcompiler"
;;
target)
func_fatal_error "you must specify a target with '-o'"
;;
*)
# Get the name of the library object.
test -z "$libobj" && {
func_basename "$srcfile"
libobj=$func_basename_result
}
;;
esac
# Recognize several different file suffixes.
# If the user specifies -o file.o, it is replaced with file.lo
case $libobj in
*.[cCFSifmso] | \
*.ada | *.adb | *.ads | *.asm | \
*.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \
*.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup)
func_xform "$libobj"
libobj=$func_xform_result
;;
esac
case $libobj in
*.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;;
*)
func_fatal_error "cannot determine name of library object from '$libobj'"
;;
esac
func_infer_tag $base_compile
for arg in $later; do
case $arg in
-shared)
test yes = "$build_libtool_libs" \
|| func_fatal_configuration "cannot build a shared library"
build_old_libs=no
continue
;;
-static)
build_libtool_libs=no
build_old_libs=yes
continue
;;
-prefer-pic)
pic_mode=yes
continue
;;
-prefer-non-pic)
pic_mode=no
continue
;;
esac
done
# Warn (but do not fail) if the object name contains shell metacharacters.
func_quote_for_eval "$libobj"
test "X$libobj" != "X$func_quote_for_eval_result" \
&& $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \
&& func_warning "libobj name '$libobj' may not contain shell special characters."
func_dirname_and_basename "$obj" "/" ""
objname=$func_basename_result
xdir=$func_dirname_result
lobj=$xdir$objdir/$objname
test -z "$base_compile" && \
func_fatal_help "you must specify a compilation command"
# Delete any leftover library objects.
if test yes = "$build_old_libs"; then
removelist="$obj $lobj $libobj ${libobj}T"
else
removelist="$lobj $libobj ${libobj}T"
fi
# On Cygwin there's no "real" PIC flag so we must build both object types
case $host_os in
cygwin* | mingw* | pw32* | os2* | cegcc*)
pic_mode=default
;;
esac
if test no = "$pic_mode" && test pass_all != "$deplibs_check_method"; then
# non-PIC code in shared libraries is not supported
pic_mode=default
fi
# Calculate the filename of the output object if compiler does
# not support -o with -c
if test no = "$compiler_c_o"; then
output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.$objext
lockfile=$output_obj.lock
else
output_obj=
need_locks=no
lockfile=
fi
# Lock this critical section if it is needed
# We use this script file to make the link, it avoids creating a new file
if test yes = "$need_locks"; then
until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
func_echo "Waiting for $lockfile to be removed"
sleep 2
done
elif test warn = "$need_locks"; then
if test -f "$lockfile"; then
$ECHO "\
*** ERROR, $lockfile exists and contains:
`cat $lockfile 2>/dev/null`
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support '-c' and '-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$opt_dry_run || $RM $removelist
exit $EXIT_FAILURE
fi
func_append removelist " $output_obj"
$ECHO "$srcfile" > "$lockfile"
fi
$opt_dry_run || $RM $removelist
func_append removelist " $lockfile"
# Clean up intermediates on HUP/INT/TERM.
trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15
func_to_tool_file "$srcfile" func_convert_file_msys_to_w32
srcfile=$func_to_tool_file_result
func_quote_for_eval "$srcfile"
qsrcfile=$func_quote_for_eval_result
# Only build a PIC object if we are building libtool libraries.
if test yes = "$build_libtool_libs"; then
# Without this assignment, base_compile gets emptied.
fbsd_hideous_sh_bug=$base_compile
if test no != "$pic_mode"; then
command="$base_compile $qsrcfile $pic_flag"
else
# Don't build PIC code
command="$base_compile $qsrcfile"
fi
func_mkdir_p "$xdir$objdir"
if test -z "$output_obj"; then
# Place PIC objects in $objdir
func_append command " -o $lobj"
fi
func_show_eval_locale "$command" \
'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE'
if test warn = "$need_locks" &&
test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
$ECHO "\
*** ERROR, $lockfile contains:
`cat $lockfile 2>/dev/null`
but it should contain:
$srcfile
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support '-c' and '-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$opt_dry_run || $RM $removelist
exit $EXIT_FAILURE
fi
# Just move the object if needed, then go on to compile the next one
if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
func_show_eval '$MV "$output_obj" "$lobj"' \
'error=$?; $opt_dry_run || $RM $removelist; exit $error'
fi
# Allow error messages only from the first compilation.
if test yes = "$suppress_opt"; then
suppress_output=' >/dev/null 2>&1'
fi
fi
# Only build a position-dependent object if we build old libraries.
if test yes = "$build_old_libs"; then
if test yes != "$pic_mode"; then
# Don't build PIC code
command="$base_compile $qsrcfile$pie_flag"
else
command="$base_compile $qsrcfile $pic_flag"
fi
if test yes = "$compiler_c_o"; then
func_append command " -o $obj"
fi
# Suppress compiler output if we already did a PIC compilation.
func_append command "$suppress_output"
func_show_eval_locale "$command" \
'$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE'
if test warn = "$need_locks" &&
test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
$ECHO "\
*** ERROR, $lockfile contains:
`cat $lockfile 2>/dev/null`
but it should contain:
$srcfile
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support '-c' and '-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$opt_dry_run || $RM $removelist
exit $EXIT_FAILURE
fi
# Just move the object if needed
if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
func_show_eval '$MV "$output_obj" "$obj"' \
'error=$?; $opt_dry_run || $RM $removelist; exit $error'
fi
fi
$opt_dry_run || {
# Record the PIC/non-PIC object names in the .lo libtool object.
func_write_libtool_object "$libobj" "$objdir/$objname" "$objname"
# Unlock the critical section if it was locked
if test no != "$need_locks"; then
removelist=$lockfile
$RM "$lockfile"
fi
}
exit $EXIT_SUCCESS
}
# Dispatch: unless we are only showing help, run compile mode now.
$opt_help || {
test compile = "$opt_mode" && func_mode_compile ${1+"$@"}
}
func_mode_help ()
{
# Print the help text for mode $opt_mode (generic help when unset).
# We need to display help for each of the modes.
case $opt_mode in
"")
# Generic help is extracted from the usage comments
# at the start of this file.
func_help
;;
clean)
$ECHO \
"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
Remove files from the build directory.
RM is the name of the program to use to delete files associated with each FILE
(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed
to RM.
If FILE is a libtool library, object or program, all the files associated
with it are deleted. Otherwise, only FILE itself is deleted using RM."
;;
compile)
$ECHO \
"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
Compile a source file into a libtool library object.
This mode accepts the following additional options:
-o OUTPUT-FILE set the output file name to OUTPUT-FILE
-no-suppress do not suppress compiler output for multiple passes
-prefer-pic try to build PIC objects only
-prefer-non-pic try to build non-PIC objects only
-shared do not build a '.o' file suitable for static linking
-static only build a '.o' file suitable for static linking
-Wc,FLAG pass FLAG directly to the compiler
COMPILE-COMMAND is a command to be used in creating a 'standard' object file
from the given SOURCEFILE.
The output file name is determined by removing the directory component from
SOURCEFILE, then substituting the C source code suffix '.c' with the
library object suffix, '.lo'."
;;
execute)
$ECHO \
"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]...
Automatically set library path, then run a program.
This mode accepts the following additional options:
-dlopen FILE add the directory containing FILE to the library path
This mode sets the library path environment variable according to '-dlopen'
flags.
If any of the ARGS are libtool executable wrappers, then they are translated
into their corresponding uninstalled binary, and any of their required library
directories are added to the library path.
Then, COMMAND is executed, with ARGS as arguments."
;;
finish)
$ECHO \
"Usage: $progname [OPTION]... --mode=finish [LIBDIR]...
Complete the installation of libtool libraries.
Each LIBDIR is a directory that contains libtool libraries.
The commands that this mode executes may require superuser privileges. Use
the '--dry-run' option if you just want to see what would be executed."
;;
install)
$ECHO \
"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND...
Install executables or libraries.
INSTALL-COMMAND is the installation command. The first component should be
either the 'install' or 'cp' program.
The following components of INSTALL-COMMAND are treated specially:
-inst-prefix-dir PREFIX-DIR Use PREFIX-DIR as a staging area for installation
The rest of the components are interpreted as arguments to that command (only
BSD-compatible install options are recognized)."
;;
link)
$ECHO \
"Usage: $progname [OPTION]... --mode=link LINK-COMMAND...
Link object files or libraries together to form another library, or to
create an executable program.
LINK-COMMAND is a command using the C compiler that you would use to create
a program from several object files.
The following components of LINK-COMMAND are treated specially:
-all-static do not do any dynamic linking at all
-avoid-version do not add a version suffix if possible
-bindir BINDIR specify path to binaries directory (for systems where
libraries must be found in the PATH setting at runtime)
-dlopen FILE '-dlpreopen' FILE if it cannot be dlopened at runtime
-dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols
-export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
-export-symbols SYMFILE
try to export only the symbols listed in SYMFILE
-export-symbols-regex REGEX
try to export only the symbols matching REGEX
-LLIBDIR search LIBDIR for required installed libraries
-lNAME OUTPUT-FILE requires the installed library libNAME
-module build a library that can dlopened
-no-fast-install disable the fast-install mode
-no-install link a not-installable executable
-no-undefined declare that a library does not refer to external symbols
-o OUTPUT-FILE create OUTPUT-FILE from the specified objects
-objectlist FILE use a list of object files found in FILE to specify objects
-os2dllname NAME force a short DLL name on OS/2 (no effect on other OSes)
-precious-files-regex REGEX
don't remove output files matching REGEX
-release RELEASE specify package release information
-rpath LIBDIR the created library will eventually be installed in LIBDIR
-R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries
-shared only do dynamic linking of libtool libraries
-shrext SUFFIX override the standard shared library file extension
-static do not do any dynamic linking of uninstalled libtool libraries
-static-libtool-libs
do not do any dynamic linking of libtool libraries
-version-info CURRENT[:REVISION[:AGE]]
specify library version info [each variable defaults to 0]
-weak LIBNAME declare that the target provides the LIBNAME interface
-Wc,FLAG
-Xcompiler FLAG pass linker-specific FLAG directly to the compiler
-Wl,FLAG
-Xlinker FLAG pass linker-specific FLAG directly to the linker
-XCClinker FLAG pass link-specific FLAG to the compiler driver (CC)
All other options (arguments beginning with '-') are ignored.
Every other argument is treated as a filename. Files ending in '.la' are
treated as uninstalled libtool libraries, other files are standard or library
object files.
If the OUTPUT-FILE ends in '.la', then a libtool library is created,
only library objects ('.lo' files) may be specified, and '-rpath' is
required, except when creating a convenience library.
If OUTPUT-FILE ends in '.a' or '.lib', then a standard library is created
using 'ar' and 'ranlib', or on Windows using 'lib'.
If OUTPUT-FILE ends in '.lo' or '.$objext', then a reloadable object file
is created, otherwise an executable program is created."
;;
uninstall)
$ECHO \
"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
Remove libraries from an installation directory.
RM is the name of the program to use to delete files associated with each FILE
(typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed
to RM.
If FILE is a libtool library, all the files associated with it are deleted.
Otherwise, only FILE itself is deleted using RM."
;;
*)
func_fatal_help "invalid operation mode '$opt_mode'"
;;
esac
# Trailing pointer to the other modes' help.
echo
$ECHO "Try '$progname --help' for more information about other modes."
}
# Now that we've collected a possible --mode arg, show help if necessary
if $opt_help; then
if test : = "$opt_help"; then
func_mode_help
else
# help-all: first emit a combined usage summary (first line kept,
# subsequent Usage: lines rewritten as ' or:' continuations) ...
{
func_help noexit
for opt_mode in compile link execute install finish uninstall clean; do
func_mode_help
done
} | $SED -n '1p; 2,$s/^Usage:/ or: /p'
# ... then each mode's full description, with the per-mode
# boilerplate stripped by the sed program below.
{
func_help noexit
for opt_mode in compile link execute install finish uninstall clean; do
echo
func_mode_help
done
} |
$SED '1d
/^When reporting/,/^Report/{
H
d
}
$x
/information about other modes/d
/more detailed .*MODE/d
s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/'
fi
exit $?
fi
# func_mode_execute arg...
func_mode_execute ()
{
$debug_cmd
# Execute mode: run COMMAND with the shared-library search path
# ($shlibpath_var) extended for any '-dlopen'ed libtool libraries, and
# with libtool wrapper-script arguments translated to the real binaries.
# The first argument is the command name.
cmd=$nonopt
test -z "$cmd" && \
func_fatal_help "you must specify a COMMAND"
# Handle -dlopen flags immediately.
for file in $opt_dlopen; do
test -f "$file" \
|| func_fatal_help "'$file' is not a file"
dir=
case $file in
*.la)
func_resolve_sysroot "$file"
file=$func_resolve_sysroot_result
# Check to see that this really is a libtool archive.
func_lalib_unsafe_p "$file" \
|| func_fatal_help "'$lib' is not a valid libtool archive"
# Read the libtool library.
dlname=
library_names=
func_source "$file"
# Skip this library if it cannot be dlopened.
if test -z "$dlname"; then
# Warn if it was a shared library.
test -n "$library_names" && \
func_warning "'$file' was not linked with '-export-dynamic'"
continue
fi
func_dirname "$file" "" "."
dir=$func_dirname_result
# Prefer the uninstalled copy under $objdir when present.
if test -f "$dir/$objdir/$dlname"; then
func_append dir "/$objdir"
else
if test ! -f "$dir/$dlname"; then
func_fatal_error "cannot find '$dlname' in '$dir' or '$dir/$objdir'"
fi
fi
;;
*.lo)
# Just add the directory containing the .lo file.
func_dirname "$file" "" "."
dir=$func_dirname_result
;;
*)
func_warning "'-dlopen' is ignored for non-libtool libraries and objects"
continue
;;
esac
# Get the absolute pathname.
absdir=`cd "$dir" && pwd`
test -n "$absdir" && dir=$absdir
# Now add the directory to shlibpath_var.
if eval "test -z \"\$$shlibpath_var\""; then
eval "$shlibpath_var=\"\$dir\""
else
eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
fi
done
# This variable tells wrapper scripts just to set shlibpath_var
# rather than running their programs.
libtool_execute_magic=$magic
# Check if any of the arguments is a wrapper script.
args=
for file
do
case $file in
-* | *.la | *.lo ) ;;
*)
# Do a test to see if this is really a libtool program.
if func_ltwrapper_script_p "$file"; then
func_source "$file"
# Transform arg to wrapped name.
file=$progdir/$program
elif func_ltwrapper_executable_p "$file"; then
func_ltwrapper_scriptname "$file"
func_source "$func_ltwrapper_scriptname_result"
# Transform arg to wrapped name.
file=$progdir/$program
fi
;;
esac
# Quote arguments (to preserve shell metacharacters).
func_append_quoted args "$file"
done
if $opt_dry_run; then
# Display what would be done.
if test -n "$shlibpath_var"; then
eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\""
echo "export $shlibpath_var"
fi
$ECHO "$cmd$args"
exit $EXIT_SUCCESS
else
if test -n "$shlibpath_var"; then
# Export the shlibpath_var.
eval "export $shlibpath_var"
fi
# Restore saved environment variables
for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES
do
eval "if test \"\${save_$lt_var+set}\" = set; then
$lt_var=\$save_$lt_var; export $lt_var
else
$lt_unset $lt_var
fi"
done
# Now prepare to actually exec the command.
# NOTE(review): exec_cmd appears to be evaluated by the caller after
# this function returns; the exec does not happen here — confirm.
exec_cmd=\$cmd$args
fi
}
# Dispatch execute mode if requested.
test execute = "$opt_mode" && func_mode_execute ${1+"$@"}
# func_mode_finish arg...
func_mode_finish ()
{
$debug_cmd
# Finish mode: strip sysroot ('=') prefixes from the given .la files,
# run the configured finish_cmds/finish_eval for each LIBDIR, then
# print advice on how to link against the installed libraries.
libs=
libdirs=
admincmds=
for opt in "$nonopt" ${1+"$@"}
do
if test -d "$opt"; then
func_append libdirs " $opt"
elif test -f "$opt"; then
if func_lalib_unsafe_p "$opt"; then
func_append libs " $opt"
else
func_warning "'$opt' is not a valid libtool archive"
fi
else
func_fatal_error "invalid argument '$opt'"
fi
done
if test -n "$libs"; then
if test -n "$lt_sysroot"; then
# Build a sed command that deletes literal $lt_sysroot occurrences.
sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"`
sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;"
else
sysroot_cmd=
fi
# Remove sysroot references
if $opt_dry_run; then
for lib in $libs; do
echo "removing references to $lt_sysroot and '=' prefixes from $lib"
done
else
# Rewrite each .la in a temporary directory, then move it back.
tmpdir=`func_mktempdir`
for lib in $libs; do
$SED -e "$sysroot_cmd s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \
> $tmpdir/tmp-la
mv -f $tmpdir/tmp-la $lib
done
${RM}r "$tmpdir"
fi
fi
if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
for libdir in $libdirs; do
if test -n "$finish_cmds"; then
# Do each command in the finish commands.
func_execute_cmds "$finish_cmds" 'admincmds="$admincmds
'"$cmd"'"'
fi
if test -n "$finish_eval"; then
# Do the single finish_eval.
eval cmds=\"$finish_eval\"
$opt_dry_run || eval "$cmds" || func_append admincmds "
$cmds"
fi
done
fi
# Exit here if they wanted silent mode.
$opt_quiet && exit $EXIT_SUCCESS
if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
echo "----------------------------------------------------------------------"
echo "Libraries have been installed in:"
for libdir in $libdirs; do
$ECHO " $libdir"
done
echo
echo "If you ever happen to want to link against installed libraries"
echo "in a given directory, LIBDIR, you must either use libtool, and"
echo "specify the full pathname of the library, or use the '-LLIBDIR'"
echo "flag during linking and do at least one of the following:"
if test -n "$shlibpath_var"; then
echo " - add LIBDIR to the '$shlibpath_var' environment variable"
echo " during execution"
fi
if test -n "$runpath_var"; then
echo " - add LIBDIR to the '$runpath_var' environment variable"
echo " during linking"
fi
if test -n "$hardcode_libdir_flag_spec"; then
libdir=LIBDIR
eval flag=\"$hardcode_libdir_flag_spec\"
$ECHO " - use the '$flag' linker flag"
fi
if test -n "$admincmds"; then
$ECHO " - have your system administrator run these commands:$admincmds"
fi
if test -f /etc/ld.so.conf; then
echo " - have your system administrator add LIBDIR to '/etc/ld.so.conf'"
fi
echo
echo "See any operating system documentation about shared libraries for"
case $host in
solaris2.[6789]|solaris2.1[0-9])
echo "more information, such as the ld(1), crle(1) and ld.so(8) manual"
echo "pages."
;;
*)
echo "more information, such as the ld(1) and ld.so(8) manual pages."
;;
esac
echo "----------------------------------------------------------------------"
fi
exit $EXIT_SUCCESS
}
# Dispatch finish mode if requested.
test finish = "$opt_mode" && func_mode_finish ${1+"$@"}
# func_mode_install arg...
func_mode_install ()
{
$debug_cmd
# There may be an optional sh(1) argument at the beginning of
# install_prog (especially on Windows NT).
if test "$SHELL" = "$nonopt" || test /bin/sh = "$nonopt" ||
# Allow the use of GNU shtool's install command.
case $nonopt in *shtool*) :;; *) false;; esac
then
# Aesthetically quote it.
func_quote_for_eval "$nonopt"
install_prog="$func_quote_for_eval_result "
arg=$1
shift
else
install_prog=
arg=$nonopt
fi
# The real first argument should be the name of the installation program.
# Aesthetically quote it.
func_quote_for_eval "$arg"
func_append install_prog "$func_quote_for_eval_result"
install_shared_prog=$install_prog
case " $install_prog " in
*[\\\ /]cp\ *) install_cp=: ;;
*) install_cp=false ;;
esac
# We need to accept at least all the BSD install flags.
dest=
files=
opts=
prev=
install_type=
isdir=false
stripme=
no_mode=:
for arg
do
arg2=
if test -n "$dest"; then
func_append files " $dest"
dest=$arg
continue
fi
case $arg in
-d) isdir=: ;;
-f)
if $install_cp; then :; else
prev=$arg
fi
;;
-g | -m | -o)
prev=$arg
;;
-s)
stripme=" -s"
continue
;;
-*)
;;
*)
# If the previous option needed an argument, then skip it.
if test -n "$prev"; then
if test X-m = "X$prev" && test -n "$install_override_mode"; then
arg2=$install_override_mode
no_mode=false
fi
prev=
else
dest=$arg
continue
fi
;;
esac
# Aesthetically quote the argument.
func_quote_for_eval "$arg"
func_append install_prog " $func_quote_for_eval_result"
if test -n "$arg2"; then
func_quote_for_eval "$arg2"
fi
func_append install_shared_prog " $func_quote_for_eval_result"
done
test -z "$install_prog" && \
func_fatal_help "you must specify an install program"
test -n "$prev" && \
func_fatal_help "the '$prev' option requires an argument"
if test -n "$install_override_mode" && $no_mode; then
if $install_cp; then :; else
func_quote_for_eval "$install_override_mode"
func_append install_shared_prog " -m $func_quote_for_eval_result"
fi
fi
if test -z "$files"; then
if test -z "$dest"; then
func_fatal_help "no file or destination specified"
else
func_fatal_help "you must specify a destination"
fi
fi
# Strip any trailing slash from the destination.
func_stripname '' '/' "$dest"
dest=$func_stripname_result
# Check to see that the destination is a directory.
test -d "$dest" && isdir=:
if $isdir; then
destdir=$dest
destname=
else
func_dirname_and_basename "$dest" "" "."
destdir=$func_dirname_result
destname=$func_basename_result
# Not a directory, so check to see that there is only one file specified.
set dummy $files; shift
test "$#" -gt 1 && \
func_fatal_help "'$dest' is not a directory"
fi
case $destdir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
for file in $files; do
case $file in
*.lo) ;;
*)
func_fatal_help "'$destdir' must be an absolute directory name"
;;
esac
done
;;
esac
# This variable tells wrapper scripts just to set variables rather
# than running their programs.
libtool_install_magic=$magic
staticlibs=
future_libdirs=
current_libdirs=
for file in $files; do
# Do each installation.
case $file in
*.$libext)
# Do the static libraries later.
func_append staticlibs " $file"
;;
*.la)
func_resolve_sysroot "$file"
file=$func_resolve_sysroot_result
# Check to see that this really is a libtool archive.
func_lalib_unsafe_p "$file" \
|| func_fatal_help "'$file' is not a valid libtool archive"
library_names=
old_library=
relink_command=
func_source "$file"
# Add the libdir to current_libdirs if it is the destination.
if test "X$destdir" = "X$libdir"; then
case "$current_libdirs " in
*" $libdir "*) ;;
*) func_append current_libdirs " $libdir" ;;
esac
else
# Note the libdir as a future libdir.
case "$future_libdirs " in
*" $libdir "*) ;;
*) func_append future_libdirs " $libdir" ;;
esac
fi
func_dirname "$file" "/" ""
dir=$func_dirname_result
func_append dir "$objdir"
if test -n "$relink_command"; then
# Determine the prefix the user has applied to our future dir.
inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"`
# Don't allow the user to place us outside of our expected
# location b/c this prevents finding dependent libraries that
# are installed to the same prefix.
# At present, this check doesn't affect windows .dll's that
# are installed into $libdir/../bin (currently, that works fine)
# but it's something to keep an eye on.
test "$inst_prefix_dir" = "$destdir" && \
func_fatal_error "error: cannot install '$file' to a directory not ending in $libdir"
if test -n "$inst_prefix_dir"; then
# Stick the inst_prefix_dir data into the link command.
relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
else
relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
fi
func_warning "relinking '$file'"
func_show_eval "$relink_command" \
'func_fatal_error "error: relink '\''$file'\'' with the above command before installing it"'
fi
# See the names of the shared library.
set dummy $library_names; shift
if test -n "$1"; then
realname=$1
shift
srcname=$realname
test -n "$relink_command" && srcname=${realname}T
# Install the shared library and build the symlinks.
func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \
'exit $?'
tstripme=$stripme
case $host_os in
cygwin* | mingw* | pw32* | cegcc*)
case $realname in
*.dll.a)
tstripme=
;;
esac
;;
os2*)
case $realname in
*_dll.a)
tstripme=
;;
esac
;;
esac
if test -n "$tstripme" && test -n "$striplib"; then
func_show_eval "$striplib $destdir/$realname" 'exit $?'
fi
if test "$#" -gt 0; then
# Delete the old symlinks, and create new ones.
# Try 'ln -sf' first, because the 'ln' binary might depend on
# the symlink we replace! Solaris /bin/ln does not understand -f,
# so we also need to try rm && ln -s.
for linkname
do
test "$linkname" != "$realname" \
&& func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })"
done
fi
# Do each command in the postinstall commands.
lib=$destdir/$realname
func_execute_cmds "$postinstall_cmds" 'exit $?'
fi
# Install the pseudo-library for information purposes.
func_basename "$file"
name=$func_basename_result
instname=$dir/${name}i
func_show_eval "$install_prog $instname $destdir/$name" 'exit $?'
# Maybe install the static library, too.
test -n "$old_library" && func_append staticlibs " $dir/$old_library"
;;
*.lo)
# Install (i.e. copy) a libtool object.
# Figure out destination file name, if it wasn't already specified.
if test -n "$destname"; then
destfile=$destdir/$destname
else
func_basename "$file"
destfile=$func_basename_result
destfile=$destdir/$destfile
fi
# Deduce the name of the destination old-style object file.
case $destfile in
*.lo)
func_lo2o "$destfile"
staticdest=$func_lo2o_result
;;
*.$objext)
staticdest=$destfile
destfile=
;;
*)
func_fatal_help "cannot copy a libtool object to '$destfile'"
;;
esac
# Install the libtool object if requested.
test -n "$destfile" && \
func_show_eval "$install_prog $file $destfile" 'exit $?'
# Install the old object if enabled.
if test yes = "$build_old_libs"; then
# Deduce the name of the old-style object file.
func_lo2o "$file"
staticobj=$func_lo2o_result
func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?'
fi
exit $EXIT_SUCCESS
;;
*)
# Figure out destination file name, if it wasn't already specified.
if test -n "$destname"; then
destfile=$destdir/$destname
else
func_basename "$file"
destfile=$func_basename_result
destfile=$destdir/$destfile
fi
# If the file is missing, and there is a .exe on the end, strip it
# because it is most likely a libtool script we actually want to
# install
stripped_ext=
case $file in
*.exe)
if test ! -f "$file"; then
func_stripname '' '.exe' "$file"
file=$func_stripname_result
stripped_ext=.exe
fi
;;
esac
# Do a test to see if this is really a libtool program.
case $host in
*cygwin* | *mingw*)
if func_ltwrapper_executable_p "$file"; then
func_ltwrapper_scriptname "$file"
wrapper=$func_ltwrapper_scriptname_result
else
func_stripname '' '.exe' "$file"
wrapper=$func_stripname_result
fi
;;
*)
wrapper=$file
;;
esac
if func_ltwrapper_script_p "$wrapper"; then
notinst_deplibs=
relink_command=
func_source "$wrapper"
# Check the variables that should have been set.
test -z "$generated_by_libtool_version" && \
func_fatal_error "invalid libtool wrapper script '$wrapper'"
finalize=:
for lib in $notinst_deplibs; do
# Check to see that each library is installed.
libdir=
if test -f "$lib"; then
func_source "$lib"
fi
libfile=$libdir/`$ECHO "$lib" | $SED 's%^.*/%%g'`
if test -n "$libdir" && test ! -f "$libfile"; then
func_warning "'$lib' has not been installed in '$libdir'"
finalize=false
fi
done
relink_command=
func_source "$wrapper"
outputname=
if test no = "$fast_install" && test -n "$relink_command"; then
$opt_dry_run || {
if $finalize; then
tmpdir=`func_mktempdir`
func_basename "$file$stripped_ext"
file=$func_basename_result
outputname=$tmpdir/$file
# Replace the output file specification.
relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'`
$opt_quiet || {
func_quote_for_expand "$relink_command"
eval "func_echo $func_quote_for_expand_result"
}
if eval "$relink_command"; then :
else
func_error "error: relink '$file' with the above command before installing it"
$opt_dry_run || ${RM}r "$tmpdir"
continue
fi
file=$outputname
else
func_warning "cannot relink '$file'"
fi
}
else
# Install the binary that we compiled earlier.
file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"`
fi
fi
# remove .exe since cygwin /usr/bin/install will append another
# one anyway
case $install_prog,$host in
*/usr/bin/install*,*cygwin*)
case $file:$destfile in
*.exe:*.exe)
# this is ok
;;
*.exe:*)
destfile=$destfile.exe
;;
*:*.exe)
func_stripname '' '.exe' "$destfile"
destfile=$func_stripname_result
;;
esac
;;
esac
func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?'
$opt_dry_run || if test -n "$outputname"; then
${RM}r "$tmpdir"
fi
;;
esac
done
for file in $staticlibs; do
func_basename "$file"
name=$func_basename_result
# Set up the ranlib parameters.
oldlib=$destdir/$name
func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
tool_oldlib=$func_to_tool_file_result
func_show_eval "$install_prog \$file \$oldlib" 'exit $?'
if test -n "$stripme" && test -n "$old_striplib"; then
func_show_eval "$old_striplib $tool_oldlib" 'exit $?'
fi
# Do each command in the postinstall commands.
func_execute_cmds "$old_postinstall_cmds" 'exit $?'
done
test -n "$future_libdirs" && \
func_warning "remember to run '$progname --finish$future_libdirs'"
if test -n "$current_libdirs"; then
# Maybe just do a dry run.
$opt_dry_run && current_libdirs=" -n$current_libdirs"
exec_cmd='$SHELL "$progpath" $preserve_args --finish$current_libdirs'
else
exit $EXIT_SUCCESS
fi
}
# Mode dispatch: when libtool was invoked with --mode=install, hand all
# remaining command-line arguments to func_mode_install.
test install = "$opt_mode" && func_mode_install ${1+"$@"}
# func_generate_dlsyms outputname originator pic_p
# Extract symbols from dlprefiles and create ${outputname}S.o with
# a dlpreopen symbol table.
func_generate_dlsyms ()
{
$debug_cmd
# $1 = basename for generated files (${1}S.c, ${1}S.$objext, $1.nm)
# $2 = originator tag; non-alphanumerics are mangled to '_' to form the
#      C symbol prefix lt_<prefix>_LTX_preloaded_symbols
# $3 = 'true' to compile the symbol table as PIC (default: false)
my_outputname=$1
my_originator=$2
my_pic_p=${3-false}
my_prefix=`$ECHO "$my_originator" | $SED 's%[^a-zA-Z0-9]%_%g'`
my_dlsyms=
# A symbol table is only needed when something was dlpreopened (or the
# program dlopens itself), and only possible when the platform gives us
# a way to list global symbols ($NM + $global_symbol_pipe).
if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
if test -n "$NM" && test -n "$global_symbol_pipe"; then
my_dlsyms=${my_outputname}S.c
else
func_error "not configured to extract global symbols from dlpreopened files"
fi
fi
if test -n "$my_dlsyms"; then
case $my_dlsyms in
"") ;;
*.c)
# Discover the nlist of each of the dlfiles.
nlist=$output_objdir/$my_outputname.nm
func_show_eval "$RM $nlist ${nlist}S ${nlist}T"
# Parse the name list into a source file.
func_verbose "creating $output_objdir/$my_dlsyms"
$opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\
/* $my_dlsyms - symbol resolution table for '$my_outputname' dlsym emulation. */
/* Generated by $PROGRAM (GNU $PACKAGE) $VERSION */
#ifdef __cplusplus
extern \"C\" {
#endif
#if defined __GNUC__ && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4))
#pragma GCC diagnostic ignored \"-Wstrict-prototypes\"
#endif
/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */
#if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE
/* DATA imports from DLLs on WIN32 can't be const, because runtime
relocations are performed -- see ld's documentation on pseudo-relocs. */
# define LT_DLSYM_CONST
#elif defined __osf__
/* This system does not cope well with relocations in const data. */
# define LT_DLSYM_CONST
#else
# define LT_DLSYM_CONST const
#endif
#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0)
/* External symbol declarations for the compiler. */\
"
# Include the program's own objects when it dlopens itself (-dlopen self).
if test yes = "$dlself"; then
func_verbose "generating symbol list for '$output'"
$opt_dry_run || echo ': @PROGRAM@ ' > "$nlist"
# Add our own program objects to the symbol list.
progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP`
for progfile in $progfiles; do
func_to_tool_file "$progfile" func_convert_file_msys_to_w32
func_verbose "extracting global C symbols from '$func_to_tool_file_result'"
$opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'"
done
# Drop symbols the configuration says must never be exported.
if test -n "$exclude_expsyms"; then
$opt_dry_run || {
eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
eval '$MV "$nlist"T "$nlist"'
}
fi
# Keep only symbols matching the user-supplied export regex, if any.
if test -n "$export_symbols_regex"; then
$opt_dry_run || {
eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
eval '$MV "$nlist"T "$nlist"'
}
fi
# Prepare the list of exported symbols
if test -z "$export_symbols"; then
export_symbols=$output_objdir/$outputname.exp
$opt_dry_run || {
$RM $export_symbols
eval "$SED -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
case $host in
*cygwin* | *mingw* | *cegcc* )
eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"'
;;
esac
}
else
# An explicit export list was given: turn it into anchored regexes,
# then keep only the nlist entries it selects.
$opt_dry_run || {
eval "$SED -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
eval '$MV "$nlist"T "$nlist"'
case $host in
*cygwin* | *mingw* | *cegcc* )
eval "echo EXPORTS "'> "$output_objdir/$outputname.def"'
eval 'cat "$nlist" >> "$output_objdir/$outputname.def"'
;;
esac
}
fi
fi
# Add the symbols of every -dlpreopened file to the name list.  Each
# file is preceded by a ': <name>' marker line.
for dlprefile in $dlprefiles; do
func_verbose "extracting global C symbols from '$dlprefile'"
func_basename "$dlprefile"
name=$func_basename_result
case $host in
*cygwin* | *mingw* | *cegcc* )
# if an import library, we need to obtain dlname
if func_win32_import_lib_p "$dlprefile"; then
func_tr_sh "$dlprefile"
eval "curr_lafile=\$libfile_$func_tr_sh_result"
dlprefile_dlbasename=
if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then
# Use subshell, to avoid clobbering current variable values
dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"`
if test -n "$dlprefile_dlname"; then
func_basename "$dlprefile_dlname"
dlprefile_dlbasename=$func_basename_result
else
# no lafile. user explicitly requested -dlpreopen .
$sharedlib_from_linklib_cmd "$dlprefile"
dlprefile_dlbasename=$sharedlib_from_linklib_result
fi
fi
$opt_dry_run || {
if test -n "$dlprefile_dlbasename"; then
eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"'
else
func_warning "Could not compute DLL name from $name"
eval '$ECHO ": $name " >> "$nlist"'
fi
func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe |
$SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'"
}
else # not an import lib
$opt_dry_run || {
eval '$ECHO ": $name " >> "$nlist"'
func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
}
fi
;;
*)
$opt_dry_run || {
eval '$ECHO ": $name " >> "$nlist"'
func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32
eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'"
}
;;
esac
done
# Post-process the accumulated name list and emit the C symbol table.
$opt_dry_run || {
# Make sure we have at least an empty file.
test -f "$nlist" || : > "$nlist"
if test -n "$exclude_expsyms"; then
$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
$MV "$nlist"T "$nlist"
fi
# Try sorting and uniquifying the output.
if $GREP -v "^: " < "$nlist" |
if sort -k 3 /dev/null 2>&1; then
sort -k 3
else
sort +2
fi |
uniq > "$nlist"S; then
:
else
$GREP -v "^: " < "$nlist" > "$nlist"S
fi
if test -f "$nlist"S; then
eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"'
else
echo '/* NONE */' >> "$output_objdir/$my_dlsyms"
fi
func_show_eval '$RM "${nlist}I"'
if test -n "$global_symbol_to_import"; then
eval "$global_symbol_to_import"' < "$nlist"S > "$nlist"I'
fi
echo >> "$output_objdir/$my_dlsyms" "\
/* The mapping between symbol names and symbols. */
typedef struct {
const char *name;
void *address;
} lt_dlsymlist;
extern LT_DLSYM_CONST lt_dlsymlist
lt_${my_prefix}_LTX_preloaded_symbols[];\
"
# If an import-symbol list was produced above, emit lt_syminit() to fix
# up those entries' addresses at runtime.
if test -s "$nlist"I; then
echo >> "$output_objdir/$my_dlsyms" "\
static void lt_syminit(void)
{
LT_DLSYM_CONST lt_dlsymlist *symbol = lt_${my_prefix}_LTX_preloaded_symbols;
for (; symbol->name; ++symbol)
{"
$SED 's/.*/ if (STREQ (symbol->name, \"&\")) symbol->address = (void *) \&&;/' < "$nlist"I >> "$output_objdir/$my_dlsyms"
echo >> "$output_objdir/$my_dlsyms" "\
}
}"
fi
echo >> "$output_objdir/$my_dlsyms" "\
LT_DLSYM_CONST lt_dlsymlist
lt_${my_prefix}_LTX_preloaded_symbols[] =
{ {\"$my_originator\", (void *) 0},"
if test -s "$nlist"I; then
echo >> "$output_objdir/$my_dlsyms" "\
{\"@INIT@\", (void *) &lt_syminit},"
fi
case $need_lib_prefix in
no)
eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms"
;;
*)
eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms"
;;
esac
echo >> "$output_objdir/$my_dlsyms" "\
{0, (void *) 0}
};
/* This works around a problem in FreeBSD linker */
#ifdef FREEBSD_WORKAROUND
static const void *lt_preloaded_setup() {
return lt_${my_prefix}_LTX_preloaded_symbols;
}
#endif
#ifdef __cplusplus
}
#endif\
"
} # !$opt_dry_run
# Choose PIC flags for compiling the generated symbol table source.
pic_flag_for_symtable=
case "$compile_command " in
*" -static "*) ;;
*)
case $host in
# compiling the symbol table file with pic_flag works around
# a FreeBSD bug that causes programs to crash when -lm is
# linked before any other PIC object. But we must not use
# pic_flag when linking with -static. The problem exists in
# FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
*-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;;
*-*-hpux*)
pic_flag_for_symtable=" $pic_flag" ;;
*)
$my_pic_p && pic_flag_for_symtable=" $pic_flag"
;;
esac
;;
esac
# Compile the symtab object with $LTCFLAGS minus any PIE flags.
symtab_cflags=
for arg in $LTCFLAGS; do
case $arg in
-pie | -fpie | -fPIE) ;;
*) func_append symtab_cflags " $arg" ;;
esac
done
# Now compile the dynamic symbol file.
func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?'
# Clean up the generated files.
func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T" "${nlist}I"'
# Transform the symbol file into the correct name.
symfileobj=$output_objdir/${my_outputname}S.$objext
case $host in
*cygwin* | *mingw* | *cegcc* )
if test -f "$output_objdir/$my_outputname.def"; then
compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"`
else
compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
fi
;;
*)
compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"`
finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"`
;;
esac
;;
*)
func_fatal_error "unknown suffix for '$my_dlsyms'"
;;
esac
else
# We keep going just in case the user didn't refer to
# lt_preloaded_symbols. The linker will fail if global_symbol_pipe
# really was required.
# Nullify the symbol file.
compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"`
finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"`
fi
}
# func_cygming_gnu_implib_p ARG
# This predicate returns with zero status (TRUE) if
# ARG is a GNU/binutils-style import library. Returns
# with nonzero status (FALSE) otherwise.
func_cygming_gnu_implib_p ()
{
$debug_cmd
# Map the file name into the native toolchain's notation, then scan its
# symbol dump for the bookkeeping symbols (_head_<lib> / <lib>_iname)
# that GNU binutils writes into import libraries.
func_to_tool_file "$1" func_convert_file_msys_to_w32
func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'`
# Return status 0 (TRUE) exactly when such a symbol was found.
case $func_cygming_gnu_implib_tmp in
'') false ;;
*) : ;;
esac
}
# func_cygming_ms_implib_p ARG
# This predicate returns with zero status (TRUE) if
# ARG is an MS-style import library. Returns
# with nonzero status (FALSE) otherwise.
func_cygming_ms_implib_p ()
{
$debug_cmd
# MS-style import libraries carry a _NULL_IMPORT_DESCRIPTOR symbol;
# look for it in the (tool-path-converted) file's symbol dump.
func_to_tool_file "$1" func_convert_file_msys_to_w32
func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'`
# Return status 0 (TRUE) exactly when the marker symbol was found.
case $func_cygming_ms_implib_tmp in
'') false ;;
*) : ;;
esac
}
# func_win32_libid arg
# return the library type of file 'arg'
#
# Need a lot of goo to handle *both* DLLs and import libs
# Has to be a shell function in order to 'eat' the argument
# that is supplied when $file_magic_command is called.
# Despite the name, also deal with 64 bit binaries.
func_win32_libid ()
{
$debug_cmd
# Classify $1 using file(1) output, refining the ambiguous 'ar archive'
# case (import vs. static) with $OBJDUMP/$NM.  Echoes one of:
#   "x86 archive import" | "x86 archive static" | "x86 DLL" | "unknown"
win32_libid_type=unknown
win32_fileres=`file -L $1 2>/dev/null`
case $win32_fileres in
*ar\ archive\ import\ library*) # definitely import
win32_libid_type="x86 archive import"
;;
*ar\ archive*) # could be an import, or static
# Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null |
$EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then
case $nm_interface in
"MS dumpbin")
# dumpbin output doesn't fit the posix-nm parse below; use the
# implib predicates to decide import vs. static instead.
if func_cygming_ms_implib_p "$1" ||
func_cygming_gnu_implib_p "$1"
then
win32_nmres=import
else
win32_nmres=
fi
;;
*)
# Emit 'import' if any of the first 100 nm lines shows an
# import-section (' I ') symbol.
func_to_tool_file "$1" func_convert_file_msys_to_w32
win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" |
$SED -n -e '
1,100{
/ I /{
s|.*|import|
p
q
}
}'`
;;
esac
case $win32_nmres in
import*) win32_libid_type="x86 archive import";;
*) win32_libid_type="x86 archive static";;
esac
fi
;;
*DLL*)
win32_libid_type="x86 DLL"
;;
*executable*) # but shell scripts are "executable" too...
case $win32_fileres in
*MS\ Windows\ PE\ Intel*)
win32_libid_type="x86 DLL"
;;
esac
;;
esac
$ECHO "$win32_libid_type"
}
# func_cygming_dll_for_implib ARG
#
# Platform-specific function to extract the
# name of the DLL associated with the specified
# import library ARG.
# Invoked by eval'ing the libtool variable
# $sharedlib_from_linklib_cmd
# Result is available in the variable
# $sharedlib_from_linklib_result
func_cygming_dll_for_implib ()
{
$debug_cmd
# Ask dlltool for the DLL name; per the binutils docs, --identify-strict
# makes it report an error instead of guessing when the implib members
# disagree about the DLL name.
sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"`
}
# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs
#
# The is the core of a fallback implementation of a
# platform-specific function to extract the name of the
# DLL associated with the specified import library LIBNAME.
#
# SECTION_NAME is either .idata$6 or .idata$7, depending
# on the platform and compiler that created the implib.
#
# Echos the name of the DLL associated with the
# specified import library.
func_cygming_dll_for_implib_fallback_core ()
{
$debug_cmd
# Escape $1 so it can be embedded literally in the sed address below.
match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"`
# Pipeline: dump the named section of every archive member, mark each
# member's section header, strip objdump's 43-column address/hex gutter,
# join each member's dump into one line, then pick the first plausible
# DLL name.
$OBJDUMP -s --section "$1" "$2" 2>/dev/null |
$SED '/^Contents of section '"$match_literal"':/{
# Place marker at beginning of archive member dllname section
s/.*/====MARK====/
p
d
}
# These lines can sometimes be longer than 43 characters, but
# are always uninteresting
/:[ ]*file format pe[i]\{,1\}-/d
/^In archive [^:]*:/d
# Ensure marker is printed
/^====MARK====/p
# Remove all lines with less than 43 characters
/^.\{43\}/!d
# From remaining lines, remove first 43 characters
s/^.\{43\}//' |
$SED -n '
# Join marker and all lines until next marker into a single line
/^====MARK====/ b para
H
$ b para
b
:para
x
s/\n//g
# Remove the marker
s/^====MARK====//
# Remove trailing dots and whitespace
s/[\. \t]*$//
# Print
/./p' |
# we now have a list, one entry per line, of the stringified
# contents of the appropriate section of all members of the
# archive that possess that section. Heuristic: eliminate
# all those that have a first or second character that is
# a '.' (that is, objdump's representation of an unprintable
# character.) This should work for all archives with less than
# 0x302f exports -- but will fail for DLLs whose name actually
# begins with a literal '.' or a single character followed by
# a '.'.
#
# Of those that remain, print the first one.
$SED -e '/^\./d;/^.\./d;q'
}
# func_cygming_dll_for_implib_fallback ARG
# Platform-specific function to extract the
# name of the DLL associated with the specified
# import library ARG.
#
# This fallback implementation is for use when $DLLTOOL
# does not support the --identify-strict option.
# Invoked by eval'ing the libtool variable
# $sharedlib_from_linklib_cmd
# Result is available in the variable
# $sharedlib_from_linklib_result
func_cygming_dll_for_implib_fallback ()
{
$debug_cmd
# Decide which .idata section carries the DLL name, based on which
# toolchain flavor produced the import library.
lt_implib_section=
if func_cygming_gnu_implib_p "$1"; then
# binutils import library
lt_implib_section='.idata$7'
elif func_cygming_ms_implib_p "$1"; then
# ms-generated import library
lt_implib_section='.idata$6'
fi
if test -n "$lt_implib_section"; then
sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core "$lt_implib_section" "$1"`
else
# unknown implib flavor: report no result
sharedlib_from_linklib_result=
fi
}
# func_extract_an_archive dir oldlib
func_extract_an_archive ()
{
$debug_cmd
# $1 = directory to extract into, $2 = archive to extract.
f_ex_an_ar_dir=$1; shift
f_ex_an_ar_oldlib=$1
# Optionally serialize extraction: 'ln' fails if the lock file already
# exists, so creating it doubles as an atomic lock acquisition.
if test yes = "$lock_old_archive_extraction"; then
lockfile=$f_ex_an_ar_oldlib.lock
until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do
func_echo "Waiting for $lockfile to be removed"
sleep 2
done
fi
# Extract; the failure handler also releases the lock before exiting.
func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \
'stat=$?; rm -f "$lockfile"; exit $stat'
if test yes = "$lock_old_archive_extraction"; then
$opt_dry_run || rm -f "$lockfile"
fi
# 'ar x' would silently overwrite same-named members; fail loudly if
# the member list is not unique ('sort | sort -uc' checks uniqueness).
if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
:
else
func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib"
fi
}
# func_extract_archives gentop oldlib ...
func_extract_archives ()
{
$debug_cmd
# $1 = scratch directory; remaining args = archives to extract.
# Result (space-separated object list) in $func_extract_archives_result.
my_gentop=$1; shift
my_oldlibs=${1+"$@"}
my_oldobjs=
my_xlib=
my_xabs=
my_xdir=
for my_xlib in $my_oldlibs; do
# Extract the objects.
case $my_xlib in
[\\/]* | [A-Za-z]:[\\/]*) my_xabs=$my_xlib ;;
*) my_xabs=`pwd`"/$my_xlib" ;;
esac
func_basename "$my_xlib"
my_xlib=$func_basename_result
my_xlib_u=$my_xlib
# Two archives may share a basename; prefix lt<serial>- until the
# per-archive extraction directory name is unique.
while :; do
case " $extracted_archives " in
*" $my_xlib_u "*)
func_arith $extracted_serial + 1
extracted_serial=$func_arith_result
my_xlib_u=lt$extracted_serial-$my_xlib ;;
*) break ;;
esac
done
extracted_archives="$extracted_archives $my_xlib_u"
my_xdir=$my_gentop/$my_xlib_u
func_mkdir_p "$my_xdir"
case $host in
*-darwin*)
# Darwin archives may be fat (multi-architecture): split with
# lipo, extract each thin archive, then lipo the objects back
# together so downstream steps see ordinary fat objects.
func_verbose "Extracting $my_xabs"
# Do not bother doing anything if just a dry run
$opt_dry_run || {
darwin_orig_dir=`pwd`
cd $my_xdir || exit $?
darwin_archive=$my_xabs
darwin_curdir=`pwd`
func_basename "$darwin_archive"
darwin_base_archive=$func_basename_result
darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true`
if test -n "$darwin_arches"; then
darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'`
darwin_arch=
func_verbose "$darwin_base_archive has multiple architectures $darwin_arches"
for darwin_arch in $darwin_arches; do
func_mkdir_p "unfat-$$/$darwin_base_archive-$darwin_arch"
$LIPO -thin $darwin_arch -output "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" "$darwin_archive"
cd "unfat-$$/$darwin_base_archive-$darwin_arch"
func_extract_an_archive "`pwd`" "$darwin_base_archive"
cd "$darwin_curdir"
$RM "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive"
done # $darwin_arches
## Okay now we've a bunch of thin objects, gotta fatten them up :)
darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$sed_basename" | sort -u`
darwin_file=
darwin_files=
for darwin_file in $darwin_filelist; do
darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP`
$LIPO -create -output "$darwin_file" $darwin_files
done # $darwin_filelist
$RM -rf unfat-$$
cd "$darwin_orig_dir"
else
# Thin (single-architecture) archive: extract directly.
cd $darwin_orig_dir
func_extract_an_archive "$my_xdir" "$my_xabs"
fi # $darwin_arches
} # !$opt_dry_run
;;
*)
func_extract_an_archive "$my_xdir" "$my_xabs"
;;
esac
# Collect every object that came out of this archive.
my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP`
done
func_extract_archives_result=$my_oldobjs
}
# func_emit_wrapper [arg=no]
#
# Emit a libtool wrapper script on stdout.
# Don't directly open a file because we may want to
# incorporate the script contents within a cygwin/mingw
# wrapper executable. Must ONLY be called from within
# func_mode_link because it depends on a number of variables
# set therein.
#
# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
# variable will take. If 'yes', then the emitted script
# will assume that the directory where it is stored is
# the $objdir directory. This is a cygwin/mingw-specific
# behavior.
func_emit_wrapper ()
{
# $1 ('yes'/'no', default 'no') becomes WRAPPER_SCRIPT_BELONGS_IN_OBJDIR
# in the emitted script.  NOTE: every quoted $ECHO block below is emitted
# verbatim into the wrapper; escaped (\$, \") text is evaluated by the
# wrapper at run time, unescaped text is expanded here at emit time.
func_emit_wrapper_arg1=${1-no}
$ECHO "\
#! $SHELL
# $output - temporary wrapper script for $objdir/$outputname
# Generated by $PROGRAM (GNU $PACKAGE) $VERSION
#
# The $output program cannot be directly executed until all the libtool
# libraries that it depends on are installed.
#
# This wrapper script should never be moved out of the build directory.
# If it is, it will not operate correctly.
# Sed substitution that helps us do robust quoting. It backslashifies
# metacharacters that are still active within double-quoted strings.
sed_quote_subst='$sed_quote_subst'
# Be Bourne compatible
if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then
emulate sh
NULLCMD=:
# Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which
# is contrary to our usage. Disable this feature.
alias -g '\${1+\"\$@\"}'='\"\$@\"'
setopt NO_GLOB_SUBST
else
case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac
fi
BIN_SH=xpg4; export BIN_SH # for Tru64
DUALCASE=1; export DUALCASE # for MKS sh
# The HP-UX ksh and POSIX shell print the target directory to stdout
# if CDPATH is set.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
relink_command=\"$relink_command\"
# This environment variable determines our operation mode.
if test \"\$libtool_install_magic\" = \"$magic\"; then
# install mode needs the following variables:
generated_by_libtool_version='$macro_version'
notinst_deplibs='$notinst_deplibs'
else
# When we are sourced in execute mode, \$file and \$ECHO are already set.
if test \"\$libtool_execute_magic\" != \"$magic\"; then
file=\"\$0\""
# Re-quote the current $ECHO command so the wrapper can reuse it.
qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"`
$ECHO "\
# A function that is used when there is no print builtin or printf.
func_fallback_echo ()
{
eval 'cat <<_LTECHO_EOF
\$1
_LTECHO_EOF'
}
ECHO=\"$qECHO\"
fi
# Very basic option parsing. These options are (a) specific to
# the libtool wrapper, (b) are identical between the wrapper
# /script/ and the wrapper /executable/ that is used only on
# windows platforms, and (c) all begin with the string "--lt-"
# (application programs are unlikely to have options that match
# this pattern).
#
# There are only two supported options: --lt-debug and
# --lt-dump-script. There is, deliberately, no --lt-help.
#
# The first argument to this parsing function should be the
# script's $0 value, followed by "$@".
lt_option_debug=
func_parse_lt_options ()
{
lt_script_arg0=\$0
shift
for lt_opt
do
case \"\$lt_opt\" in
--lt-debug) lt_option_debug=1 ;;
--lt-dump-script)
lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\`
test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=.
lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\`
cat \"\$lt_dump_D/\$lt_dump_F\"
exit 0
;;
--lt-*)
\$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2
exit 1
;;
esac
done
# Print the debug banner immediately:
if test -n \"\$lt_option_debug\"; then
echo \"$outputname:$output:\$LINENO: libtool wrapper (GNU $PACKAGE) $VERSION\" 1>&2
fi
}
# Used when --lt-debug. Prints its arguments to stdout
# (redirection is the responsibility of the caller)
func_lt_dump_args ()
{
lt_dump_args_N=1;
for lt_arg
do
\$ECHO \"$outputname:$output:\$LINENO: newargv[\$lt_dump_args_N]: \$lt_arg\"
lt_dump_args_N=\`expr \$lt_dump_args_N + 1\`
done
}
# Core function for launching the target application
func_exec_program_core ()
{
"
# The exec line differs per host: backslash path separators on native
# Windows/OS/2 hosts, forward slashes elsewhere.
case $host in
# Backslashes separate directories on plain windows
*-*-mingw | *-*-os2* | *-cegcc*)
$ECHO "\
if test -n \"\$lt_option_debug\"; then
\$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir\\\\\$program\" 1>&2
func_lt_dump_args \${1+\"\$@\"} 1>&2
fi
exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
"
;;
*)
$ECHO "\
if test -n \"\$lt_option_debug\"; then
\$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir/\$program\" 1>&2
func_lt_dump_args \${1+\"\$@\"} 1>&2
fi
exec \"\$progdir/\$program\" \${1+\"\$@\"}
"
;;
esac
$ECHO "\
\$ECHO \"\$0: cannot exec \$program \$*\" 1>&2
exit 1
}
# A function to encapsulate launching the target application
# Strips options in the --lt-* namespace from \$@ and
# launches target application with the remaining arguments.
func_exec_program ()
{
case \" \$* \" in
*\\ --lt-*)
for lt_wr_arg
do
case \$lt_wr_arg in
--lt-*) ;;
*) set x \"\$@\" \"\$lt_wr_arg\"; shift;;
esac
shift
done ;;
esac
func_exec_program_core \${1+\"\$@\"}
}
# Parse options
func_parse_lt_options \"\$0\" \${1+\"\$@\"}
# Find the directory that this script lives in.
thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\`
test \"x\$thisdir\" = \"x\$file\" && thisdir=.
# Follow symbolic links until we get to the real thisdir.
file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\`
while test -n \"\$file\"; do
destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\`
# If there was a directory component, then change thisdir.
if test \"x\$destdir\" != \"x\$file\"; then
case \"\$destdir\" in
[\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
*) thisdir=\"\$thisdir/\$destdir\" ;;
esac
fi
file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\`
file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\`
done
# Usually 'no', except on cygwin/mingw when embedded into
# the cwrapper.
WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1
if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then
# special case for '.'
if test \"\$thisdir\" = \".\"; then
thisdir=\`pwd\`
fi
# remove .libs from thisdir
case \"\$thisdir\" in
*[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;;
$objdir ) thisdir=. ;;
esac
fi
# Try to get the absolute directory name.
absdir=\`cd \"\$thisdir\" && pwd\`
test -n \"\$absdir\" && thisdir=\"\$absdir\"
"
# With fast_install, the real binary lives under $objdir as lt-<name>
# and the wrapper relinks it on first run; otherwise the already-built
# binary in $objdir is used in place.
if test yes = "$fast_install"; then
$ECHO "\
program=lt-'$outputname'$exeext
progdir=\"\$thisdir/$objdir\"
if test ! -f \"\$progdir/\$program\" ||
{ file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | $SED 1q\`; \\
test \"X\$file\" != \"X\$progdir/\$program\"; }; then
file=\"\$\$-\$program\"
if test ! -d \"\$progdir\"; then
$MKDIR \"\$progdir\"
else
$RM \"\$progdir/\$file\"
fi"
$ECHO "\
# relink executable if necessary
if test -n \"\$relink_command\"; then
if relink_command_output=\`eval \$relink_command 2>&1\`; then :
else
\$ECHO \"\$relink_command_output\" >&2
$RM \"\$progdir/\$file\"
exit 1
fi
fi
$MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
{ $RM \"\$progdir/\$program\";
$MV \"\$progdir/\$file\" \"\$progdir/\$program\"; }
$RM \"\$progdir/\$file\"
fi"
else
$ECHO "\
program='$outputname'
progdir=\"\$thisdir/$objdir\"
"
fi
$ECHO "\
if test -f \"\$progdir/\$program\"; then"
# fixup the dll searchpath if we need to.
#
# Fix the DLL searchpath if we need to. Do this before prepending
# to shlibpath, because on Windows, both are PATH and uninstalled
# libraries must come first.
if test -n "$dllsearchpath"; then
$ECHO "\
# Add the dll search path components to the executable PATH
PATH=$dllsearchpath:\$PATH
"
fi
# Export our shlibpath_var if we have one.
if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
$ECHO "\
# Add our own library path to $shlibpath_var
$shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
# Some systems cannot cope with colon-terminated $shlibpath_var
# The second colon is a workaround for a bug in BeOS R4 sed
$shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\`
export $shlibpath_var
"
fi
$ECHO "\
if test \"\$libtool_execute_magic\" != \"$magic\"; then
# Run the actual program with our arguments.
func_exec_program \${1+\"\$@\"}
fi
else
# The program doesn't exist.
\$ECHO \"\$0: error: '\$progdir/\$program' does not exist\" 1>&2
\$ECHO \"This script is just a wrapper for \$program.\" 1>&2
\$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2
exit 1
fi
fi\
"
}
# func_emit_cwrapperexe_src
# emit the source code for a wrapper executable on stdout
# Must ONLY be called from within func_mode_link because
# it depends on a number of variable set therein.
func_emit_cwrapperexe_src ()
{
# NOTE(review): this is a generated file ("look at ltmain.sh" in the header);
# real fixes belong in ltmain.sh -- edits here are lost on regeneration.
# NOTE(review): the here-doc text below appears damaged by extraction: the
# introducer (originally 'cat <<EOF') and the <header.h> names on the
# emitted #include lines have lost their '<...>' spans.  Restore from an
# intact ltmain.sh rather than by hand.
cat <
#include
#ifdef _MSC_VER
# include
# include
# include
#else
# include
# include
# ifdef __CYGWIN__
# include
# endif
#endif
#include
#include
#include
#include
#include
#include
#include
#include
#define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0)
/* declarations of non-ANSI functions */
#if defined __MINGW32__
# ifdef __STRICT_ANSI__
int _putenv (const char *);
# endif
#elif defined __CYGWIN__
# ifdef __STRICT_ANSI__
char *realpath (const char *, char *);
int putenv (char *);
int setenv (const char *, const char *, int);
# endif
/* #elif defined other_platform || defined ... */
#endif
/* portability defines, excluding path handling macros */
#if defined _MSC_VER
# define setmode _setmode
# define stat _stat
# define chmod _chmod
# define getcwd _getcwd
# define putenv _putenv
# define S_IXUSR _S_IEXEC
#elif defined __MINGW32__
# define setmode _setmode
# define stat _stat
# define chmod _chmod
# define getcwd _getcwd
# define putenv _putenv
#elif defined __CYGWIN__
# define HAVE_SETENV
# define FOPEN_WB "wb"
/* #elif defined other platforms ... */
#endif
#if defined PATH_MAX
# define LT_PATHMAX PATH_MAX
#elif defined MAXPATHLEN
# define LT_PATHMAX MAXPATHLEN
#else
# define LT_PATHMAX 1024
#endif
#ifndef S_IXOTH
# define S_IXOTH 0
#endif
#ifndef S_IXGRP
# define S_IXGRP 0
#endif
/* path handling portability macros */
#ifndef DIR_SEPARATOR
# define DIR_SEPARATOR '/'
# define PATH_SEPARATOR ':'
#endif
#if defined _WIN32 || defined __MSDOS__ || defined __DJGPP__ || \
defined __OS2__
# define HAVE_DOS_BASED_FILE_SYSTEM
# define FOPEN_WB "wb"
# ifndef DIR_SEPARATOR_2
# define DIR_SEPARATOR_2 '\\'
# endif
# ifndef PATH_SEPARATOR_2
# define PATH_SEPARATOR_2 ';'
# endif
#endif
#ifndef DIR_SEPARATOR_2
# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
#else /* DIR_SEPARATOR_2 */
# define IS_DIR_SEPARATOR(ch) \
(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
#endif /* DIR_SEPARATOR_2 */
#ifndef PATH_SEPARATOR_2
# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR)
#else /* PATH_SEPARATOR_2 */
# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2)
#endif /* PATH_SEPARATOR_2 */
#ifndef FOPEN_WB
# define FOPEN_WB "w"
#endif
#ifndef _O_BINARY
# define _O_BINARY 0
#endif
#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type)))
#define XFREE(stale) do { \
if (stale) { free (stale); stale = 0; } \
} while (0)
#if defined LT_DEBUGWRAPPER
static int lt_debug = 1;
#else
static int lt_debug = 0;
#endif
const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */
void *xmalloc (size_t num);
char *xstrdup (const char *string);
const char *base_name (const char *name);
char *find_executable (const char *wrapper);
char *chase_symlinks (const char *pathspec);
int make_executable (const char *path);
int check_executable (const char *path);
char *strendzap (char *str, const char *pat);
void lt_debugprintf (const char *file, int line, const char *fmt, ...);
void lt_fatal (const char *file, int line, const char *message, ...);
static const char *nonnull (const char *s);
static const char *nonempty (const char *s);
void lt_setenv (const char *name, const char *value);
char *lt_extend_str (const char *orig_value, const char *add, int to_end);
void lt_update_exe_path (const char *name, const char *value);
void lt_update_lib_path (const char *name, const char *value);
char **prepare_spawn (char **argv);
void lt_dump_script (FILE *f);
EOF
# NOTE(review): 'cat <= 0)' below is the residue of a second 'cat <<EOF'
# here-doc; everything between the introducer and the middle of the emitted
# check_executable() (the wrapper's variable definitions, main(), and the
# xmalloc/xstrdup/base_name helpers) was lost in extraction -- TODO restore
# from ltmain.sh before this function can emit a compilable wrapper.
cat <= 0)
&& (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
return 1;
else
return 0;
}
int
make_executable (const char *path)
{
int rval = 0;
struct stat st;
lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n",
nonempty (path));
if ((!path) || (!*path))
return 0;
if (stat (path, &st) >= 0)
{
rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR);
}
return rval;
}
/* Searches for the full path of the wrapper. Returns
newly allocated full path name if found, NULL otherwise
Does not chase symlinks, even on platforms that support them.
*/
char *
find_executable (const char *wrapper)
{
int has_slash = 0;
const char *p;
const char *p_next;
/* static buffer for getcwd */
char tmp[LT_PATHMAX + 1];
size_t tmp_len;
char *concat_name;
lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n",
nonempty (wrapper));
if ((wrapper == NULL) || (*wrapper == '\0'))
return NULL;
/* Absolute path? */
#if defined HAVE_DOS_BASED_FILE_SYSTEM
if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':')
{
concat_name = xstrdup (wrapper);
if (check_executable (concat_name))
return concat_name;
XFREE (concat_name);
}
else
{
#endif
if (IS_DIR_SEPARATOR (wrapper[0]))
{
concat_name = xstrdup (wrapper);
if (check_executable (concat_name))
return concat_name;
XFREE (concat_name);
}
#if defined HAVE_DOS_BASED_FILE_SYSTEM
}
#endif
for (p = wrapper; *p; p++)
if (*p == '/')
{
has_slash = 1;
break;
}
if (!has_slash)
{
/* no slashes; search PATH */
const char *path = getenv ("PATH");
if (path != NULL)
{
for (p = path; *p; p = p_next)
{
const char *q;
size_t p_len;
for (q = p; *q; q++)
if (IS_PATH_SEPARATOR (*q))
break;
p_len = (size_t) (q - p);
p_next = (*q == '\0' ? q : q + 1);
if (p_len == 0)
{
/* empty path: current directory */
if (getcwd (tmp, LT_PATHMAX) == NULL)
lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
nonnull (strerror (errno)));
tmp_len = strlen (tmp);
concat_name =
XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
memcpy (concat_name, tmp, tmp_len);
concat_name[tmp_len] = '/';
strcpy (concat_name + tmp_len + 1, wrapper);
}
else
{
concat_name =
XMALLOC (char, p_len + 1 + strlen (wrapper) + 1);
memcpy (concat_name, p, p_len);
concat_name[p_len] = '/';
strcpy (concat_name + p_len + 1, wrapper);
}
if (check_executable (concat_name))
return concat_name;
XFREE (concat_name);
}
}
/* not found in PATH; assume curdir */
}
/* Relative path | not found in path: prepend cwd */
if (getcwd (tmp, LT_PATHMAX) == NULL)
lt_fatal (__FILE__, __LINE__, "getcwd failed: %s",
nonnull (strerror (errno)));
tmp_len = strlen (tmp);
concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1);
memcpy (concat_name, tmp, tmp_len);
concat_name[tmp_len] = '/';
strcpy (concat_name + tmp_len + 1, wrapper);
if (check_executable (concat_name))
return concat_name;
XFREE (concat_name);
return NULL;
}
char *
chase_symlinks (const char *pathspec)
{
#ifndef S_ISLNK
return xstrdup (pathspec);
#else
char buf[LT_PATHMAX];
struct stat s;
char *tmp_pathspec = xstrdup (pathspec);
char *p;
int has_symlinks = 0;
while (strlen (tmp_pathspec) && !has_symlinks)
{
lt_debugprintf (__FILE__, __LINE__,
"checking path component for symlinks: %s\n",
tmp_pathspec);
if (lstat (tmp_pathspec, &s) == 0)
{
if (S_ISLNK (s.st_mode) != 0)
{
has_symlinks = 1;
break;
}
/* search backwards for last DIR_SEPARATOR */
p = tmp_pathspec + strlen (tmp_pathspec) - 1;
while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
p--;
if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p)))
{
/* no more DIR_SEPARATORS left */
break;
}
*p = '\0';
}
else
{
lt_fatal (__FILE__, __LINE__,
"error accessing file \"%s\": %s",
tmp_pathspec, nonnull (strerror (errno)));
}
}
XFREE (tmp_pathspec);
if (!has_symlinks)
{
return xstrdup (pathspec);
}
tmp_pathspec = realpath (pathspec, buf);
if (tmp_pathspec == 0)
{
lt_fatal (__FILE__, __LINE__,
"could not follow symlinks for %s", pathspec);
}
return xstrdup (tmp_pathspec);
#endif
}
char *
strendzap (char *str, const char *pat)
{
size_t len, patlen;
assert (str != NULL);
assert (pat != NULL);
len = strlen (str);
patlen = strlen (pat);
if (patlen <= len)
{
str += len - patlen;
if (STREQ (str, pat))
*str = '\0';
}
return str;
}
void
lt_debugprintf (const char *file, int line, const char *fmt, ...)
{
va_list args;
if (lt_debug)
{
(void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line);
va_start (args, fmt);
(void) vfprintf (stderr, fmt, args);
va_end (args);
}
}
static void
lt_error_core (int exit_status, const char *file,
int line, const char *mode,
const char *message, va_list ap)
{
fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode);
vfprintf (stderr, message, ap);
fprintf (stderr, ".\n");
if (exit_status >= 0)
exit (exit_status);
}
void
lt_fatal (const char *file, int line, const char *message, ...)
{
va_list ap;
va_start (ap, message);
lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap);
va_end (ap);
}
static const char *
nonnull (const char *s)
{
return s ? s : "(null)";
}
static const char *
nonempty (const char *s)
{
return (s && !*s) ? "(empty)" : nonnull (s);
}
void
lt_setenv (const char *name, const char *value)
{
lt_debugprintf (__FILE__, __LINE__,
"(lt_setenv) setting '%s' to '%s'\n",
nonnull (name), nonnull (value));
{
#ifdef HAVE_SETENV
/* always make a copy, for consistency with !HAVE_SETENV */
char *str = xstrdup (value);
setenv (name, str, 1);
#else
size_t len = strlen (name) + 1 + strlen (value) + 1;
char *str = XMALLOC (char, len);
sprintf (str, "%s=%s", name, value);
if (putenv (str) != EXIT_SUCCESS)
{
XFREE (str);
}
#endif
}
}
char *
lt_extend_str (const char *orig_value, const char *add, int to_end)
{
char *new_value;
if (orig_value && *orig_value)
{
size_t orig_value_len = strlen (orig_value);
size_t add_len = strlen (add);
new_value = XMALLOC (char, add_len + orig_value_len + 1);
if (to_end)
{
strcpy (new_value, orig_value);
strcpy (new_value + orig_value_len, add);
}
else
{
strcpy (new_value, add);
strcpy (new_value + add_len, orig_value);
}
}
else
{
new_value = xstrdup (add);
}
return new_value;
}
void
lt_update_exe_path (const char *name, const char *value)
{
lt_debugprintf (__FILE__, __LINE__,
"(lt_update_exe_path) modifying '%s' by prepending '%s'\n",
nonnull (name), nonnull (value));
if (name && *name && value && *value)
{
char *new_value = lt_extend_str (getenv (name), value, 0);
/* some systems can't cope with a ':'-terminated path #' */
size_t len = strlen (new_value);
while ((len > 0) && IS_PATH_SEPARATOR (new_value[len-1]))
{
new_value[--len] = '\0';
}
lt_setenv (name, new_value);
XFREE (new_value);
}
}
void
lt_update_lib_path (const char *name, const char *value)
{
lt_debugprintf (__FILE__, __LINE__,
"(lt_update_lib_path) modifying '%s' by prepending '%s'\n",
nonnull (name), nonnull (value));
if (name && *name && value && *value)
{
char *new_value = lt_extend_str (getenv (name), value, 0);
lt_setenv (name, new_value);
XFREE (new_value);
}
}
EOF
# Only MinGW needs prepare_spawn(): its wrapper concatenates argv for
# CreateProcess() and must quote the arguments itself (see the long
# comment inside the here-doc).
case $host_os in
mingw*)
cat <<"EOF"
/* Prepares an argument vector before calling spawn().
Note that spawn() does not by itself call the command interpreter
(getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") :
({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
GetVersionEx(&v);
v.dwPlatformId == VER_PLATFORM_WIN32_NT;
}) ? "cmd.exe" : "command.com").
Instead it simply concatenates the arguments, separated by ' ', and calls
CreateProcess(). We must quote the arguments since Win32 CreateProcess()
interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a
special way:
- Space and tab are interpreted as delimiters. They are not treated as
delimiters if they are surrounded by double quotes: "...".
- Unescaped double quotes are removed from the input. Their only effect is
that within double quotes, space and tab are treated like normal
characters.
- Backslashes not followed by double quotes are not special.
- But 2*n+1 backslashes followed by a double quote become
n backslashes followed by a double quote (n >= 0):
\" -> "
\\\" -> \"
\\\\\" -> \\"
*/
#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037"
char **
prepare_spawn (char **argv)
{
size_t argc;
char **new_argv;
size_t i;
/* Count number of arguments. */
for (argc = 0; argv[argc] != NULL; argc++)
;
/* Allocate new argument vector. */
new_argv = XMALLOC (char *, argc + 1);
/* Put quoted arguments into the new argument vector. */
for (i = 0; i < argc; i++)
{
const char *string = argv[i];
if (string[0] == '\0')
new_argv[i] = xstrdup ("\"\"");
else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL)
{
int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL);
size_t length;
unsigned int backslashes;
const char *s;
char *quoted_string;
char *p;
length = 0;
backslashes = 0;
if (quote_around)
length++;
for (s = string; *s != '\0'; s++)
{
char c = *s;
if (c == '"')
length += backslashes + 1;
length++;
if (c == '\\')
backslashes++;
else
backslashes = 0;
}
if (quote_around)
length += backslashes + 1;
quoted_string = XMALLOC (char, length + 1);
p = quoted_string;
backslashes = 0;
if (quote_around)
*p++ = '"';
for (s = string; *s != '\0'; s++)
{
char c = *s;
if (c == '"')
{
unsigned int j;
for (j = backslashes + 1; j > 0; j--)
*p++ = '\\';
}
*p++ = c;
if (c == '\\')
backslashes++;
else
backslashes = 0;
}
if (quote_around)
{
unsigned int j;
for (j = backslashes; j > 0; j--)
*p++ = '\\';
*p++ = '"';
}
*p = '\0';
new_argv[i] = quoted_string;
}
else
new_argv[i] = (char *) string;
}
new_argv[argc] = NULL;
return new_argv;
}
EOF
;;
esac
# Emit lt_dump_script: the sed program turns the shell wrapper text from
# func_emit_wrapper into fputs() calls -- splitting lines longer than 79
# characters, escaping '\' and '"', and appending a literal \n to each.
cat <<"EOF"
void lt_dump_script (FILE* f)
{
EOF
func_emit_wrapper yes |
$SED -n -e '
s/^\(.\{79\}\)\(..*\)/\1\
\2/
h
s/\([\\"]\)/\\\1/g
s/$/\\n/
s/\([^\n]*\).*/ fputs ("\1", f);/p
g
D'
cat <<"EOF"
}
EOF
}
# end: func_emit_cwrapperexe_src
# func_win32_import_lib_p ARG
# True if ARG is an import lib, as indicated by $file_magic_cmd
func_win32_import_lib_p ()
{
$debug_cmd
# Run $file_magic_cmd on the file named by $1 (stderr discarded), keep at
# most the first 10 lines of output ($SED -e 10q), and succeed iff that
# text contains 'import'.  The case arms set the function's exit status:
# ':' for success, 'false' for failure.  The eval with escaped quotes
# preserves $file_magic_cmd's own word splitting while protecting $1.
case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in
*import*) : ;;
*) false ;;
esac
}
# func_suncc_cstd_abi
# !!ONLY CALL THIS FOR SUN CC AFTER $compile_command IS FULLY EXPANDED!!
# Several compiler flags select an ABI that is incompatible with the
# Cstd library. Avoid specifying it if any are in CXXFLAGS.
func_suncc_cstd_abi ()
{
$debug_cmd

# Default: linking against the Cstd ABI is fine.  Override to 'no' when
# $compile_command carries any flag that selects an ABI incompatible with
# the Cstd library (-compat=g, -std=c++NN, -library=stdcxx4/stlport4).
# The command is padded with spaces so each flag matches whole-word.
suncc_use_cstd_abi=yes
case " $compile_command " in
*" -compat=g "*|*\ -std=c++[0-9][0-9]\ *|*" -library=stdcxx4 "*|*" -library=stlport4 "*)
suncc_use_cstd_abi=no
;;
esac
}
# func_mode_link arg...
func_mode_link ()
{
$debug_cmd
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
# It is impossible to link a dll without this setting, and
# we shouldn't force the makefile maintainer to figure out
# what system we are compiling for in order to pass an extra
# flag for every libtool invocation.
# allow_undefined=no
# FIXME: Unfortunately, there are problems with the above when trying
# to make a dll that has undefined symbols, in which case not
# even a static library is built. For now, we need to specify
# -no-undefined on the libtool link line when we can be certain
# that all symbols are satisfied, otherwise we get a static library.
allow_undefined=yes
;;
*)
allow_undefined=yes
;;
esac
libtool_args=$nonopt
base_compile="$nonopt $@"
compile_command=$nonopt
finalize_command=$nonopt
compile_rpath=
finalize_rpath=
compile_shlibpath=
finalize_shlibpath=
convenience=
old_convenience=
deplibs=
old_deplibs=
compiler_flags=
linker_flags=
dllsearchpath=
lib_search_path=`pwd`
inst_prefix_dir=
new_inherited_linker_flags=
avoid_version=no
bindir=
dlfiles=
dlprefiles=
dlself=no
export_dynamic=no
export_symbols=
export_symbols_regex=
generated=
libobjs=
ltlibs=
module=no
no_install=no
objs=
os2dllname=
non_pic_objects=
precious_files_regex=
prefer_static_libs=no
preload=false
prev=
prevarg=
release=
rpath=
xrpath=
perm_rpath=
temp_rpath=
thread_safe=no
vinfo=
vinfo_number=no
weak_libs=
single_module=$wl-single_module
func_infer_tag $base_compile
# We need to know -static, to get the right output filenames.
for arg
do
case $arg in
-shared)
test yes != "$build_libtool_libs" \
&& func_fatal_configuration "cannot build a shared library"
build_old_libs=no
break
;;
-all-static | -static | -static-libtool-libs)
case $arg in
-all-static)
if test yes = "$build_libtool_libs" && test -z "$link_static_flag"; then
func_warning "complete static linking is impossible in this configuration"
fi
if test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
prefer_static_libs=yes
;;
-static)
if test -z "$pic_flag" && test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
prefer_static_libs=built
;;
-static-libtool-libs)
if test -z "$pic_flag" && test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
prefer_static_libs=yes
;;
esac
build_libtool_libs=no
build_old_libs=yes
break
;;
esac
done
# See if our shared archives depend on static archives.
test -n "$old_archive_from_new_cmds" && build_old_libs=yes
# Go through the arguments, transforming them on the way.
while test "$#" -gt 0; do
arg=$1
shift
func_quote_for_eval "$arg"
qarg=$func_quote_for_eval_unquoted_result
func_append libtool_args " $func_quote_for_eval_result"
# If the previous option needs an argument, assign it.
if test -n "$prev"; then
case $prev in
output)
func_append compile_command " @OUTPUT@"
func_append finalize_command " @OUTPUT@"
;;
esac
case $prev in
bindir)
bindir=$arg
prev=
continue
;;
dlfiles|dlprefiles)
$preload || {
# Add the symbol object into the linking commands.
func_append compile_command " @SYMFILE@"
func_append finalize_command " @SYMFILE@"
preload=:
}
case $arg in
*.la | *.lo) ;; # We handle these cases below.
force)
if test no = "$dlself"; then
dlself=needless
export_dynamic=yes
fi
prev=
continue
;;
self)
if test dlprefiles = "$prev"; then
dlself=yes
elif test dlfiles = "$prev" && test yes != "$dlopen_self"; then
dlself=yes
else
dlself=needless
export_dynamic=yes
fi
prev=
continue
;;
*)
if test dlfiles = "$prev"; then
func_append dlfiles " $arg"
else
func_append dlprefiles " $arg"
fi
prev=
continue
;;
esac
;;
expsyms)
export_symbols=$arg
test -f "$arg" \
|| func_fatal_error "symbol file '$arg' does not exist"
prev=
continue
;;
expsyms_regex)
export_symbols_regex=$arg
prev=
continue
;;
framework)
case $host in
*-*-darwin*)
case "$deplibs " in
*" $qarg.ltframework "*) ;;
*) func_append deplibs " $qarg.ltframework" # this is fixed later
;;
esac
;;
esac
prev=
continue
;;
inst_prefix)
inst_prefix_dir=$arg
prev=
continue
;;
mllvm)
# Clang does not use LLVM to link, so we can simply discard any
# '-mllvm $arg' options when doing the link step.
prev=
continue
;;
objectlist)
if test -f "$arg"; then
save_arg=$arg
moreargs=
for fil in `cat "$save_arg"`
do
# func_append moreargs " $fil"
arg=$fil
# A libtool-controlled object.
# Check to see that this really is a libtool object.
if func_lalib_unsafe_p "$arg"; then
pic_object=
non_pic_object=
# Read the .lo file
func_source "$arg"
if test -z "$pic_object" ||
test -z "$non_pic_object" ||
test none = "$pic_object" &&
test none = "$non_pic_object"; then
func_fatal_error "cannot find name of object for '$arg'"
fi
# Extract subdirectory from the argument.
func_dirname "$arg" "/" ""
xdir=$func_dirname_result
if test none != "$pic_object"; then
# Prepend the subdirectory the object is found in.
pic_object=$xdir$pic_object
if test dlfiles = "$prev"; then
if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then
func_append dlfiles " $pic_object"
prev=
continue
else
# If libtool objects are unsupported, then we need to preload.
prev=dlprefiles
fi
fi
# CHECK ME: I think I busted this. -Ossama
if test dlprefiles = "$prev"; then
# Preload the old-style object.
func_append dlprefiles " $pic_object"
prev=
fi
# A PIC object.
func_append libobjs " $pic_object"
arg=$pic_object
fi
# Non-PIC object.
if test none != "$non_pic_object"; then
# Prepend the subdirectory the object is found in.
non_pic_object=$xdir$non_pic_object
# A standard non-PIC object
func_append non_pic_objects " $non_pic_object"
if test -z "$pic_object" || test none = "$pic_object"; then
arg=$non_pic_object
fi
else
# If the PIC object exists, use it instead.
# $xdir was prepended to $pic_object above.
non_pic_object=$pic_object
func_append non_pic_objects " $non_pic_object"
fi
else
# Only an error if not doing a dry-run.
if $opt_dry_run; then
# Extract subdirectory from the argument.
func_dirname "$arg" "/" ""
xdir=$func_dirname_result
func_lo2o "$arg"
pic_object=$xdir$objdir/$func_lo2o_result
non_pic_object=$xdir$func_lo2o_result
func_append libobjs " $pic_object"
func_append non_pic_objects " $non_pic_object"
else
func_fatal_error "'$arg' is not a valid libtool object"
fi
fi
done
else
func_fatal_error "link input file '$arg' does not exist"
fi
arg=$save_arg
prev=
continue
;;
os2dllname)
os2dllname=$arg
prev=
continue
;;
precious_regex)
precious_files_regex=$arg
prev=
continue
;;
release)
release=-$arg
prev=
continue
;;
rpath | xrpath)
# We need an absolute path.
case $arg in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
func_fatal_error "only absolute run-paths are allowed"
;;
esac
if test rpath = "$prev"; then
case "$rpath " in
*" $arg "*) ;;
*) func_append rpath " $arg" ;;
esac
else
case "$xrpath " in
*" $arg "*) ;;
*) func_append xrpath " $arg" ;;
esac
fi
prev=
continue
;;
shrext)
shrext_cmds=$arg
prev=
continue
;;
weak)
func_append weak_libs " $arg"
prev=
continue
;;
xcclinker)
func_append linker_flags " $qarg"
func_append compiler_flags " $qarg"
prev=
func_append compile_command " $qarg"
func_append finalize_command " $qarg"
continue
;;
xcompiler)
func_append compiler_flags " $qarg"
prev=
func_append compile_command " $qarg"
func_append finalize_command " $qarg"
continue
;;
xlinker)
func_append linker_flags " $qarg"
func_append compiler_flags " $wl$qarg"
prev=
func_append compile_command " $wl$qarg"
func_append finalize_command " $wl$qarg"
continue
;;
*)
eval "$prev=\"\$arg\""
prev=
continue
;;
esac
fi # test -n "$prev"
prevarg=$arg
case $arg in
-all-static)
if test -n "$link_static_flag"; then
# See comment for -static flag below, for more details.
func_append compile_command " $link_static_flag"
func_append finalize_command " $link_static_flag"
fi
continue
;;
-allow-undefined)
# FIXME: remove this flag sometime in the future.
func_fatal_error "'-allow-undefined' must not be used because it is the default"
;;
-avoid-version)
avoid_version=yes
continue
;;
-bindir)
prev=bindir
continue
;;
-dlopen)
prev=dlfiles
continue
;;
-dlpreopen)
prev=dlprefiles
continue
;;
-export-dynamic)
export_dynamic=yes
continue
;;
-export-symbols | -export-symbols-regex)
if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
func_fatal_error "more than one -exported-symbols argument is not allowed"
fi
if test X-export-symbols = "X$arg"; then
prev=expsyms
else
prev=expsyms_regex
fi
continue
;;
-framework)
prev=framework
continue
;;
-inst-prefix-dir)
prev=inst_prefix
continue
;;
# The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
# so, if we see these flags be careful not to treat them like -L
-L[A-Z][A-Z]*:*)
case $with_gcc/$host in
no/*-*-irix* | /*-*-irix*)
func_append compile_command " $arg"
func_append finalize_command " $arg"
;;
esac
continue
;;
-L*)
func_stripname "-L" '' "$arg"
if test -z "$func_stripname_result"; then
if test "$#" -gt 0; then
func_fatal_error "require no space between '-L' and '$1'"
else
func_fatal_error "need path for '-L' option"
fi
fi
func_resolve_sysroot "$func_stripname_result"
dir=$func_resolve_sysroot_result
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
absdir=`cd "$dir" && pwd`
test -z "$absdir" && \
func_fatal_error "cannot determine absolute directory name of '$dir'"
dir=$absdir
;;
esac
case "$deplibs " in
*" -L$dir "* | *" $arg "*)
# Will only happen for absolute or sysroot arguments
;;
*)
# Preserve sysroot, but never include relative directories
case $dir in
[\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;;
*) func_append deplibs " -L$dir" ;;
esac
func_append lib_search_path " $dir"
;;
esac
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'`
case :$dllsearchpath: in
*":$dir:"*) ;;
::) dllsearchpath=$dir;;
*) func_append dllsearchpath ":$dir";;
esac
case :$dllsearchpath: in
*":$testbindir:"*) ;;
::) dllsearchpath=$testbindir;;
*) func_append dllsearchpath ":$testbindir";;
esac
;;
esac
continue
;;
-l*)
if test X-lc = "X$arg" || test X-lm = "X$arg"; then
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*)
# These systems don't actually have a C or math library (as such)
continue
;;
*-*-os2*)
# These systems don't actually have a C library (as such)
test X-lc = "X$arg" && continue
;;
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*)
# Do not include libc due to us having libc/libc_r.
test X-lc = "X$arg" && continue
;;
*-*-rhapsody* | *-*-darwin1.[012])
# Rhapsody C and math libraries are in the System framework
func_append deplibs " System.ltframework"
continue
;;
*-*-sco3.2v5* | *-*-sco5v6*)
# Causes problems with __ctype
test X-lc = "X$arg" && continue
;;
*-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
# Compiler inserts libc in the correct place for threads to work
test X-lc = "X$arg" && continue
;;
esac
elif test X-lc_r = "X$arg"; then
case $host in
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*)
# Do not include libc_r directly, use -pthread flag.
continue
;;
esac
fi
func_append deplibs " $arg"
continue
;;
-mllvm)
prev=mllvm
continue
;;
-module)
module=yes
continue
;;
# Tru64 UNIX uses -model [arg] to determine the layout of C++
# classes, name mangling, and exception handling.
# Darwin uses the -arch flag to determine output architecture.
-model|-arch|-isysroot|--sysroot)
func_append compiler_flags " $arg"
func_append compile_command " $arg"
func_append finalize_command " $arg"
prev=xcompiler
continue
;;
-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
|-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
func_append compiler_flags " $arg"
func_append compile_command " $arg"
func_append finalize_command " $arg"
case "$new_inherited_linker_flags " in
*" $arg "*) ;;
* ) func_append new_inherited_linker_flags " $arg" ;;
esac
continue
;;
-multi_module)
single_module=$wl-multi_module
continue
;;
-no-fast-install)
fast_install=no
continue
;;
-no-install)
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*)
# The PATH hackery in wrapper scripts is required on Windows
# and Darwin in order for the loader to find any dlls it needs.
func_warning "'-no-install' is ignored for $host"
func_warning "assuming '-no-fast-install' instead"
fast_install=no
;;
*) no_install=yes ;;
esac
continue
;;
-no-undefined)
allow_undefined=no
continue
;;
-objectlist)
prev=objectlist
continue
;;
-os2dllname)
prev=os2dllname
continue
;;
-o) prev=output ;;
-precious-files-regex)
prev=precious_regex
continue
;;
-release)
prev=release
continue
;;
-rpath)
prev=rpath
continue
;;
-R)
prev=xrpath
continue
;;
-R*)
func_stripname '-R' '' "$arg"
dir=$func_stripname_result
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
=*)
func_stripname '=' '' "$dir"
dir=$lt_sysroot$func_stripname_result
;;
*)
func_fatal_error "only absolute run-paths are allowed"
;;
esac
case "$xrpath " in
*" $dir "*) ;;
*) func_append xrpath " $dir" ;;
esac
continue
;;
-shared)
# The effects of -shared are defined in a previous loop.
continue
;;
-shrext)
prev=shrext
continue
;;
-static | -static-libtool-libs)
# The effects of -static are defined in a previous loop.
# We used to do the same as -all-static on platforms that
# didn't have a PIC flag, but the assumption that the effects
# would be equivalent was wrong. It would break on at least
# Digital Unix and AIX.
continue
;;
-thread-safe)
thread_safe=yes
continue
;;
-version-info)
prev=vinfo
continue
;;
-version-number)
prev=vinfo
vinfo_number=yes
continue
;;
-weak)
prev=weak
continue
;;
-Wc,*)
func_stripname '-Wc,' '' "$arg"
args=$func_stripname_result
arg=
save_ifs=$IFS; IFS=,
for flag in $args; do
IFS=$save_ifs
func_quote_for_eval "$flag"
func_append arg " $func_quote_for_eval_result"
func_append compiler_flags " $func_quote_for_eval_result"
done
IFS=$save_ifs
func_stripname ' ' '' "$arg"
arg=$func_stripname_result
;;
-Wl,--as-needed)
deplibs="$deplibs $arg"
continue
;;
-Wl,*)
func_stripname '-Wl,' '' "$arg"
args=$func_stripname_result
arg=
save_ifs=$IFS; IFS=,
for flag in $args; do
IFS=$save_ifs
func_quote_for_eval "$flag"
func_append arg " $wl$func_quote_for_eval_result"
func_append compiler_flags " $wl$func_quote_for_eval_result"
func_append linker_flags " $func_quote_for_eval_result"
done
IFS=$save_ifs
func_stripname ' ' '' "$arg"
arg=$func_stripname_result
;;
-Xcompiler)
prev=xcompiler
continue
;;
-Xlinker)
prev=xlinker
continue
;;
-XCClinker)
prev=xcclinker
continue
;;
# -msg_* for osf cc
-msg_*)
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
;;
# Flags to be passed through unchanged, with rationale:
# -64, -mips[0-9] enable 64-bit mode for the SGI compiler
# -r[0-9][0-9]* specify processor for the SGI compiler
# -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler
# +DA*, +DD* enable 64-bit mode for the HP compiler
# -q* compiler args for the IBM compiler
# -m*, -t[45]*, -txscale* architecture-specific flags for GCC
# -F/path path to uninstalled frameworks, gcc on darwin
# -p, -pg, --coverage, -fprofile-* profiling flags for GCC
# -fstack-protector* stack protector flags for GCC
# @file GCC response files
# -tp=* Portland pgcc target processor selection
# --sysroot=* for sysroot support
# -O*, -g*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization
# -stdlib=* select c++ std lib with clang
-64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \
-t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \
-O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-fstack-protector*|-stdlib=*)
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
func_append compile_command " $arg"
func_append finalize_command " $arg"
func_append compiler_flags " $arg"
continue
;;
-Z*)
if test os2 = "`expr $host : '.*\(os2\)'`"; then
# OS/2 uses -Zxxx to specify OS/2-specific options
compiler_flags="$compiler_flags $arg"
func_append compile_command " $arg"
func_append finalize_command " $arg"
case $arg in
-Zlinker | -Zstack)
prev=xcompiler
;;
esac
continue
else
# Otherwise treat like 'Some other compiler flag' below
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
fi
;;
# Some other compiler flag.
-* | +*)
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
;;
*.$objext)
# A standard object.
func_append objs " $arg"
;;
*.lo)
# A libtool-controlled object.
# Check to see that this really is a libtool object.
if func_lalib_unsafe_p "$arg"; then
pic_object=
non_pic_object=
# Read the .lo file
func_source "$arg"
if test -z "$pic_object" ||
test -z "$non_pic_object" ||
test none = "$pic_object" &&
test none = "$non_pic_object"; then
func_fatal_error "cannot find name of object for '$arg'"
fi
# Extract subdirectory from the argument.
func_dirname "$arg" "/" ""
xdir=$func_dirname_result
test none = "$pic_object" || {
# Prepend the subdirectory the object is found in.
pic_object=$xdir$pic_object
if test dlfiles = "$prev"; then
if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then
func_append dlfiles " $pic_object"
prev=
continue
else
# If libtool objects are unsupported, then we need to preload.
prev=dlprefiles
fi
fi
# CHECK ME: I think I busted this. -Ossama
if test dlprefiles = "$prev"; then
# Preload the old-style object.
func_append dlprefiles " $pic_object"
prev=
fi
# A PIC object.
func_append libobjs " $pic_object"
arg=$pic_object
}
# Non-PIC object.
if test none != "$non_pic_object"; then
# Prepend the subdirectory the object is found in.
non_pic_object=$xdir$non_pic_object
# A standard non-PIC object
func_append non_pic_objects " $non_pic_object"
if test -z "$pic_object" || test none = "$pic_object"; then
arg=$non_pic_object
fi
else
# If the PIC object exists, use it instead.
# $xdir was prepended to $pic_object above.
non_pic_object=$pic_object
func_append non_pic_objects " $non_pic_object"
fi
else
# Only an error if not doing a dry-run.
if $opt_dry_run; then
# Extract subdirectory from the argument.
func_dirname "$arg" "/" ""
xdir=$func_dirname_result
func_lo2o "$arg"
pic_object=$xdir$objdir/$func_lo2o_result
non_pic_object=$xdir$func_lo2o_result
func_append libobjs " $pic_object"
func_append non_pic_objects " $non_pic_object"
else
func_fatal_error "'$arg' is not a valid libtool object"
fi
fi
;;
*.$libext)
# An archive.
func_append deplibs " $arg"
func_append old_deplibs " $arg"
continue
;;
*.la)
# A libtool-controlled library.
func_resolve_sysroot "$arg"
if test dlfiles = "$prev"; then
# This library was specified with -dlopen.
func_append dlfiles " $func_resolve_sysroot_result"
prev=
elif test dlprefiles = "$prev"; then
# The library was specified with -dlpreopen.
func_append dlprefiles " $func_resolve_sysroot_result"
prev=
else
func_append deplibs " $func_resolve_sysroot_result"
fi
continue
;;
# Some other compiler argument.
*)
# Unknown arguments in both finalize_command and compile_command need
# to be aesthetically quoted because they are evaled later.
func_quote_for_eval "$arg"
arg=$func_quote_for_eval_result
;;
esac # arg
# Now actually substitute the argument into the commands.
if test -n "$arg"; then
func_append compile_command " $arg"
func_append finalize_command " $arg"
fi
done # argument parsing loop
test -n "$prev" && \
func_fatal_help "the '$prevarg' option requires an argument"
if test yes = "$export_dynamic" && test -n "$export_dynamic_flag_spec"; then
eval arg=\"$export_dynamic_flag_spec\"
func_append compile_command " $arg"
func_append finalize_command " $arg"
fi
# --- Derive the output name, the search paths, and the per-output object
# --- directory used by the rest of the link mode.
oldlibs=
# calculate the name of the file, without its directory
func_basename "$output"
outputname=$func_basename_result
libobjs_save=$libobjs
if test -n "$shlibpath_var"; then
# get the directories listed in $shlibpath_var
# (the runtime loader's path variable, e.g. LD_LIBRARY_PATH),
# converting ':' separators to spaces for word-splitting.
eval shlib_search_path=\`\$ECHO \"\$$shlibpath_var\" \| \$SED \'s/:/ /g\'\`
else
shlib_search_path=
fi
# The *_spec values may reference other variables, hence the eval.
eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
# Definition is injected by LT_CONFIG during libtool generation.
func_munge_path_list sys_lib_dlsearch_path "$LT_SYS_LIBRARY_PATH"
func_dirname "$output" "/" ""
output_objdir=$func_dirname_result$objdir
func_to_tool_file "$output_objdir/"
tool_output_objdir=$func_to_tool_file_result
# Create the object directory.
func_mkdir_p "$output_objdir"
# Determine the type of output: classify the requested output file by its
# extension and set $linkmode accordingly for the rest of this run.
# (func_fatal_help does not return.)
test -z "$output" && func_fatal_help "you must specify an output file"
# Default first: anything unrecognized is treated as a program.
linkmode=prog
case $output in
*.$libext) linkmode=oldlib ;;
*.lo | *.$objext) linkmode=obj ;;
*.la) linkmode=lib ;;
esac
# Detect libraries that are listed more than once on the command line
# (e.g. -la -lb -la).  Such duplicates are deliberate and must survive
# the later de-duplication passes, so record them in $specialdeplibs.
specialdeplibs=
libs=
for dup_candidate in $deplibs; do
if $opt_preserve_dup_deps; then
case "$libs " in
*" $dup_candidate "*) func_append specialdeplibs " $dup_candidate" ;;
esac
fi
func_append libs " $dup_candidate"
done
# In lib mode the compiler's implicit pre- and post-dependencies take part
# in the link alongside the user-supplied libraries.
if test lib = "$linkmode"; then
libs="$predeps $libs $compiler_lib_search_path $postdeps"
# Compute libraries that are listed more than once in $predeps
# $postdeps and mark them as special (i.e., whose duplicates are
# not to be eliminated).
pre_post_deps=
if $opt_duplicate_compiler_generated_deps; then
for pre_post_dep in $predeps $postdeps; do
case "$pre_post_deps " in
# BUGFIX: append the single duplicated entry ($pre_post_dep), not the
# entire accumulated list ($pre_post_deps).  The original appended the
# whole list, polluting $specialdeplibs with every previously seen
# pre/post dependency instead of just the duplicated one.
*" $pre_post_dep "*) func_append specialdeplibs " $pre_post_dep" ;;
esac
func_append pre_post_deps " $pre_post_dep"
done
fi
# Scratch list no longer needed.
pre_post_deps=
fi
# --- Reset the dependency-analysis state and choose which passes the
# --- dependency loop below will run, based on $linkmode.
deplibs=
newdependency_libs=
newlib_search_path=
need_relink=no # whether we're linking any uninstalled libtool libraries
notinst_deplibs= # not-installed libtool libraries
notinst_path= # paths that contain not-installed libtool libraries
case $linkmode in
lib)
as_needed_flag=
passes="conv dlpreopen link"
# Libraries may only -dlopen/-dlpreopen other libtool libraries.
for file in $dlfiles $dlprefiles; do
case $file in
*.la) ;;
*)
func_fatal_help "libraries can '-dlopen' only libtool libraries: $file"
;;
esac
done
;;
prog)
as_needed_flag=
compile_deplibs=
finalize_deplibs=
alldeplibs=false
newdlfiles=
newdlprefiles=
# Programs additionally get a 'scan' pass and dlopen handling.
passes="conv scan dlopen dlpreopen link"
;;
*) passes="conv"
;;
esac
# --- Main pass loop: each pass re-derives $libs (the work list for the
# --- per-deplib loop below) from the state left by earlier passes.
for pass in $passes; do
# The preopen pass in lib mode reverses $deplibs; put it back here
# so that -L comes before libs that need it for instance...
if test lib,link = "$linkmode,$pass"; then
## FIXME: Find the place where the list is rebuilt in the wrong
## order, and fix it there properly
tmp_deplibs=
for deplib in $deplibs; do
tmp_deplibs="$deplib $tmp_deplibs"
done
deplibs=$tmp_deplibs
fi
if test lib,link = "$linkmode,$pass" ||
test prog,scan = "$linkmode,$pass"; then
libs=$deplibs
deplibs=
fi
if test prog = "$linkmode"; then
case $pass in
dlopen) libs=$dlfiles ;;
dlpreopen) libs=$dlprefiles ;;
link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
esac
fi
if test lib,dlpreopen = "$linkmode,$pass"; then
# Collect and forward deplibs of preopened libtool libs
for lib in $dlprefiles; do
# Ignore non-libtool-libs
dependency_libs=
func_resolve_sysroot "$lib"
case $lib in
*.la) func_source "$func_resolve_sysroot_result" ;;
esac
# Collect preopened libtool deplibs, except any this library
# has declared as weak libs
for deplib in $dependency_libs; do
func_basename "$deplib"
deplib_base=$func_basename_result
case " $weak_libs " in
*" $deplib_base "*) ;;
*) func_append deplibs " $deplib" ;;
esac
done
done
libs=$dlprefiles
fi
if test dlopen = "$pass"; then
# Collect dlpreopened libraries
# Save the current deplibs so the dlpreopened set can be
# prepended to it once this pass finishes (see end of pass loop).
save_deplibs=$deplibs
deplibs=
fi
# --- Per-dependency loop.  The case statement classifies each work-list
# --- entry; branches that fully consume the entry use 'continue', the
# --- .la branch falls through to the shared library handling further on.
for deplib in $libs; do
lib=
found=false
case $deplib in
-Wl,--as-needed)
# Keep --as-needed aside during the final link so it can be
# positioned explicitly; otherwise pass it through.
if test prog,link = "$linkmode,$pass" ||
test lib,link = "$linkmode,$pass"; then
as_needed_flag="$deplib "
else
deplibs="$deplib $deplibs"
fi
continue
;;
-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
|-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
# Threading/OpenMP flags are compiler (not linker-only) flags.
if test prog,link = "$linkmode,$pass"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
func_append compiler_flags " $deplib"
if test lib = "$linkmode"; then
# Record once in the flags inherited by users of this library.
case "$new_inherited_linker_flags " in
*" $deplib "*) ;;
* ) func_append new_inherited_linker_flags " $deplib" ;;
esac
fi
fi
continue
;;
-l*)
if test lib != "$linkmode" && test prog != "$linkmode"; then
func_warning "'-l' is ignored for archives/objects"
continue
fi
func_stripname '-l' '' "$deplib"
name=$func_stripname_result
if test lib = "$linkmode"; then
searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path"
else
searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path"
fi
# Look for lib<name> with each known extension; a .la hit marks the
# dependency as a libtool library ($found=:), anything else leaves
# $found false but sets $lib to the file found.
for searchdir in $searchdirs; do
for search_ext in .la $std_shrext .so .a; do
# Search the libtool library
lib=$searchdir/lib$name$search_ext
if test -f "$lib"; then
if test .la = "$search_ext"; then
found=:
else
found=false
fi
break 2
fi
done
done
if $found; then
# deplib is a libtool library
# If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
# We need to do some special things here, and not later.
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
case " $predeps $postdeps " in
*" $deplib "*)
if func_lalib_p "$lib"; then
library_names=
old_library=
func_source "$lib"
# $ll ends up as the last of old_library + library_names.
for l in $old_library $library_names; do
ll=$l
done
if test "X$ll" = "X$old_library"; then # only static version available
found=false
func_dirname "$lib" "" "."
ladir=$func_dirname_result
lib=$ladir/$old_library
if test prog,link = "$linkmode,$pass"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs"
fi
continue
fi
fi
;;
*) ;;
esac
fi
else
# deplib doesn't seem to be a libtool library
if test prog,link = "$linkmode,$pass"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs"
fi
continue
fi
;; # -l
*.ltframework)
# Darwin framework placeholder (converted from '-framework foo').
if test prog,link = "$linkmode,$pass"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
if test lib = "$linkmode"; then
case "$new_inherited_linker_flags " in
*" $deplib "*) ;;
* ) func_append new_inherited_linker_flags " $deplib" ;;
esac
fi
fi
continue
;;
-L*)
case $linkmode in
lib)
deplibs="$deplib $deplibs"
test conv = "$pass" && continue
newdependency_libs="$deplib $newdependency_libs"
func_stripname '-L' '' "$deplib"
func_resolve_sysroot "$func_stripname_result"
func_append newlib_search_path " $func_resolve_sysroot_result"
;;
prog)
if test conv = "$pass"; then
deplibs="$deplib $deplibs"
continue
fi
if test scan = "$pass"; then
deplibs="$deplib $deplibs"
else
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
fi
func_stripname '-L' '' "$deplib"
func_resolve_sysroot "$func_stripname_result"
func_append newlib_search_path " $func_resolve_sysroot_result"
;;
*)
func_warning "'-L' is ignored for archives/objects"
;;
esac # linkmode
continue
;; # -L
-R*)
if test link = "$pass"; then
func_stripname '-R' '' "$deplib"
func_resolve_sysroot "$func_stripname_result"
dir=$func_resolve_sysroot_result
# Make sure the xrpath contains only unique directories.
case "$xrpath " in
*" $dir "*) ;;
*) func_append xrpath " $dir" ;;
esac
fi
deplibs="$deplib $deplibs"
continue
;;
*.la)
# A libtool library: no 'continue' here -- processing continues
# after the esac with $lib set to the resolved .la path.
func_resolve_sysroot "$deplib"
lib=$func_resolve_sysroot_result
;;
*.$libext)
if test conv = "$pass"; then
deplibs="$deplib $deplibs"
continue
fi
case $linkmode in
lib)
# Linking convenience modules into shared libraries is allowed,
# but linking other static libraries is non-portable.
case " $dlpreconveniencelibs " in
*" $deplib "*) ;;
*)
valid_a_lib=false
case $deplibs_check_method in
match_pattern*)
set dummy $deplibs_check_method; shift
match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \
| $EGREP "$match_pattern_regex" > /dev/null; then
valid_a_lib=:
fi
;;
pass_all)
valid_a_lib=:
;;
esac
if $valid_a_lib; then
echo
$ECHO "*** Warning: Linking the shared library $output against the"
$ECHO "*** static library $deplib is not portable!"
deplibs="$deplib $deplibs"
else
echo
$ECHO "*** Warning: Trying to link with static lib archive $deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have"
echo "*** because the file extensions .$libext of this argument makes me believe"
echo "*** that it is just a static archive that I should not use here."
fi
;;
esac
continue
;;
prog)
if test link != "$pass"; then
deplibs="$deplib $deplibs"
else
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
fi
continue
;;
esac # linkmode
;; # *.$libext
*.lo | *.$objext)
if test conv = "$pass"; then
deplibs="$deplib $deplibs"
elif test prog = "$linkmode"; then
if test dlpreopen = "$pass" || test yes != "$dlopen_support" || test no = "$build_libtool_libs"; then
# If there is no dlopen support or we're linking statically,
# we need to preload.
func_append newdlprefiles " $deplib"
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
func_append newdlfiles " $deplib"
fi
fi
continue
;;
%DEPLIBS%)
# Placeholder inserted by the prog link pass; marks that all
# deplibs were already handled.
alldeplibs=:
continue
;;
esac # case $deplib
# --- Only .la entries reach this point ($lib was set in the case above).
$found || test -f "$lib" \
|| func_fatal_error "cannot find the library '$lib' or unhandled argument '$deplib'"
# Check to see that this really is a libtool archive.
func_lalib_unsafe_p "$lib" \
|| func_fatal_error "'$lib' is not a valid libtool archive"
func_dirname "$lib" "" "."
ladir=$func_dirname_result
# Reset the variables that sourcing the .la file is expected to set.
dlname=
dlopen=
dlpreopen=
libdir=
library_names=
old_library=
inherited_linker_flags=
# If the library was installed with an old release of libtool,
# it will not redefine variables installed, or shouldnotlink
installed=yes
shouldnotlink=no
avoidtemprpath=
# Read the .la file
func_source "$lib"
# Convert "-framework foo" to "foo.ltframework"
if test -n "$inherited_linker_flags"; then
tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'`
for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do
case " $new_inherited_linker_flags " in
*" $tmp_inherited_linker_flag "*) ;;
*) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";;
esac
done
fi
# Undo the same conversion inside dependency_libs before processing it.
dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
if test lib,link = "$linkmode,$pass" ||
test prog,scan = "$linkmode,$pass" ||
{ test prog != "$linkmode" && test lib != "$linkmode"; }; then
test -n "$dlopen" && func_append dlfiles " $dlopen"
test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen"
fi
if test conv = "$pass"; then
# Only check for convenience libraries
deplibs="$lib $deplibs"
if test -z "$libdir"; then
# An empty $libdir marks a convenience (uninstalled) library.
if test -z "$old_library"; then
func_fatal_error "cannot find name of link library for '$lib'"
fi
# It is a libtool convenience library, so add in its objects.
func_append convenience " $ladir/$objdir/$old_library"
func_append old_convenience " $ladir/$objdir/$old_library"
elif test prog != "$linkmode" && test lib != "$linkmode"; then
func_fatal_error "'$lib' is not a convenience library"
fi
tmp_libs=
for deplib in $dependency_libs; do
deplibs="$deplib $deplibs"
if $opt_preserve_dup_deps; then
case "$tmp_libs " in
*" $deplib "*) func_append specialdeplibs " $deplib" ;;
esac
fi
func_append tmp_libs " $deplib"
done
continue
fi # $pass = conv
# Get the name of the library we link against.
linklib=
if test -n "$old_library" &&
{ test yes = "$prefer_static_libs" ||
test built,no = "$prefer_static_libs,$installed"; }; then
linklib=$old_library
else
# Otherwise use the last of old_library + library_names
# (by convention the real shared library name).
for l in $old_library $library_names; do
linklib=$l
done
fi
if test -z "$linklib"; then
func_fatal_error "cannot find name of link library for '$lib'"
fi
# This library was specified with -dlopen.
if test dlopen = "$pass"; then
test -z "$libdir" \
&& func_fatal_error "cannot -dlopen a convenience library: '$lib'"
if test -z "$dlname" ||
test yes != "$dlopen_support" ||
test no = "$build_libtool_libs"
then
# If there is no dlname, no dlopen support or we're linking
# statically, we need to preload. We also need to preload any
# dependent libraries so libltdl's deplib preloader doesn't
# bomb out in the load deplibs phase.
func_append dlprefiles " $lib $dependency_libs"
else
func_append newdlfiles " $lib"
fi
continue
fi # $pass = dlopen
# We need an absolute path.
case $ladir in
[\\/]* | [A-Za-z]:[\\/]*) abs_ladir=$ladir ;;
*)
abs_ladir=`cd "$ladir" && pwd`
if test -z "$abs_ladir"; then
func_warning "cannot determine absolute directory name of '$ladir'"
func_warning "passing it literally to the linker, although it might fail"
abs_ladir=$ladir
fi
;;
esac
func_basename "$lib"
laname=$func_basename_result
# Find the relevant object directory and library name.
if test yes = "$installed"; then
if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
func_warning "library '$lib' was moved."
dir=$ladir
absdir=$abs_ladir
libdir=$abs_ladir
else
dir=$lt_sysroot$libdir
absdir=$lt_sysroot$libdir
fi
test yes = "$hardcode_automatic" && avoidtemprpath=yes
else
if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
dir=$ladir
absdir=$abs_ladir
# Remove this search path later
func_append notinst_path " $abs_ladir"
else
dir=$ladir/$objdir
absdir=$abs_ladir/$objdir
# Remove this search path later
func_append notinst_path " $abs_ladir"
fi
fi # $installed = yes
func_stripname 'lib' '.la' "$laname"
name=$func_stripname_result
# This library was specified with -dlpreopen.
if test dlpreopen = "$pass"; then
if test -z "$libdir" && test prog = "$linkmode"; then
func_fatal_error "only libraries may -dlpreopen a convenience library: '$lib'"
fi
case $host in
# special handling for platforms with PE-DLLs.
*cygwin* | *mingw* | *cegcc* )
# Linker will automatically link against shared library if both
# static and shared are present. Therefore, ensure we extract
# symbols from the import library if a shared library is present
# (otherwise, the dlopen module name will be incorrect). We do
# this by putting the import library name into $newdlprefiles.
# We recover the dlopen module name by 'saving' the la file
# name in a special purpose variable, and (later) extracting the
# dlname from the la file.
if test -n "$dlname"; then
func_tr_sh "$dir/$linklib"
eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname"
func_append newdlprefiles " $dir/$linklib"
else
func_append newdlprefiles " $dir/$old_library"
# Keep a list of preopened convenience libraries to check
# that they are being used correctly in the link pass.
test -z "$libdir" && \
func_append dlpreconveniencelibs " $dir/$old_library"
fi
;;
* )
# Prefer using a static library (so that no silly _DYNAMIC symbols
# are required to link).
if test -n "$old_library"; then
func_append newdlprefiles " $dir/$old_library"
# Keep a list of preopened convenience libraries to check
# that they are being used correctly in the link pass.
test -z "$libdir" && \
func_append dlpreconveniencelibs " $dir/$old_library"
# Otherwise, use the dlname, so that lt_dlopen finds it.
elif test -n "$dlname"; then
func_append newdlprefiles " $dir/$dlname"
else
func_append newdlprefiles " $dir/$linklib"
fi
;;
esac
fi # $pass = dlpreopen
# An empty $libdir marks a convenience library: link its static archive.
if test -z "$libdir"; then
# Link the convenience library
if test lib = "$linkmode"; then
deplibs="$dir/$old_library $deplibs"
elif test prog,link = "$linkmode,$pass"; then
compile_deplibs="$dir/$old_library $compile_deplibs"
finalize_deplibs="$dir/$old_library $finalize_deplibs"
else
deplibs="$lib $deplibs" # used for prog,scan pass
fi
continue
fi
if test prog = "$linkmode" && test link != "$pass"; then
# prog scan/dlpreopen pass: record search paths and decide whether
# this library's own dependencies must all be linked in too.
func_append newlib_search_path " $ladir"
deplibs="$lib $deplibs"
linkalldeplibs=false
if test no != "$link_all_deplibs" || test -z "$library_names" ||
test no = "$build_libtool_libs"; then
linkalldeplibs=:
fi
tmp_libs=
for deplib in $dependency_libs; do
case $deplib in
-L*) func_stripname '-L' '' "$deplib"
func_resolve_sysroot "$func_stripname_result"
func_append newlib_search_path " $func_resolve_sysroot_result"
;;
esac
# Need to link against all dependency_libs?
if $linkalldeplibs; then
deplibs="$deplib $deplibs"
else
# Need to hardcode shared library paths
# or/and link against static libraries
newdependency_libs="$deplib $newdependency_libs"
fi
if $opt_preserve_dup_deps; then
case "$tmp_libs " in
*" $deplib "*) func_append specialdeplibs " $deplib" ;;
esac
fi
func_append tmp_libs " $deplib"
done # for deplib
continue
fi # $linkmode = prog...
if test prog,link = "$linkmode,$pass"; then
if test -n "$library_names" &&
{ { test no = "$prefer_static_libs" ||
test built,yes = "$prefer_static_libs,$installed"; } ||
test -z "$old_library"; }; then
# We need to hardcode the library path
if test -n "$shlibpath_var" && test -z "$avoidtemprpath"; then
# Make sure the rpath contains only unique directories.
case $temp_rpath: in
*"$absdir:"*) ;;
*) func_append temp_rpath "$absdir:" ;;
esac
fi
# Hardcode the library path.
# Skip directories that are in the system default run-time
# search path.
case " $sys_lib_dlsearch_path " in
*" $absdir "*) ;;
*)
case "$compile_rpath " in
*" $absdir "*) ;;
*) func_append compile_rpath " $absdir" ;;
esac
;;
esac
case " $sys_lib_dlsearch_path " in
*" $libdir "*) ;;
*)
case "$finalize_rpath " in
*" $libdir "*) ;;
*) func_append finalize_rpath " $libdir" ;;
esac
;;
esac
fi # $linkmode,$pass = prog,link...
if $alldeplibs &&
{ test pass_all = "$deplibs_check_method" ||
{ test yes = "$build_libtool_libs" &&
test -n "$library_names"; }; }; then
# We only need to search for static libraries
continue
fi
fi
# --- Decide whether this deplib gets linked shared or static, and how its
# --- path is hardcoded, driven by the configured $hardcode_* properties.
link_static=no # Whether the deplib will be linked statically
use_static_libs=$prefer_static_libs
if test built = "$use_static_libs" && test yes = "$installed"; then
use_static_libs=no
fi
if test -n "$library_names" &&
{ test no = "$use_static_libs" || test -z "$old_library"; }; then
case $host in
*cygwin* | *mingw* | *cegcc* | *os2*)
# No point in relinking DLLs because paths are not encoded
func_append notinst_deplibs " $lib"
need_relink=no
;;
*)
if test no = "$installed"; then
func_append notinst_deplibs " $lib"
need_relink=yes
fi
;;
esac
# This is a shared library
# Warn about portability, can't link against -module's on some
# systems (darwin). Don't bleat about dlopened modules though!
dlopenmodule=
for dlpremoduletest in $dlprefiles; do
if test "X$dlpremoduletest" = "X$lib"; then
dlopenmodule=$dlpremoduletest
break
fi
done
if test -z "$dlopenmodule" && test yes = "$shouldnotlink" && test link = "$pass"; then
echo
if test prog = "$linkmode"; then
$ECHO "*** Warning: Linking the executable $output against the loadable module"
else
$ECHO "*** Warning: Linking the shared library $output against the loadable module"
fi
$ECHO "*** $linklib is not portable!"
fi
if test lib = "$linkmode" &&
test yes = "$hardcode_into_libs"; then
# Hardcode the library path.
# Skip directories that are in the system default run-time
# search path.
case " $sys_lib_dlsearch_path " in
*" $absdir "*) ;;
*)
case "$compile_rpath " in
*" $absdir "*) ;;
*) func_append compile_rpath " $absdir" ;;
esac
;;
esac
case " $sys_lib_dlsearch_path " in
*" $libdir "*) ;;
*)
case "$finalize_rpath " in
*" $libdir "*) ;;
*) func_append finalize_rpath " $libdir" ;;
esac
;;
esac
fi
if test -n "$old_archive_from_expsyms_cmds"; then
# figure out the soname
set dummy $library_names
shift
realname=$1
shift
libname=`eval "\\$ECHO \"$libname_spec\""`
# use dlname if we got it. it's perfectly good, no?
if test -n "$dlname"; then
soname=$dlname
elif test -n "$soname_spec"; then
# bleh windows
case $host in
*cygwin* | mingw* | *cegcc* | *os2*)
func_arith $current - $age
major=$func_arith_result
versuffix=-$major
;;
esac
eval soname=\"$soname_spec\"
else
soname=$realname
fi
# Make a new name for the extract_expsyms_cmds to use
soroot=$soname
func_basename "$soroot"
soname=$func_basename_result
func_stripname 'lib' '.dll' "$soname"
newlib=libimp-$func_stripname_result.a
# If the library has no export list, then create one now
if test -f "$output_objdir/$soname-def"; then :
else
func_verbose "extracting exported symbol list from '$soname'"
func_execute_cmds "$extract_expsyms_cmds" 'exit $?'
fi
# Create $newlib
if test -f "$output_objdir/$newlib"; then :; else
func_verbose "generating import library for '$soname'"
func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?'
fi
# make sure the library variables are pointing to the new library
dir=$output_objdir
linklib=$newlib
fi # test -n "$old_archive_from_expsyms_cmds"
# Compile-time command: how to reference the library while building.
if test prog = "$linkmode" || test relink != "$opt_mode"; then
add_shlibpath=
add_dir=
add=
lib_linked=yes
case $hardcode_action in
immediate | unsupported)
if test no = "$hardcode_direct"; then
add=$dir/$linklib
case $host in
*-*-sco3.2v5.0.[024]*) add_dir=-L$dir ;;
*-*-sysv4*uw2*) add_dir=-L$dir ;;
*-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \
*-*-unixware7*) add_dir=-L$dir ;;
*-*-darwin* )
# if the lib is a (non-dlopened) module then we cannot
# link against it, someone is ignoring the earlier warnings
if /usr/bin/file -L $add 2> /dev/null |
$GREP ": [^:]* bundle" >/dev/null; then
if test "X$dlopenmodule" != "X$lib"; then
$ECHO "*** Warning: lib $linklib is a module, not a shared library"
if test -z "$old_library"; then
echo
echo "*** And there doesn't seem to be a static archive available"
echo "*** The link will probably fail, sorry"
else
add=$dir/$old_library
fi
elif test -n "$old_library"; then
add=$dir/$old_library
fi
fi
esac
elif test no = "$hardcode_minus_L"; then
case $host in
*-*-sunos*) add_shlibpath=$dir ;;
esac
add_dir=-L$dir
add=-l$name
elif test no = "$hardcode_shlibpath_var"; then
add_shlibpath=$dir
add=-l$name
else
lib_linked=no
fi
;;
relink)
if test yes = "$hardcode_direct" &&
test no = "$hardcode_direct_absolute"; then
add=$dir/$linklib
elif test yes = "$hardcode_minus_L"; then
add_dir=-L$absdir
# Try looking first in the location we're being installed to.
if test -n "$inst_prefix_dir"; then
case $libdir in
[\\/]*)
func_append add_dir " -L$inst_prefix_dir$libdir"
;;
esac
fi
add=-l$name
elif test yes = "$hardcode_shlibpath_var"; then
add_shlibpath=$dir
add=-l$name
else
lib_linked=no
fi
;;
*) lib_linked=no ;;
esac
if test yes != "$lib_linked"; then
func_fatal_configuration "unsupported hardcode properties"
fi
if test -n "$add_shlibpath"; then
case :$compile_shlibpath: in
*":$add_shlibpath:"*) ;;
*) func_append compile_shlibpath "$add_shlibpath:" ;;
esac
fi
if test prog = "$linkmode"; then
test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
test -n "$add" && compile_deplibs="$add $compile_deplibs"
else
test -n "$add_dir" && deplibs="$add_dir $deplibs"
test -n "$add" && deplibs="$add $deplibs"
if test yes != "$hardcode_direct" &&
test yes != "$hardcode_minus_L" &&
test yes = "$hardcode_shlibpath_var"; then
case :$finalize_shlibpath: in
*":$libdir:"*) ;;
*) func_append finalize_shlibpath "$libdir:" ;;
esac
fi
fi
fi
# Finalize (post-install) command: reference the installed location.
if test prog = "$linkmode" || test relink = "$opt_mode"; then
add_shlibpath=
add_dir=
add=
# Finalize command for both is simple: just hardcode it.
if test yes = "$hardcode_direct" &&
test no = "$hardcode_direct_absolute"; then
add=$libdir/$linklib
elif test yes = "$hardcode_minus_L"; then
add_dir=-L$libdir
add=-l$name
elif test yes = "$hardcode_shlibpath_var"; then
case :$finalize_shlibpath: in
*":$libdir:"*) ;;
*) func_append finalize_shlibpath "$libdir:" ;;
esac
add=-l$name
elif test yes = "$hardcode_automatic"; then
if test -n "$inst_prefix_dir" &&
test -f "$inst_prefix_dir$libdir/$linklib"; then
add=$inst_prefix_dir$libdir/$linklib
else
add=$libdir/$linklib
fi
else
# We cannot seem to hardcode it, guess we'll fake it.
add_dir=-L$libdir
# Try looking first in the location we're being installed to.
if test -n "$inst_prefix_dir"; then
case $libdir in
[\\/]*)
func_append add_dir " -L$inst_prefix_dir$libdir"
;;
esac
fi
add=-l$name
fi
if test prog = "$linkmode"; then
test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
else
test -n "$add_dir" && deplibs="$add_dir $deplibs"
test -n "$add" && deplibs="$add $deplibs"
fi
fi
elif test prog = "$linkmode"; then
# Here we assume that one of hardcode_direct or hardcode_minus_L
# is not unsupported. This is valid on all known static and
# shared platforms.
if test unsupported != "$hardcode_direct"; then
test -n "$old_library" && linklib=$old_library
compile_deplibs="$dir/$linklib $compile_deplibs"
finalize_deplibs="$dir/$linklib $finalize_deplibs"
else
compile_deplibs="-l$name -L$dir $compile_deplibs"
finalize_deplibs="-l$name -L$dir $finalize_deplibs"
fi
elif test yes = "$build_libtool_libs"; then
# Not a shared library
if test pass_all != "$deplibs_check_method"; then
# We're trying link a shared library against a static one
# but the system doesn't support it.
# Just print a warning and add the library to dependency_libs so
# that the program can be linked against the static library.
echo
$ECHO "*** Warning: This system cannot link to static lib archive $lib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have."
if test yes = "$module"; then
echo "*** But as you try to build a module library, libtool will still create "
echo "*** a static module, that should work as long as the dlopening application"
echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
if test -z "$global_symbol_pipe"; then
echo
echo "*** However, this would only work if libtool was able to extract symbol"
echo "*** lists from a program, using 'nm' or equivalent, but libtool could"
echo "*** not find such a program. So, this module is probably useless."
echo "*** 'nm' from GNU binutils and a full rebuild may help."
fi
if test no = "$build_old_libs"; then
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
fi
else
deplibs="$dir/$old_library $deplibs"
link_static=yes
fi
fi # link shared/static library?
# --- In lib mode, fold this deplib and its own dependency_libs into the
# --- new library's dependency information.
if test lib = "$linkmode"; then
if test -n "$dependency_libs" &&
{ test yes != "$hardcode_into_libs" ||
test yes = "$build_old_libs" ||
test yes = "$link_static"; }; then
# Extract -R from dependency_libs
temp_deplibs=
for libdir in $dependency_libs; do
case $libdir in
-R*) func_stripname '-R' '' "$libdir"
temp_xrpath=$func_stripname_result
case " $xrpath " in
*" $temp_xrpath "*) ;;
*) func_append xrpath " $temp_xrpath";;
esac;;
*) func_append temp_deplibs " $libdir";;
esac
done
dependency_libs=$temp_deplibs
fi
func_append newlib_search_path " $absdir"
# Link against this library
test no = "$link_static" && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
# ... and its dependency_libs
tmp_libs=
for deplib in $dependency_libs; do
newdependency_libs="$deplib $newdependency_libs"
case $deplib in
-L*) func_stripname '-L' '' "$deplib"
func_resolve_sysroot "$func_stripname_result";;
*) func_resolve_sysroot "$deplib" ;;
esac
if $opt_preserve_dup_deps; then
case "$tmp_libs " in
*" $func_resolve_sysroot_result "*)
func_append specialdeplibs " $func_resolve_sysroot_result" ;;
esac
fi
func_append tmp_libs " $func_resolve_sysroot_result"
done
if test no != "$link_all_deplibs"; then
# Add the search paths of all dependency libraries
for deplib in $dependency_libs; do
path=
case $deplib in
-L*) path=$deplib ;;
*.la)
func_resolve_sysroot "$deplib"
deplib=$func_resolve_sysroot_result
func_dirname "$deplib" "" "."
dir=$func_dirname_result
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) absdir=$dir ;;
*)
absdir=`cd "$dir" && pwd`
if test -z "$absdir"; then
func_warning "cannot determine absolute directory name of '$dir'"
absdir=$dir
fi
;;
esac
if $GREP "^installed=no" $deplib > /dev/null; then
case $host in
*-*-darwin*)
# On Darwin, point the linker at the uninstalled dylib via
# -dylib_file so install names resolve before installation.
depdepl=
eval deplibrary_names=`$SED -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
if test -n "$deplibrary_names"; then
for tmp in $deplibrary_names; do
depdepl=$tmp
done
if test -f "$absdir/$objdir/$depdepl"; then
depdepl=$absdir/$objdir/$depdepl
darwin_install_name=`$OTOOL -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
if test -z "$darwin_install_name"; then
darwin_install_name=`$OTOOL64 -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'`
fi
func_append compiler_flags " $wl-dylib_file $wl$darwin_install_name:$depdepl"
func_append linker_flags " -dylib_file $darwin_install_name:$depdepl"
path=
fi
fi
;;
*)
path=-L$absdir/$objdir
;;
esac
else
eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
test -z "$libdir" && \
func_fatal_error "'$deplib' is not a valid libtool archive"
test "$absdir" != "$libdir" && \
func_warning "'$deplib' seems to be moved"
path=-L$absdir
fi
;;
esac
case " $deplibs " in
*" $path "*) ;;
*) deplibs="$path $deplibs" ;;
esac
done
fi # link_all_deplibs != no
fi # linkmode = lib
done # for deplib in $libs
# --- End-of-pass bookkeeping: merge inherited flags, restore the
# --- dlpreopen-saved deplibs, and de-duplicate the dependency lists.
if test link = "$pass"; then
if test prog = "$linkmode"; then
compile_deplibs="$new_inherited_linker_flags $compile_deplibs"
finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs"
else
compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
fi
fi
dependency_libs=$newdependency_libs
if test dlpreopen = "$pass"; then
# Link the dlpreopened libraries before other libraries
for deplib in $save_deplibs; do
deplibs="$deplib $deplibs"
done
fi
if test dlopen != "$pass"; then
test conv = "$pass" || {
# Make sure lib_search_path contains only unique directories.
lib_search_path=
for dir in $newlib_search_path; do
case "$lib_search_path " in
*" $dir "*) ;;
*) func_append lib_search_path " $dir" ;;
esac
done
newlib_search_path=
}
if test prog,link = "$linkmode,$pass"; then
vars="compile_deplibs finalize_deplibs"
else
vars=deplibs
fi
for var in $vars dependency_libs; do
# Add libraries to $var in reverse order
eval tmp_libs=\"\$$var\"
new_libs=
for deplib in $tmp_libs; do
# FIXME: Pedantically, this is the right thing to do, so
# that some nasty dependency loop isn't accidentally
# broken:
#new_libs="$deplib $new_libs"
# Pragmatically, this seems to cause very few problems in
# practice:
case $deplib in
-L*) new_libs="$deplib $new_libs" ;;
-R*) ;;
*)
# And here is the reason: when a library appears more
# than once as an explicit dependence of a library, or
# is implicitly linked in more than once by the
# compiler, it is considered special, and multiple
# occurrences thereof are not removed. Compare this
# with having the same library being listed as a
# dependency of multiple other libraries: in this case,
# we know (pedantically, we assume) the library does not
# need to be listed more than once, so we keep only the
# last copy. This is not always right, but it is rare
# enough that we require users that really mean to play
# such unportable linking tricks to link the library
# using -Wl,-lname, so that libtool does not consider it
# for duplicate removal.
case " $specialdeplibs " in
*" $deplib "*) new_libs="$deplib $new_libs" ;;
*)
case " $new_libs " in
*" $deplib "*) ;;
*) new_libs="$deplib $new_libs" ;;
esac
;;
esac
;;
esac
done
# Second sweep: also de-duplicate the -L entries kept above.
tmp_libs=
for deplib in $new_libs; do
case $deplib in
-L*)
case " $tmp_libs " in
*" $deplib "*) ;;
*) func_append tmp_libs " $deplib" ;;
esac
;;
*) func_append tmp_libs " $deplib" ;;
esac
done
eval $var=\"$tmp_libs\"
done # for var
fi
# Add Sun CC postdeps if required:
# Only relevant when linking with the C++ tag: the Sun/Oracle C++
# compiler needs its Cstd/Crun runtime libraries appended to postdeps
# unless the alternative C++ standard library ABI is in use.
test CXX = "$tagname" && {
case $host_os in
linux*)
# On Linux, detect Sun C++ by its '-V' version banner.
case `$CC -V 2>&1 | sed 5q` in
*Sun\ C*) # Sun C++ 5.9
func_suncc_cstd_abi
if test no != "$suncc_use_cstd_abi"; then
func_append postdeps ' -library=Cstd -library=Crun'
fi
;;
esac
;;
solaris*)
# On Solaris, detect Sun C++ by the compiler executable name.
func_cc_basename "$CC"
case $func_cc_basename_result in
CC* | sunCC*)
func_suncc_cstd_abi
if test no != "$suncc_use_cstd_abi"; then
func_append postdeps ' -library=Cstd -library=Crun'
fi
;;
esac
;;
esac
}
# Last step: remove runtime libs from dependency_libs
# (they stay in deplibs)
# Anything already listed in $predeps/$postdeps (compiler runtime
# libraries) or the compiler's own search path is dropped from the
# recorded dependency_libs, since the compiler re-adds it implicitly.
tmp_libs=
for i in $dependency_libs; do
case " $predeps $postdeps $compiler_lib_search_path " in
*" $i "*)
# Blank out the entry so the test below skips it.
i=
;;
esac
if test -n "$i"; then
func_append tmp_libs " $i"
fi
done
dependency_libs=$tmp_libs
done # for pass
if test prog = "$linkmode"; then
dlfiles=$newdlfiles
fi
if test prog = "$linkmode" || test lib = "$linkmode"; then
dlprefiles=$newdlprefiles
fi
case $linkmode in
oldlib)
# Building a plain static archive (.a): shared-library-only
# options are meaningless here, so warn about each one given,
# then fall through to static-library setup.
if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
func_warning "'-dlopen' is ignored for archives"
fi
case " $deplibs" in
*\ -l* | *\ -L*)
func_warning "'-l' and '-L' are ignored for archives" ;;
esac
test -n "$rpath" && \
func_warning "'-rpath' is ignored for archives"
test -n "$xrpath" && \
func_warning "'-R' is ignored for archives"
test -n "$vinfo" && \
func_warning "'-version-info/-version-number' is ignored for archives"
test -n "$release" && \
func_warning "'-release' is ignored for archives"
# Now set the variables for building old libraries.
build_libtool_libs=no
oldlibs=$output
func_append objs "$old_deplibs"
;;
lib)
# Make sure we only generate libraries of the form 'libNAME.la'.
case $outputname in
lib*)
func_stripname 'lib' '.la' "$outputname"
name=$func_stripname_result
eval shared_ext=\"$shrext_cmds\"
eval libname=\"$libname_spec\"
;;
*)
test no = "$module" \
&& func_fatal_help "libtool library '$output' must begin with 'lib'"
if test no != "$need_lib_prefix"; then
# Add the "lib" prefix for modules if required
func_stripname '' '.la' "$outputname"
name=$func_stripname_result
eval shared_ext=\"$shrext_cmds\"
eval libname=\"$libname_spec\"
else
func_stripname '' '.la' "$outputname"
libname=$func_stripname_result
fi
;;
esac
if test -n "$objs"; then
if test pass_all != "$deplibs_check_method"; then
func_fatal_error "cannot build libtool library '$output' from non-libtool objects on this host:$objs"
else
echo
$ECHO "*** Warning: Linking the shared library $output against the non-libtool"
$ECHO "*** objects $objs is not portable!"
func_append libobjs " $objs"
fi
fi
test no = "$dlself" \
|| func_warning "'-dlopen self' is ignored for libtool libraries"
set dummy $rpath
shift
test 1 -lt "$#" \
&& func_warning "ignoring multiple '-rpath's for a libtool library"
install_libdir=$1
oldlibs=
if test -z "$rpath"; then
if test yes = "$build_libtool_libs"; then
# Building a libtool convenience library.
# Some compilers have problems with a '.al' extension so
# convenience libraries should have the same extension an
# archive normally would.
oldlibs="$output_objdir/$libname.$libext $oldlibs"
build_libtool_libs=convenience
build_old_libs=yes
fi
test -n "$vinfo" && \
func_warning "'-version-info/-version-number' is ignored for convenience libraries"
test -n "$release" && \
func_warning "'-release' is ignored for convenience libraries"
else
# Parse the version information argument.
save_ifs=$IFS; IFS=:
set dummy $vinfo 0 0 0
shift
IFS=$save_ifs
test -n "$7" && \
func_fatal_help "too many parameters to '-version-info'"
# convert absolute version numbers to libtool ages
# this retains compatibility with .la files and attempts
# to make the code below a bit more comprehensible
# When -version-number was used ($vinfo_number = yes), the three
# positional parameters are absolute major/minor/revision numbers and
# must be mapped onto libtool's current/revision/age triple in a
# platform-specific way; with -version-info they are already
# current/revision/age and are used as-is.
case $vinfo_number in
yes)
number_major=$1
number_minor=$2
number_revision=$3
#
# There are really only two kinds -- those that
# use the current revision as the major version
# and those that subtract age and use age as
# a minor version. But, then there is irix
# that has an extra 1 added just for fun
#
case $version_type in
# correct linux to gnu/linux during the next big refactor
darwin|freebsd-elf|linux|osf|windows|none)
# current = major + minor, age = minor: "major" stays the
# soname component after the standard current - age step.
func_arith $number_major + $number_minor
current=$func_arith_result
age=$number_minor
revision=$number_revision
;;
freebsd-aout|qnx|sunos)
# These use major.minor directly and have no age concept.
current=$number_major
revision=$number_minor
age=0
;;
irix|nonstopux)
# Like the first group, but the revision also tracks minor
# and the usual IRIX "+1" increment is suppressed.
func_arith $number_major + $number_minor
current=$func_arith_result
age=$number_minor
revision=$number_minor
lt_irix_increment=no
;;
esac
;;
no)
# -version-info: parameters are already current:revision:age.
current=$1
revision=$2
age=$3
;;
esac
# Check that each of the things are valid numbers.
# Each glob pattern accepts a nonnegative decimal integer of up to
# five digits with no leading zeros (plus plain "0").
case $current in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
func_error "CURRENT '$current' must be a nonnegative integer"
func_fatal_error "'$vinfo' is not valid version information"
;;
esac
case $revision in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
func_error "REVISION '$revision' must be a nonnegative integer"
func_fatal_error "'$vinfo' is not valid version information"
;;
esac
case $age in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
func_error "AGE '$age' must be a nonnegative integer"
func_fatal_error "'$vinfo' is not valid version information"
;;
esac
# An interface cannot be backward-compatible with more versions than
# have existed: age may never exceed current.
if test "$age" -gt "$current"; then
func_error "AGE '$age' is greater than the current interface number '$current'"
func_fatal_error "'$vinfo' is not valid version information"
fi
# Calculate the version variables.
# From the validated current/revision/age triple, derive the three
# platform-specific outputs:
#   major     - the soname version component (e.g. ".3")
#   versuffix - the suffix appended to the real library file name
#   verstring - an internal version string for linkers that embed one
#               (Darwin, IRIX, OSF); empty where unused
major=
versuffix=
verstring=
case $version_type in
none) ;;
darwin)
# Like Linux, but with the current version available in
# verstring for coding it into the library header
func_arith $current - $age
major=.$func_arith_result
versuffix=$major.$age.$revision
# Darwin ld doesn't like 0 for these options...
func_arith $current + 1
minor_current=$func_arith_result
# NOTE(review): xlcverstring is presumably consumed by XLC-specific
# archive commands elsewhere in this script -- not visible here.
xlcverstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision"
# On Darwin other compilers
# (The former unconditional verstring assignment here was a dead
# store: every branch of the case below reassigns verstring, and the
# '*)' default produces the identical value. It has been removed.)
case $CC in
nagfor*)
# nagfor drives the linker indirectly, so the options need $wl.
verstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision"
;;
*)
verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
;;
esac
;;
freebsd-aout)
major=.$current
versuffix=.$current.$revision
;;
freebsd-elf)
func_arith $current - $age
major=.$func_arith_result
versuffix=$major.$age.$revision
;;
irix | nonstopux)
# IRIX conventionally adds 1 to current - age unless suppressed
# (lt_irix_increment=no was set by -version-number conversion).
if test no = "$lt_irix_increment"; then
func_arith $current - $age
else
func_arith $current - $age + 1
fi
major=$func_arith_result
case $version_type in
nonstopux) verstring_prefix=nonstopux ;;
*) verstring_prefix=sgi ;;
esac
verstring=$verstring_prefix$major.$revision
# Add in all the interfaces that we are compatible with.
# Builds "<prefix><major>.0:<prefix><major>.1:...:<prefix><major>.<revision>".
loop=$revision
while test 0 -ne "$loop"; do
func_arith $revision - $loop
iface=$func_arith_result
func_arith $loop - 1
loop=$func_arith_result
verstring=$verstring_prefix$major.$iface:$verstring
done
# Before this point, $major must not contain '.'.
major=.$major
versuffix=$major.$revision
;;
linux) # correct to gnu/linux during the next big refactor
func_arith $current - $age
major=.$func_arith_result
versuffix=$major.$age.$revision
;;
osf)
func_arith $current - $age
major=.$func_arith_result
versuffix=.$current.$age.$revision
verstring=$current.$age.$revision
# Add in all the interfaces that we are compatible with.
# Produces "<c>.<a>.<r>:<c-a>.0:...:<c-1>.0:<c>.0".
loop=$age
while test 0 -ne "$loop"; do
func_arith $current - $loop
iface=$func_arith_result
func_arith $loop - 1
loop=$func_arith_result
verstring=$verstring:$iface.0
done
# Make executables depend on our current version.
func_append verstring ":$current.0"
;;
qnx)
major=.$current
versuffix=.$current
;;
sco)
major=.$current
versuffix=.$current
;;
sunos)
major=.$current
versuffix=.$current.$revision
;;
windows)
# Use '-' rather than '.', since we only want one
# extension on DOS 8.3 file systems.
func_arith $current - $age
major=$func_arith_result
versuffix=-$major
;;
*)
func_fatal_configuration "unknown library version type '$version_type'"
;;
esac
# Clear the version info if we defaulted, and they specified a release.
if test -z "$vinfo" && test -n "$release"; then
major=
case $version_type in
darwin)
# we can't check for "0.0" in archive_cmds due to quoting
# problems, so we reset it completely
verstring=
;;
*)
verstring=0.0
;;
esac
if test no = "$need_version"; then
versuffix=
else
versuffix=.0.0
fi
fi
# Remove version info from name if versioning should be avoided
if test yes,no = "$avoid_version,$need_version"; then
major=
versuffix=
verstring=
fi
# Check to see if the archive will have undefined symbols.
if test yes = "$allow_undefined"; then
if test unsupported = "$allow_undefined_flag"; then
if test yes = "$build_old_libs"; then
func_warning "undefined symbols not allowed in $host shared libraries; building static only"
build_libtool_libs=no
else
func_fatal_error "can't build $host shared library unless -no-undefined is specified"
fi
fi
else
# Don't allow undefined symbols.
allow_undefined_flag=$no_undefined_flag
fi
fi
func_generate_dlsyms "$libname" "$libname" :
func_append libobjs " $symfileobj"
test " " = "$libobjs" && libobjs=
if test relink != "$opt_mode"; then
# Remove our outputs, but don't remove object files since they
# may have been created when compiling PIC objects.
removelist=
tempremovelist=`$ECHO "$output_objdir/*"`
for p in $tempremovelist; do
case $p in
*.$objext | *.gcno)
;;
$output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/$libname$release.*)
if test -n "$precious_files_regex"; then
if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
then
continue
fi
fi
func_append removelist " $p"
;;
*) ;;
esac
done
test -n "$removelist" && \
func_show_eval "${RM}r \$removelist"
fi
# Now set the variables for building old libraries.
if test yes = "$build_old_libs" && test convenience != "$build_libtool_libs"; then
func_append oldlibs " $output_objdir/$libname.$libext"
# Transform .lo files to .o files.
oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; $lo2o" | $NL2SP`
fi
# Eliminate all temporary directories.
#for path in $notinst_path; do
# lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"`
# deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"`
# dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"`
#done
if test -n "$xrpath"; then
# If the user specified any rpath flags, then add them.
temp_xrpath=
for libdir in $xrpath; do
func_replace_sysroot "$libdir"
func_append temp_xrpath " -R$func_replace_sysroot_result"
case "$finalize_rpath " in
*" $libdir "*) ;;
*) func_append finalize_rpath " $libdir" ;;
esac
done
if test yes != "$hardcode_into_libs" || test yes = "$build_old_libs"; then
dependency_libs="$temp_xrpath $dependency_libs"
fi
fi
# Make sure dlfiles contains only unique files that won't be dlpreopened
# A library listed for dlpreopening takes precedence over plain
# dlopening, so it is filtered out of dlfiles here.
old_dlfiles=$dlfiles
dlfiles=
for lib in $old_dlfiles; do
case " $dlprefiles $dlfiles " in
*" $lib "*) ;;
*) func_append dlfiles " $lib" ;;
esac
done
# Make sure dlprefiles contains only unique files
old_dlprefiles=$dlprefiles
dlprefiles=
for lib in $old_dlprefiles; do
case "$dlprefiles " in
*" $lib "*) ;;
*) func_append dlprefiles " $lib" ;;
esac
done
if test yes = "$build_libtool_libs"; then
if test -n "$rpath"; then
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*)
# these systems don't actually have a c library (as such)!
;;
*-*-rhapsody* | *-*-darwin1.[012])
# Rhapsody C library is in the System framework
func_append deplibs " System.ltframework"
;;
*-*-netbsd*)
# Don't link with libc until the a.out ld.so is fixed.
;;
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
# Do not include libc due to us having libc/libc_r.
;;
*-*-sco3.2v5* | *-*-sco5v6*)
# Causes problems with __ctype
;;
*-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
# Compiler inserts libc in the correct place for threads to work
;;
*)
# Add libc to deplibs on all other systems if necessary.
if test yes = "$build_libtool_need_lc"; then
func_append deplibs " -lc"
fi
;;
esac
fi
# Transform deplibs into only deplibs that can be linked in shared.
name_save=$name
libname_save=$libname
release_save=$release
versuffix_save=$versuffix
major_save=$major
# I'm not sure if I'm treating the release correctly. I think
# release should show up in the -l (ie -lgmp5) so we don't want to
# add it in twice. Is that correct?
release=
versuffix=
major=
newdeplibs=
droppeddeps=no
case $deplibs_check_method in
pass_all)
# Don't check for shared/static. Everything works.
# This might be a little naive. We might want to check
# whether the library exists or not. But this is on
# osf3 & osf4 and I'm not really sure... Just
# implementing what was already the behavior.
newdeplibs=$deplibs
;;
test_compile)
# This code stresses the "libraries are programs" paradigm to its
# limits. Maybe even breaks it. We compile a program, linking it
# against the deplibs as a proxy for the library. Then we can check
# whether they linked in statically or dynamically with ldd.
$opt_dry_run || $RM conftest.c
cat > conftest.c </dev/null`
$nocaseglob
else
potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null`
fi
for potent_lib in $potential_libs; do
# Follow soft links.
if ls -lLd "$potent_lib" 2>/dev/null |
$GREP " -> " >/dev/null; then
continue
fi
# The statement above tries to avoid entering an
# endless loop below, in case of cyclic links.
# We might still enter an endless loop, since a link
# loop can be closed while we follow links,
# but so what?
potlib=$potent_lib
while test -h "$potlib" 2>/dev/null; do
potliblink=`ls -ld $potlib | $SED 's/.* -> //'`
case $potliblink in
[\\/]* | [A-Za-z]:[\\/]*) potlib=$potliblink;;
*) potlib=`$ECHO "$potlib" | $SED 's|[^/]*$||'`"$potliblink";;
esac
done
if eval $file_magic_cmd \"\$potlib\" 2>/dev/null |
$SED -e 10q |
$EGREP "$file_magic_regex" > /dev/null; then
func_append newdeplibs " $a_deplib"
a_deplib=
break 2
fi
done
done
fi
if test -n "$a_deplib"; then
droppeddeps=yes
echo
$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have"
echo "*** because I did check the linker path looking for a file starting"
if test -z "$potlib"; then
$ECHO "*** with $libname but no candidates were found. (...for file magic test)"
else
$ECHO "*** with $libname and none of the candidates passed a file format test"
$ECHO "*** using a file magic. Last file checked: $potlib"
fi
fi
;;
*)
# Add a -L argument.
func_append newdeplibs " $a_deplib"
;;
esac
done # Gone through all deplibs.
;;
match_pattern*)
set dummy $deplibs_check_method; shift
match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"`
for a_deplib in $deplibs; do
case $a_deplib in
-l*)
func_stripname -l '' "$a_deplib"
name=$func_stripname_result
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
case " $predeps $postdeps " in
*" $a_deplib "*)
func_append newdeplibs " $a_deplib"
a_deplib=
;;
esac
fi
if test -n "$a_deplib"; then
libname=`eval "\\$ECHO \"$libname_spec\""`
for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
for potent_lib in $potential_libs; do
potlib=$potent_lib # see symlink-check above in file_magic test
if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \
$EGREP "$match_pattern_regex" > /dev/null; then
func_append newdeplibs " $a_deplib"
a_deplib=
break 2
fi
done
done
fi
if test -n "$a_deplib"; then
droppeddeps=yes
echo
$ECHO "*** Warning: linker path does not have real file for library $a_deplib."
echo "*** I have the capability to make that library automatically link in when"
echo "*** you link to this library. But I can only do this if you have a"
echo "*** shared version of the library, which you do not appear to have"
echo "*** because I did check the linker path looking for a file starting"
if test -z "$potlib"; then
$ECHO "*** with $libname but no candidates were found. (...for regex pattern test)"
else
$ECHO "*** with $libname and none of the candidates passed a file format test"
$ECHO "*** using a regex pattern. Last file checked: $potlib"
fi
fi
;;
*)
# Add a -L argument.
func_append newdeplibs " $a_deplib"
;;
esac
done # Gone through all deplibs.
;;
none | unknown | *)
newdeplibs=
tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'`
if test yes = "$allow_libtool_libs_with_static_runtimes"; then
for i in $predeps $postdeps; do
# can't use Xsed below, because $i might contain '/'
tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s|$i||"`
done
fi
case $tmp_deplibs in
*[!\ \ ]*)
echo
if test none = "$deplibs_check_method"; then
echo "*** Warning: inter-library dependencies are not supported in this platform."
else
echo "*** Warning: inter-library dependencies are not known to be supported."
fi
echo "*** All declared inter-library dependencies are being dropped."
droppeddeps=yes
;;
esac
;;
esac
versuffix=$versuffix_save
major=$major_save
release=$release_save
libname=$libname_save
name=$name_save
case $host in
*-*-rhapsody* | *-*-darwin1.[012])
# On Rhapsody replace the C library with the System framework
newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'`
;;
esac
if test yes = "$droppeddeps"; then
if test yes = "$module"; then
echo
echo "*** Warning: libtool could not satisfy all declared inter-library"
$ECHO "*** dependencies of module $libname. Therefore, libtool will create"
echo "*** a static module, that should work as long as the dlopening"
echo "*** application is linked with the -dlopen flag."
if test -z "$global_symbol_pipe"; then
echo
echo "*** However, this would only work if libtool was able to extract symbol"
echo "*** lists from a program, using 'nm' or equivalent, but libtool could"
echo "*** not find such a program. So, this module is probably useless."
echo "*** 'nm' from GNU binutils and a full rebuild may help."
fi
if test no = "$build_old_libs"; then
oldlibs=$output_objdir/$libname.$libext
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
else
echo "*** The inter-library dependencies that have been dropped here will be"
echo "*** automatically added whenever a program is linked with this library"
echo "*** or is declared to -dlopen it."
if test no = "$allow_undefined"; then
echo
echo "*** Since this library must not contain undefined symbols,"
echo "*** because either the platform does not support them or"
echo "*** it was explicitly requested with -no-undefined,"
echo "*** libtool will only create a static version of it."
if test no = "$build_old_libs"; then
oldlibs=$output_objdir/$libname.$libext
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
fi
fi
fi
# Done checking deplibs!
deplibs=$newdeplibs
fi
# Time to change all our "foo.ltframework" stuff back to "-framework foo"
# Internally libtool encodes macOS frameworks as "NAME.ltframework" so
# they survive list processing as single words; decode them here.
case $host in
*-*-darwin*)
newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
;;
esac
# move library search paths that coincide with paths to not yet
# installed libraries to the beginning of the library search list
# First collect the -L flags for not-yet-installed build dirs that
# actually appear in $deplibs...
new_libs=
for path in $notinst_path; do
case " $new_libs " in
*" -L$path/$objdir "*) ;;
*)
case " $deplibs " in
*" -L$path/$objdir "*)
func_append new_libs " -L$path/$objdir" ;;
esac
;;
esac
done
# ...then append the rest of $deplibs, skipping -L flags already
# front-loaded above (non -L entries are kept unconditionally).
for deplib in $deplibs; do
case $deplib in
-L*)
case " $new_libs " in
*" $deplib "*) ;;
*) func_append new_libs " $deplib" ;;
esac
;;
*) func_append new_libs " $deplib" ;;
esac
done
deplibs=$new_libs
# All the library-specific variables (install_libdir is set above).
library_names=
old_library=
dlname=
# Test again, we may have decided not to build it any more
if test yes = "$build_libtool_libs"; then
# Remove $wl instances when linking with ld.
# FIXME: should test the right _cmds variable.
case $archive_cmds in
*\$LD\ *) wl= ;;
esac
if test yes = "$hardcode_into_libs"; then
# Hardcode the library paths
hardcode_libdirs=
dep_rpath=
rpath=$finalize_rpath
test relink = "$opt_mode" || rpath=$compile_rpath$rpath
for libdir in $rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
func_replace_sysroot "$libdir"
libdir=$func_replace_sysroot_result
if test -z "$hardcode_libdirs"; then
hardcode_libdirs=$libdir
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
func_append dep_rpath " $flag"
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
*" $libdir "*) ;;
*) func_append perm_rpath " $libdir" ;;
esac
fi
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir=$hardcode_libdirs
eval "dep_rpath=\"$hardcode_libdir_flag_spec\""
fi
if test -n "$runpath_var" && test -n "$perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $perm_rpath; do
func_append rpath "$dir:"
done
eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
fi
test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
fi
shlibpath=$finalize_shlibpath
test relink = "$opt_mode" || shlibpath=$compile_shlibpath$shlibpath
if test -n "$shlibpath"; then
eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
fi
# Get the real and link names of the library.
eval shared_ext=\"$shrext_cmds\"
eval library_names=\"$library_names_spec\"
set dummy $library_names
shift
realname=$1
shift
if test -n "$soname_spec"; then
eval soname=\"$soname_spec\"
else
soname=$realname
fi
if test -z "$dlname"; then
dlname=$soname
fi
lib=$output_objdir/$realname
linknames=
for link
do
func_append linknames " $link"
done
# Use standard objects if they are pic
test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP`
test "X$libobjs" = "X " && libobjs=
delfiles=
if test -n "$export_symbols" && test -n "$include_expsyms"; then
$opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp"
export_symbols=$output_objdir/$libname.uexp
func_append delfiles " $export_symbols"
fi
orig_export_symbols=
case $host_os in
cygwin* | mingw* | cegcc*)
if test -n "$export_symbols" && test -z "$export_symbols_regex"; then
# exporting using user supplied symfile
func_dll_def_p "$export_symbols" || {
# and it's NOT already a .def file. Must figure out
# which of the given symbols are data symbols and tag
# them as such. So, trigger use of export_symbols_cmds.
# export_symbols gets reassigned inside the "prepare
# the list of exported symbols" if statement, so the
# include_expsyms logic still works.
orig_export_symbols=$export_symbols
export_symbols=
always_export_symbols=yes
}
fi
;;
esac
# Prepare the list of exported symbols
if test -z "$export_symbols"; then
if test yes = "$always_export_symbols" || test -n "$export_symbols_regex"; then
func_verbose "generating symbol list for '$libname.la'"
export_symbols=$output_objdir/$libname.exp
$opt_dry_run || $RM $export_symbols
cmds=$export_symbols_cmds
save_ifs=$IFS; IFS='~'
for cmd1 in $cmds; do
IFS=$save_ifs
# Take the normal branch if the nm_file_list_spec branch
# doesn't work or if tool conversion is not needed.
case $nm_file_list_spec~$to_tool_file_cmd in
*~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*)
try_normal_branch=yes
eval cmd=\"$cmd1\"
func_len " $cmd"
len=$func_len_result
;;
*)
try_normal_branch=no
;;
esac
if test yes = "$try_normal_branch" \
&& { test "$len" -lt "$max_cmd_len" \
|| test "$max_cmd_len" -le -1; }
then
func_show_eval "$cmd" 'exit $?'
skipped_export=false
elif test -n "$nm_file_list_spec"; then
func_basename "$output"
output_la=$func_basename_result
save_libobjs=$libobjs
save_output=$output
output=$output_objdir/$output_la.nm
func_to_tool_file "$output"
libobjs=$nm_file_list_spec$func_to_tool_file_result
func_append delfiles " $output"
func_verbose "creating $NM input file list: $output"
for obj in $save_libobjs; do
func_to_tool_file "$obj"
$ECHO "$func_to_tool_file_result"
done > "$output"
eval cmd=\"$cmd1\"
func_show_eval "$cmd" 'exit $?'
output=$save_output
libobjs=$save_libobjs
skipped_export=false
else
# The command line is too long to execute in one step.
func_verbose "using reloadable object file for export list..."
skipped_export=:
# Break out early, otherwise skipped_export may be
# set to false by a later but shorter cmd.
break
fi
done
IFS=$save_ifs
if test -n "$export_symbols_regex" && test : != "$skipped_export"; then
func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
fi
fi
fi
if test -n "$export_symbols" && test -n "$include_expsyms"; then
tmp_export_symbols=$export_symbols
test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols
$opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
fi
if test : != "$skipped_export" && test -n "$orig_export_symbols"; then
# The given exports_symbols file has to be filtered, so filter it.
func_verbose "filter symbol list for '$libname.la' to tag DATA exports"
# FIXME: $output_objdir/$libname.filter potentially contains lots of
# 's' commands, which not all seds can handle. GNU sed should be fine
# though. Also, the filter scales superlinearly with the number of
# global variables. join(1) would be nice here, but unfortunately
# isn't a blessed tool.
$opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
func_append delfiles " $export_symbols $output_objdir/$libname.filter"
export_symbols=$output_objdir/$libname.def
$opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
fi
# Drop convenience archives from $deplibs: their objects are linked
# into this library directly (see the $convenience handling below),
# so they must not also appear as link-time dependencies.
tmp_deplibs=
for test_deplib in $deplibs; do
case " $convenience " in
*" $test_deplib "*) ;;
*)
func_append tmp_deplibs " $test_deplib"
;;
esac
done
deplibs=$tmp_deplibs
if test -n "$convenience"; then
if test -n "$whole_archive_flag_spec" &&
test yes = "$compiler_needs_object" &&
test -z "$libobjs"; then
# extract the archives, so we have objects to list.
# TODO: could optimize this to just extract one archive.
whole_archive_flag_spec=
fi
if test -n "$whole_archive_flag_spec"; then
save_libobjs=$libobjs
eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
test "X$libobjs" = "X " && libobjs=
else
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_extract_archives $gentop $convenience
func_append libobjs " $func_extract_archives_result"
test "X$libobjs" = "X " && libobjs=
fi
fi
if test yes = "$thread_safe" && test -n "$thread_safe_flag_spec"; then
eval flag=\"$thread_safe_flag_spec\"
func_append linker_flags " $flag"
fi
# Make a backup of the uninstalled library when relinking
# (restored on failure by the command-execution loop further down).
if test relink = "$opt_mode"; then
$opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $?
fi
# Do each of the archive commands.
# Select the command template: module vs. ordinary archive, and
# within each, prefer the export-symbols variant when both an export
# list and an expsym command are available.  test_cmds is expanded
# eagerly so its length can be checked against max_cmd_len below.
if test yes = "$module" && test -n "$module_cmds"; then
if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
eval test_cmds=\"$module_expsym_cmds\"
cmds=$module_expsym_cmds
else
eval test_cmds=\"$module_cmds\"
cmds=$module_cmds
fi
else
if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
eval test_cmds=\"$archive_expsym_cmds\"
cmds=$archive_expsym_cmds
else
eval test_cmds=\"$archive_cmds\"
cmds=$archive_cmds
fi
fi
if test : != "$skipped_export" &&
func_len " $test_cmds" &&
len=$func_len_result &&
test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
:
else
# The command line is too long to link in one step, link piecewise
# or, if using GNU ld and skipped_export is not :, use a linker
# script.
# Save the value of $output and $libobjs because we want to
# use them later. If we have whole_archive_flag_spec, we
# want to use save_libobjs as it was before
# whole_archive_flag_spec was expanded, because we can't
# assume the linker understands whole_archive_flag_spec.
# This may have to be revisited, in case too many
# convenience libraries get linked in and end up exceeding
# the spec.
if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
save_libobjs=$libobjs
fi
save_output=$output
func_basename "$output"
output_la=$func_basename_result
# Clear the reloadable object creation command queue and
# initialize k to one.
test_cmds=
concat_cmds=
objlist=
last_robj=
k=1
if test -n "$save_libobjs" && test : != "$skipped_export" && test yes = "$with_gnu_ld"; then
output=$output_objdir/$output_la.lnkscript
func_verbose "creating GNU ld script: $output"
echo 'INPUT (' > $output
for obj in $save_libobjs
do
func_to_tool_file "$obj"
$ECHO "$func_to_tool_file_result" >> $output
done
echo ')' >> $output
func_append delfiles " $output"
func_to_tool_file "$output"
output=$func_to_tool_file_result
elif test -n "$save_libobjs" && test : != "$skipped_export" && test -n "$file_list_spec"; then
output=$output_objdir/$output_la.lnk
func_verbose "creating linker input file list: $output"
: > $output
set x $save_libobjs
shift
firstobj=
if test yes = "$compiler_needs_object"; then
firstobj="$1 "
shift
fi
for obj
do
func_to_tool_file "$obj"
$ECHO "$func_to_tool_file_result" >> $output
done
func_append delfiles " $output"
func_to_tool_file "$output"
output=$firstobj\"$file_list_spec$func_to_tool_file_result\"
else
if test -n "$save_libobjs"; then
func_verbose "creating reloadable object files..."
output=$output_objdir/$output_la-$k.$objext
eval test_cmds=\"$reload_cmds\"
func_len " $test_cmds"
len0=$func_len_result
len=$len0
# Loop over the list of objects to be linked.
for obj in $save_libobjs
do
func_len " $obj"
func_arith $len + $func_len_result
len=$func_arith_result
if test -z "$objlist" ||
test "$len" -lt "$max_cmd_len"; then
func_append objlist " $obj"
else
# The command $test_cmds is almost too long, add a
# command to the queue.
if test 1 -eq "$k"; then
# The first file doesn't have a previous command to add.
reload_objs=$objlist
eval concat_cmds=\"$reload_cmds\"
else
# All subsequent reloadable object files will link in
# the last one created.
reload_objs="$objlist $last_robj"
eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"
fi
last_robj=$output_objdir/$output_la-$k.$objext
func_arith $k + 1
k=$func_arith_result
output=$output_objdir/$output_la-$k.$objext
objlist=" $obj"
func_len " $last_robj"
func_arith $len0 + $func_len_result
len=$func_arith_result
fi
done
# Handle the remaining objects by creating one last
# reloadable object file. All subsequent reloadable object
# files will link in the last one created.
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
reload_objs="$objlist $last_robj"
eval concat_cmds=\"\$concat_cmds$reload_cmds\"
if test -n "$last_robj"; then
eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
fi
func_append delfiles " $output"
else
output=
fi
${skipped_export-false} && {
func_verbose "generating symbol list for '$libname.la'"
export_symbols=$output_objdir/$libname.exp
$opt_dry_run || $RM $export_symbols
libobjs=$output
# Append the command to create the export file.
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\"
if test -n "$last_robj"; then
eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\"
fi
}
test -n "$save_libobjs" &&
func_verbose "creating a temporary reloadable object file: $output"
# Loop through the commands generated above and execute them.
save_ifs=$IFS; IFS='~'
for cmd in $concat_cmds; do
IFS=$save_ifs
$opt_quiet || {
func_quote_for_expand "$cmd"
eval "func_echo $func_quote_for_expand_result"
}
$opt_dry_run || eval "$cmd" || {
lt_exit=$?
# Restore the uninstalled library and exit
if test relink = "$opt_mode"; then
( cd "$output_objdir" && \
$RM "${realname}T" && \
$MV "${realname}U" "$realname" )
fi
exit $lt_exit
}
done
IFS=$save_ifs
if test -n "$export_symbols_regex" && ${skipped_export-false}; then
func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
func_show_eval '$MV "${export_symbols}T" "$export_symbols"'
fi
fi
${skipped_export-false} && {
if test -n "$export_symbols" && test -n "$include_expsyms"; then
tmp_export_symbols=$export_symbols
test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols
$opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"'
fi
if test -n "$orig_export_symbols"; then
# The given exports_symbols file has to be filtered, so filter it.
func_verbose "filter symbol list for '$libname.la' to tag DATA exports"
# FIXME: $output_objdir/$libname.filter potentially contains lots of
# 's' commands, which not all seds can handle. GNU sed should be fine
# though. Also, the filter scales superlinearly with the number of
# global variables. join(1) would be nice here, but unfortunately
# isn't a blessed tool.
$opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter
func_append delfiles " $export_symbols $output_objdir/$libname.filter"
export_symbols=$output_objdir/$libname.def
$opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols
fi
}
libobjs=$output
# Restore the value of output.
output=$save_output
if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
test "X$libobjs" = "X " && libobjs=
fi
# Expand the library linking commands again to reset the
# value of $libobjs for piecewise linking.
# Do each of the archive commands.
if test yes = "$module" && test -n "$module_cmds"; then
if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
cmds=$module_expsym_cmds
else
cmds=$module_cmds
fi
else
if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
cmds=$archive_expsym_cmds
else
cmds=$archive_cmds
fi
fi
fi
if test -n "$delfiles"; then
# Append the command to remove temporary files to $cmds.
eval cmds=\"\$cmds~\$RM $delfiles\"
fi
# Add any objects from preloaded convenience libraries
if test -n "$dlprefiles"; then
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_extract_archives $gentop $dlprefiles
func_append libobjs " $func_extract_archives_result"
test "X$libobjs" = "X " && libobjs=
fi
# A bit hacky. I had wanted to add \$as_needed_flag to archive_cmds instead, but that
# comes from libtool.m4 which is part of the project being built. This should put it
# in the right place though.
if test lib,link = "$linkmode,$pass" && test -n "$as_needed_flag"; then
libobjs=$as_needed_flag$libobjs
fi
save_ifs=$IFS; IFS='~'
for cmd in $cmds; do
IFS=$sp$nl
eval cmd=\"$cmd\"
IFS=$save_ifs
$opt_quiet || {
func_quote_for_expand "$cmd"
eval "func_echo $func_quote_for_expand_result"
}
$opt_dry_run || eval "$cmd" || {
lt_exit=$?
# Restore the uninstalled library and exit
if test relink = "$opt_mode"; then
( cd "$output_objdir" && \
$RM "${realname}T" && \
$MV "${realname}U" "$realname" )
fi
exit $lt_exit
}
done
IFS=$save_ifs
# Restore the uninstalled library and exit
if test relink = "$opt_mode"; then
$opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $?
if test -n "$convenience"; then
if test -z "$whole_archive_flag_spec"; then
func_show_eval '${RM}r "$gentop"'
fi
fi
exit $EXIT_SUCCESS
fi
# Create links to the real library.
for linkname in $linknames; do
if test "$realname" != "$linkname"; then
func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?'
fi
done
# If -module or -export-dynamic was specified, set the dlname.
if test yes = "$module" || test yes = "$export_dynamic"; then
# On all known operating systems, these are identical.
dlname=$soname
fi
fi
;;
obj)
if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then
func_warning "'-dlopen' is ignored for objects"
fi
case " $deplibs" in
*\ -l* | *\ -L*)
func_warning "'-l' and '-L' are ignored for objects" ;;
esac
test -n "$rpath" && \
func_warning "'-rpath' is ignored for objects"
test -n "$xrpath" && \
func_warning "'-R' is ignored for objects"
test -n "$vinfo" && \
func_warning "'-version-info' is ignored for objects"
test -n "$release" && \
func_warning "'-release' is ignored for objects"
case $output in
*.lo)
test -n "$objs$old_deplibs" && \
func_fatal_error "cannot build library object '$output' from non-libtool objects"
libobj=$output
func_lo2o "$libobj"
obj=$func_lo2o_result
;;
*)
libobj=
obj=$output
;;
esac
# Delete the old objects.
$opt_dry_run || $RM $obj $libobj
# Objects from convenience libraries. This assumes
# single-version convenience libraries. Whenever we create
# different ones for PIC/non-PIC, we'll have to duplicate
# the extraction.
reload_conv_objs=
gentop=
# if reload_cmds runs $LD directly, get rid of -Wl from
# whole_archive_flag_spec and hope we can get by with turning comma
# into space.
case $reload_cmds in
*\$LD[\ \$]*) wl= ;;
esac
if test -n "$convenience"; then
if test -n "$whole_archive_flag_spec"; then
eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\"
test -n "$wl" || tmp_whole_archive_flags=`$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'`
reload_conv_objs=$reload_objs\ $tmp_whole_archive_flags
else
gentop=$output_objdir/${obj}x
func_append generated " $gentop"
func_extract_archives $gentop $convenience
reload_conv_objs="$reload_objs $func_extract_archives_result"
fi
fi
# If we're not building shared, we need to use non_pic_objs
test yes = "$build_libtool_libs" || libobjs=$non_pic_objects
# Create the old-style object.
reload_objs=$objs$old_deplibs' '`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; /\.lib$/d; $lo2o" | $NL2SP`' '$reload_conv_objs
output=$obj
func_execute_cmds "$reload_cmds" 'exit $?'
# Exit if we aren't doing a library object file.
if test -z "$libobj"; then
if test -n "$gentop"; then
func_show_eval '${RM}r "$gentop"'
fi
exit $EXIT_SUCCESS
fi
test yes = "$build_libtool_libs" || {
if test -n "$gentop"; then
func_show_eval '${RM}r "$gentop"'
fi
# Create an invalid libtool object if no PIC, so that we don't
# accidentally link it into a program.
# $show "echo timestamp > $libobj"
# $opt_dry_run || eval "echo timestamp > $libobj" || exit $?
exit $EXIT_SUCCESS
}
if test -n "$pic_flag" || test default != "$pic_mode"; then
# Only do commands if we really have different PIC objects.
reload_objs="$libobjs $reload_conv_objs"
output=$libobj
func_execute_cmds "$reload_cmds" 'exit $?'
fi
if test -n "$gentop"; then
func_show_eval '${RM}r "$gentop"'
fi
exit $EXIT_SUCCESS
;;
prog)
case $host in
*cygwin*) func_stripname '' '.exe' "$output"
output=$func_stripname_result.exe;;
esac
test -n "$vinfo" && \
func_warning "'-version-info' is ignored for programs"
test -n "$release" && \
func_warning "'-release' is ignored for programs"
$preload \
&& test unknown,unknown,unknown = "$dlopen_support,$dlopen_self,$dlopen_self_static" \
&& func_warning "'LT_INIT([dlopen])' not used. Assuming no dlopen support."
case $host in
*-*-rhapsody* | *-*-darwin1.[012])
# On Rhapsody, replace the C library with the System framework
compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'`
finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'`
;;
esac
case $host in
*-*-darwin*)
# Don't allow lazy linking, it breaks C++ global constructors
# But is supposedly fixed on 10.4 or later (yay!).
if test CXX = "$tagname"; then
case ${MACOSX_DEPLOYMENT_TARGET-10.0} in
10.[0123])
func_append compile_command " $wl-bind_at_load"
func_append finalize_command " $wl-bind_at_load"
;;
esac
fi
# Time to change all our "foo.ltframework" stuff back to "-framework foo"
compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'`
;;
esac
# move library search paths that coincide with paths to not yet
# installed libraries to the beginning of the library search list
new_libs=
for path in $notinst_path; do
case " $new_libs " in
*" -L$path/$objdir "*) ;;
*)
case " $compile_deplibs " in
*" -L$path/$objdir "*)
func_append new_libs " -L$path/$objdir" ;;
esac
;;
esac
done
for deplib in $compile_deplibs; do
case $deplib in
-L*)
case " $new_libs " in
*" $deplib "*) ;;
*) func_append new_libs " $deplib" ;;
esac
;;
*) func_append new_libs " $deplib" ;;
esac
done
compile_deplibs=$new_libs
func_append compile_command " $as_needed_flag $compile_deplibs"
func_append finalize_command " $as_needed_flag $finalize_deplibs"
if test -n "$rpath$xrpath"; then
# If the user specified any rpath flags, then add them.
for libdir in $rpath $xrpath; do
# This is the magic to use -rpath.
case "$finalize_rpath " in
*" $libdir "*) ;;
*) func_append finalize_rpath " $libdir" ;;
esac
done
fi
# Now hardcode the library paths
rpath=
hardcode_libdirs=
for libdir in $compile_rpath $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs=$libdir
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
func_append rpath " $flag"
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
*" $libdir "*) ;;
*) func_append perm_rpath " $libdir" ;;
esac
fi
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*)
testbindir=`$ECHO "$libdir" | $SED -e 's*/lib$*/bin*'`
case :$dllsearchpath: in
*":$libdir:"*) ;;
::) dllsearchpath=$libdir;;
*) func_append dllsearchpath ":$libdir";;
esac
case :$dllsearchpath: in
*":$testbindir:"*) ;;
::) dllsearchpath=$testbindir;;
*) func_append dllsearchpath ":$testbindir";;
esac
;;
esac
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir=$hardcode_libdirs
eval rpath=\" $hardcode_libdir_flag_spec\"
fi
compile_rpath=$rpath
rpath=
hardcode_libdirs=
for libdir in $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs=$libdir
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
func_append hardcode_libdirs "$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
func_append rpath " $flag"
fi
elif test -n "$runpath_var"; then
case "$finalize_perm_rpath " in
*" $libdir "*) ;;
*) func_append finalize_perm_rpath " $libdir" ;;
esac
fi
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir=$hardcode_libdirs
eval rpath=\" $hardcode_libdir_flag_spec\"
fi
finalize_rpath=$rpath
if test -n "$libobjs" && test yes = "$build_old_libs"; then
# Transform all the library objects into standard objects.
compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP`
fi
func_generate_dlsyms "$outputname" "@PROGRAM@" false
# template prelinking step
if test -n "$prelink_cmds"; then
func_execute_cmds "$prelink_cmds" 'exit $?'
fi
wrappers_required=:
case $host in
*cegcc* | *mingw32ce*)
# Disable wrappers for cegcc and mingw32ce hosts, we are cross compiling anyway.
wrappers_required=false
;;
*cygwin* | *mingw* )
test yes = "$build_libtool_libs" || wrappers_required=false
;;
*)
if test no = "$need_relink" || test yes != "$build_libtool_libs"; then
wrappers_required=false
fi
;;
esac
$wrappers_required || {
# Replace the output file specification.
compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
link_command=$compile_command$compile_rpath
# We have no uninstalled library dependencies, so finalize right now.
exit_status=0
func_show_eval "$link_command" 'exit_status=$?'
if test -n "$postlink_cmds"; then
func_to_tool_file "$output"
postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
func_execute_cmds "$postlink_cmds" 'exit $?'
fi
# Delete the generated files.
if test -f "$output_objdir/${outputname}S.$objext"; then
func_show_eval '$RM "$output_objdir/${outputname}S.$objext"'
fi
exit $exit_status
}
if test -n "$compile_shlibpath$finalize_shlibpath"; then
compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
fi
if test -n "$finalize_shlibpath"; then
finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
fi
compile_var=
finalize_var=
if test -n "$runpath_var"; then
if test -n "$perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $perm_rpath; do
func_append rpath "$dir:"
done
compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
fi
if test -n "$finalize_perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $finalize_perm_rpath; do
func_append rpath "$dir:"
done
finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
fi
fi
if test yes = "$no_install"; then
# We don't need to create a wrapper script.
link_command=$compile_var$compile_command$compile_rpath
# Replace the output file specification.
link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'`
# Delete the old output file.
$opt_dry_run || $RM $output
# Link the executable and exit
func_show_eval "$link_command" 'exit $?'
if test -n "$postlink_cmds"; then
func_to_tool_file "$output"
postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
func_execute_cmds "$postlink_cmds" 'exit $?'
fi
exit $EXIT_SUCCESS
fi
case $hardcode_action,$fast_install in
relink,*)
# Fast installation is not supported
link_command=$compile_var$compile_command$compile_rpath
relink_command=$finalize_var$finalize_command$finalize_rpath
func_warning "this platform does not like uninstalled shared libraries"
func_warning "'$output' will be relinked during installation"
;;
*,yes)
link_command=$finalize_var$compile_command$finalize_rpath
relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'`
;;
*,no)
link_command=$compile_var$compile_command$compile_rpath
relink_command=$finalize_var$finalize_command$finalize_rpath
;;
*,needless)
link_command=$finalize_var$compile_command$finalize_rpath
relink_command=
;;
esac
# Replace the output file specification.
link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
# Delete the old output files.
$opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname
func_show_eval "$link_command" 'exit $?'
if test -n "$postlink_cmds"; then
func_to_tool_file "$output_objdir/$outputname"
postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'`
func_execute_cmds "$postlink_cmds" 'exit $?'
fi
# Now create the wrapper script.
func_verbose "creating $output"
# Quote the relink command for shipping.
if test -n "$relink_command"; then
# Preserve any variables that may affect compiler behavior
for var in $variables_saved_for_relink; do
if eval test -z \"\${$var+set}\"; then
relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
elif eval var_value=\$$var; test -z "$var_value"; then
relink_command="$var=; export $var; $relink_command"
else
func_quote_for_eval "$var_value"
relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
fi
done
relink_command="(cd `pwd`; $relink_command)"
relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
fi
# Only actually do things if not in dry run mode.
$opt_dry_run || {
# win32 will think the script is a binary if it has
# a .exe suffix, so we strip it off here.
case $output in
*.exe) func_stripname '' '.exe' "$output"
output=$func_stripname_result ;;
esac
# test for cygwin because mv fails w/o .exe extensions
case $host in
*cygwin*)
exeext=.exe
func_stripname '' '.exe' "$outputname"
outputname=$func_stripname_result ;;
*) exeext= ;;
esac
case $host in
*cygwin* | *mingw* )
func_dirname_and_basename "$output" "" "."
output_name=$func_basename_result
output_path=$func_dirname_result
cwrappersource=$output_path/$objdir/lt-$output_name.c
cwrapper=$output_path/$output_name.exe
$RM $cwrappersource $cwrapper
trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
func_emit_cwrapperexe_src > $cwrappersource
# The wrapper executable is built using the $host compiler,
# because it contains $host paths and files. If cross-
# compiling, it, like the target executable, must be
# executed on the $host or under an emulation environment.
$opt_dry_run || {
$LTCC $LTCFLAGS -o $cwrapper $cwrappersource
$STRIP $cwrapper
}
# Now, create the wrapper script for func_source use:
func_ltwrapper_scriptname $cwrapper
$RM $func_ltwrapper_scriptname_result
trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15
$opt_dry_run || {
# note: this script will not be executed, so do not chmod.
if test "x$build" = "x$host"; then
$cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result
else
func_emit_wrapper no > $func_ltwrapper_scriptname_result
fi
}
;;
* )
$RM $output
trap "$RM $output; exit $EXIT_FAILURE" 1 2 15
func_emit_wrapper no > $output
chmod +x $output
;;
esac
}
exit $EXIT_SUCCESS
;;
esac
# See if we need to build an old-fashioned archive.
for oldlib in $oldlibs; do
case $build_libtool_libs in
convenience)
oldobjs="$libobjs_save $symfileobj"
addlibs=$convenience
build_libtool_libs=no
;;
module)
oldobjs=$libobjs_save
addlibs=$old_convenience
build_libtool_libs=no
;;
*)
oldobjs="$old_deplibs $non_pic_objects"
$preload && test -f "$symfileobj" \
&& func_append oldobjs " $symfileobj"
addlibs=$old_convenience
;;
esac
if test -n "$addlibs"; then
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_extract_archives $gentop $addlibs
func_append oldobjs " $func_extract_archives_result"
fi
# Do each command in the archive commands.
if test -n "$old_archive_from_new_cmds" && test yes = "$build_libtool_libs"; then
cmds=$old_archive_from_new_cmds
else
# Add any objects from preloaded convenience libraries
if test -n "$dlprefiles"; then
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_extract_archives $gentop $dlprefiles
func_append oldobjs " $func_extract_archives_result"
fi
# POSIX demands no paths to be encoded in archives. We have
# to avoid creating archives with duplicate basenames if we
# might have to extract them afterwards, e.g., when creating a
# static archive out of a convenience library, or when linking
# the entirety of a libtool archive into another (currently
# not supported by libtool).
if (for obj in $oldobjs
do
func_basename "$obj"
$ECHO "$func_basename_result"
done | sort | sort -uc >/dev/null 2>&1); then
:
else
echo "copying selected object files to avoid basename conflicts..."
gentop=$output_objdir/${outputname}x
func_append generated " $gentop"
func_mkdir_p "$gentop"
save_oldobjs=$oldobjs
oldobjs=
counter=1
for obj in $save_oldobjs
do
func_basename "$obj"
objbase=$func_basename_result
case " $oldobjs " in
" ") oldobjs=$obj ;;
*[\ /]"$objbase "*)
while :; do
# Make sure we don't pick an alternate name that also
# overlaps.
newobj=lt$counter-$objbase
func_arith $counter + 1
counter=$func_arith_result
case " $oldobjs " in
*[\ /]"$newobj "*) ;;
*) if test ! -f "$gentop/$newobj"; then break; fi ;;
esac
done
func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
func_append oldobjs " $gentop/$newobj"
;;
*) func_append oldobjs " $obj" ;;
esac
done
fi
func_to_tool_file "$oldlib" func_convert_file_msys_to_w32
tool_oldlib=$func_to_tool_file_result
eval cmds=\"$old_archive_cmds\"
func_len " $cmds"
len=$func_len_result
if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then
cmds=$old_archive_cmds
elif test -n "$archiver_list_spec"; then
func_verbose "using command file archive linking..."
for obj in $oldobjs
do
func_to_tool_file "$obj"
$ECHO "$func_to_tool_file_result"
done > $output_objdir/$libname.libcmd
func_to_tool_file "$output_objdir/$libname.libcmd"
oldobjs=" $archiver_list_spec$func_to_tool_file_result"
cmds=$old_archive_cmds
else
# the command line is too long to link in one step, link in parts
func_verbose "using piecewise archive linking..."
save_RANLIB=$RANLIB
RANLIB=:
objlist=
concat_cmds=
save_oldobjs=$oldobjs
oldobjs=
# Is there a better way of finding the last object in the list?
for obj in $save_oldobjs
do
last_oldobj=$obj
done
eval test_cmds=\"$old_archive_cmds\"
func_len " $test_cmds"
len0=$func_len_result
len=$len0
for obj in $save_oldobjs
do
func_len " $obj"
func_arith $len + $func_len_result
len=$func_arith_result
func_append objlist " $obj"
if test "$len" -lt "$max_cmd_len"; then
:
else
# the above command should be used before it gets too long
oldobjs=$objlist
if test "$obj" = "$last_oldobj"; then
RANLIB=$save_RANLIB
fi
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
eval concat_cmds=\"\$concat_cmds$old_archive_cmds\"
objlist=
len=$len0
fi
done
RANLIB=$save_RANLIB
oldobjs=$objlist
if test -z "$oldobjs"; then
eval cmds=\"\$concat_cmds\"
else
eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
fi
fi
fi
func_execute_cmds "$cmds" 'exit $?'
done
test -n "$generated" && \
func_show_eval "${RM}r$generated"
# Now create the libtool archive.
case $output in
*.la)
old_library=
test yes = "$build_old_libs" && old_library=$libname.$libext
func_verbose "creating $output"
# Preserve any variables that may affect compiler behavior
for var in $variables_saved_for_relink; do
if eval test -z \"\${$var+set}\"; then
relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command"
elif eval var_value=\$$var; test -z "$var_value"; then
relink_command="$var=; export $var; $relink_command"
else
func_quote_for_eval "$var_value"
relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command"
fi
done
# Quote the link command for shipping.
relink_command="(cd `pwd`; $SHELL \"$progpath\" $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"`
if test yes = "$hardcode_automatic"; then
relink_command=
fi
# Only create the output if not a dry run.
$opt_dry_run || {
for installed in no yes; do
if test yes = "$installed"; then
if test -z "$install_libdir"; then
break
fi
output=$output_objdir/${outputname}i
# Replace all uninstalled libtool libraries with the installed ones
newdependency_libs=
for deplib in $dependency_libs; do
case $deplib in
*.la)
func_basename "$deplib"
name=$func_basename_result
func_resolve_sysroot "$deplib"
eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result`
test -z "$libdir" && \
func_fatal_error "'$deplib' is not a valid libtool archive"
func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name"
;;
-L*)
func_stripname -L '' "$deplib"
func_replace_sysroot "$func_stripname_result"
func_append newdependency_libs " -L$func_replace_sysroot_result"
;;
-R*)
func_stripname -R '' "$deplib"
func_replace_sysroot "$func_stripname_result"
func_append newdependency_libs " -R$func_replace_sysroot_result"
;;
*) func_append newdependency_libs " $deplib" ;;
esac
done
dependency_libs=$newdependency_libs
newdlfiles=
for lib in $dlfiles; do
case $lib in
*.la)
func_basename "$lib"
name=$func_basename_result
eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
test -z "$libdir" && \
func_fatal_error "'$lib' is not a valid libtool archive"
func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name"
;;
*) func_append newdlfiles " $lib" ;;
esac
done
dlfiles=$newdlfiles
newdlprefiles=
for lib in $dlprefiles; do
case $lib in
*.la)
# Only pass preopened files to the pseudo-archive (for
# eventual linking with the app. that links it) if we
# didn't already link the preopened objects directly into
# the library:
func_basename "$lib"
name=$func_basename_result
eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
test -z "$libdir" && \
func_fatal_error "'$lib' is not a valid libtool archive"
func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name"
;;
esac
done
dlprefiles=$newdlprefiles
else
newdlfiles=
for lib in $dlfiles; do
case $lib in
[\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;;
*) abs=`pwd`"/$lib" ;;
esac
func_append newdlfiles " $abs"
done
dlfiles=$newdlfiles
newdlprefiles=
for lib in $dlprefiles; do
case $lib in
[\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;;
*) abs=`pwd`"/$lib" ;;
esac
func_append newdlprefiles " $abs"
done
dlprefiles=$newdlprefiles
fi
$RM $output
# place dlname in correct position for cygwin
# In fact, it would be nice if we could use this code for all target
# systems that can't hard-code library paths into their executables
# and that have no shared library path variable independent of PATH,
# but it turns out we can't easily determine that from inspecting
# libtool variables, so we have to hard-code the OSs to which it
# applies here; at the moment, that means platforms that use the PE
# object format with DLL files. See the long comment at the top of
# tests/bindir.at for full details.
tdlname=$dlname
case $host,$output,$installed,$module,$dlname in
*cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll)
# If a -bindir argument was supplied, place the dll there.
if test -n "$bindir"; then
func_relative_path "$install_libdir" "$bindir"
tdlname=$func_relative_path_result/$dlname
else
# Otherwise fall back on heuristic.
tdlname=../bin/$dlname
fi
;;
esac
$ECHO > $output "\
# $outputname - a libtool library file
# Generated by $PROGRAM (GNU $PACKAGE) $VERSION
#
# Please DO NOT delete this file!
# It is necessary for linking the library.
# The name that we can dlopen(3).
dlname='$tdlname'
# Names of this library.
library_names='$library_names'
# The name of the static archive.
old_library='$old_library'
# Linker flags that cannot go in dependency_libs.
inherited_linker_flags='$new_inherited_linker_flags'
# Libraries that this one depends upon.
dependency_libs='$dependency_libs'
# Names of additional weak libraries provided by this library
weak_library_names='$weak_libs'
# Version information for $libname.
current=$current
age=$age
revision=$revision
# Is this an already installed library?
installed=$installed
# Should we warn about portability when linking against -modules?
shouldnotlink=$module
# Files to dlopen/dlpreopen
dlopen='$dlfiles'
dlpreopen='$dlprefiles'
# Directory that this library needs to be installed in:
libdir='$install_libdir'"
if test no,yes = "$installed,$need_relink"; then
$ECHO >> $output "\
relink_command=\"$relink_command\""
fi
done
}
# Do a symbolic link so that the libtool archive can be found in
# LD_LIBRARY_PATH before the program is installed.
func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?'
;;
esac
exit $EXIT_SUCCESS
}
# Dispatch: hand the remaining command-line arguments to func_mode_link
# when the requested operation mode is 'link' or 'relink'.
case $opt_mode in
link | relink)
func_mode_link ${1+"$@"}
;;
esac
# func_mode_uninstall arg...
# Implements 'libtool --mode=clean' and 'libtool --mode=uninstall':
# remove the FILES named on the command line together with every
# artifact libtool created for them (shared/static libraries and their
# symlinks, PIC and non-PIC objects, wrapper scripts and helpers).
# $nonopt carries the RM program name; leading '-' arguments are
# forwarded to it.  Exits with nonzero status if any removal failed.
func_mode_uninstall ()
{
$debug_cmd
# RM accumulates the removal command; a -f flag additionally enables
# "force" semantics below (a missing file is silently skipped).
RM=$nonopt
files=
rmforce=false
exit_status=0
# This variable tells wrapper scripts just to set variables rather
# than running their programs.
libtool_install_magic=$magic
# Partition the arguments: options are appended to RM, everything
# else is treated as a file to remove.
for arg
do
case $arg in
-f) func_append RM " $arg"; rmforce=: ;;
-*) func_append RM " $arg" ;;
*) func_append files " $arg" ;;
esac
done
test -z "$RM" && \
func_fatal_help "you must specify an RM program"
rmdirs=
for file in $files; do
# Compute $odir, the per-directory $objdir where libtool keeps its
# build products for this file.
func_dirname "$file" "" "."
dir=$func_dirname_result
if test . = "$dir"; then
odir=$objdir
else
odir=$dir/$objdir
fi
func_basename "$file"
name=$func_basename_result
# In uninstall mode the files live at their installed location,
# not under $objdir.
test uninstall = "$opt_mode" && odir=$dir
# Remember odir for removal later, being careful to avoid duplicates
if test clean = "$opt_mode"; then
case " $rmdirs " in
*" $odir "*) ;;
*) func_append rmdirs " $odir" ;;
esac
fi
# Don't error if the file doesn't exist and rm -f was used.
# (Both -L and -h are tried — presumably for portability across
# shells whose test(1) supports only one spelling.)
if { test -L "$file"; } >/dev/null 2>&1 ||
{ test -h "$file"; } >/dev/null 2>&1 ||
test -f "$file"; then
:
elif test -d "$file"; then
exit_status=1
continue
elif $rmforce; then
continue
fi
rmfiles=$file
case $name in
*.la)
# Possibly a libtool archive, so verify it.
if func_lalib_p "$file"; then
# Source the .la file to pick up library_names, old_library,
# dlname, libdir, etc., which are used below.
func_source $dir/$name
# Delete the libtool libraries and symlinks.
for n in $library_names; do
func_append rmfiles " $odir/$n"
done
test -n "$old_library" && func_append rmfiles " $odir/$old_library"
case $opt_mode in
clean)
# Also remove $dlname unless it is already one of the
# $library_names queued above.
case " $library_names " in
*" $dlname "*) ;;
*) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;;
esac
# ${name}i is the "installed" variant of the .la file.
test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i"
;;
uninstall)
if test -n "$library_names"; then
# Do each command in the postuninstall commands.
func_execute_cmds "$postuninstall_cmds" '$rmforce || exit_status=1'
fi
if test -n "$old_library"; then
# Do each command in the old_postuninstall commands.
func_execute_cmds "$old_postuninstall_cmds" '$rmforce || exit_status=1'
fi
# FIXME: should reinstall the best remaining shared library.
;;
esac
fi
;;
*.lo)
# Possibly a libtool object, so verify it.
if func_lalib_p "$file"; then
# Read the .lo file
func_source $dir/$name
# Add PIC object to the list of files to remove.
if test -n "$pic_object" && test none != "$pic_object"; then
func_append rmfiles " $dir/$pic_object"
fi
# Add non-PIC object to the list of files to remove.
if test -n "$non_pic_object" && test none != "$non_pic_object"; then
func_append rmfiles " $dir/$non_pic_object"
fi
fi
;;
*)
# Anything else: in clean mode it may be a libtool program
# wrapper, which drags along generated helper files.
if test clean = "$opt_mode"; then
noexename=$name
case $file in
*.exe)
func_stripname '' '.exe' "$file"
file=$func_stripname_result
func_stripname '' '.exe' "$name"
noexename=$func_stripname_result
# $file with .exe has already been added to rmfiles,
# add $file without .exe
func_append rmfiles " $file"
;;
esac
# Do a test to see if this is a libtool program.
if func_ltwrapper_p "$file"; then
if func_ltwrapper_executable_p "$file"; then
# Binary wrapper executable: source its companion
# wrapper script and queue that script for removal too.
func_ltwrapper_scriptname "$file"
relink_command=
func_source $func_ltwrapper_scriptname_result
func_append rmfiles " $func_ltwrapper_scriptname_result"
else
relink_command=
func_source $dir/$noexename
fi
# note $name still contains .exe if it was in $file originally
# as does the version of $file that was added into $rmfiles
func_append rmfiles " $odir/$name $odir/${name}S.$objext"
if test yes = "$fast_install" && test -n "$relink_command"; then
func_append rmfiles " $odir/lt-$name"
fi
if test "X$noexename" != "X$name"; then
func_append rmfiles " $odir/lt-$noexename.c"
fi
fi
fi
;;
esac
# Remove everything queued for this file; record failure but keep going.
func_show_eval "$RM $rmfiles" 'exit_status=1'
done
# Try to remove the $objdir's in the directories where we deleted files
for dir in $rmdirs; do
if test -d "$dir"; then
func_show_eval "rmdir $dir >/dev/null 2>&1"
fi
done
exit $exit_status
}
if test uninstall = "$opt_mode" || test clean = "$opt_mode"; then
func_mode_uninstall ${1+"$@"}
fi
test -z "$opt_mode" && {
help=$generic_help
func_fatal_help "you must specify a MODE"
}
test -z "$exec_cmd" && \
func_fatal_help "invalid operation mode '$opt_mode'"
if test -n "$exec_cmd"; then
eval exec "$exec_cmd"
exit $EXIT_FAILURE
fi
exit $exit_status
# The TAGs below are defined such that we never get into a situation
# where we disable both kinds of libraries. Given conflicting
# choices, we go for a static library, that is the most portable,
# since we can't tell whether shared libraries were disabled because
# the user asked for that or because the platform doesn't support
# them. This is particularly important on AIX, because we don't
# support having both static and shared libraries enabled at the same
# time on that platform, so we default to a shared-only configuration.
# If a disable-shared tag is given, we'll fallback to a static-only
# configuration. But we'll never go from static-only to shared-only.
# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
build_libtool_libs=no
build_old_libs=yes
# ### END LIBTOOL TAG CONFIG: disable-shared
# ### BEGIN LIBTOOL TAG CONFIG: disable-static
build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac`
# ### END LIBTOOL TAG CONFIG: disable-static
# Local Variables:
# mode:shell-script
# sh-indentation:2
# End:
# ### BEGIN LIBTOOL TAG CONFIG: CXX
# The linker used to build libraries.
-LD="/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64"
+LD="/usr/bin/ld -m elf_x86_64"
# How to create reloadable object files.
reload_flag=" -r"
reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
# Commands used to build an old-style archive.
old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
# A language specific compiler.
CC="g++"
# Is the compiler the GNU compiler?
with_gcc=yes
# Compiler flag to turn off builtin functions.
no_builtin_flag=" -fno-builtin"
# Additional compiler flags for building library objects.
pic_flag=" -fPIC -DPIC"
# How to pass a linker flag through the compiler.
wl="-Wl,"
# Compiler flag to prevent dynamic linking.
link_static_flag="-static"
# Does compiler simultaneously support -c and -o options?
compiler_c_o="yes"
# Whether or not to add -lc for building shared libraries.
build_libtool_need_lc=no
# Whether or not to disallow shared libs when runtime libs are static.
allow_libtool_libs_with_static_runtimes=no
# Compiler flag to allow reflexive dlopens.
export_dynamic_flag_spec="\$wl--export-dynamic"
# Compiler flag to generate shared objects directly from archives.
whole_archive_flag_spec="\$wl--whole-archive\$convenience \$wl--no-whole-archive"
# Whether the compiler copes with passing no objects directly.
compiler_needs_object="no"
# Create an old-style archive from a shared archive.
old_archive_from_new_cmds=""
# Create a temporary old-style archive to link instead of a shared archive.
old_archive_from_expsyms_cmds=""
# Commands used to build a shared archive.
archive_cmds="\$CC \$pic_flag -shared -nostdlib \$predep_objects \$libobjs \$deplibs \$postdep_objects \$compiler_flags \$wl-soname \$wl\$soname -o \$lib"
archive_expsym_cmds="\$CC \$pic_flag -shared -nostdlib \$predep_objects \$libobjs \$deplibs \$postdep_objects \$compiler_flags \$wl-soname \$wl\$soname \$wl-retain-symbols-file \$wl\$export_symbols -o \$lib"
# Commands used to build a loadable module if different from building
# a shared archive.
module_cmds=""
module_expsym_cmds=""
# Whether we are building with GNU ld or not.
with_gnu_ld="yes"
# Flag that allows shared libraries with undefined symbols to be built.
allow_undefined_flag=""
# Flag that enforces no undefined symbols.
no_undefined_flag=""
# Flag to hardcode $libdir into a binary during linking.
# This must work even if $libdir does not exist
hardcode_libdir_flag_spec="\$wl-rpath \$wl\$libdir"
# Whether we need a single "-rpath" flag with a separated argument.
hardcode_libdir_separator=""
# Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
# DIR into the resulting binary.
hardcode_direct=no
# Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
# DIR into the resulting binary and the resulting library dependency is
# "absolute",i.e impossible to change by setting $shlibpath_var if the
# library is relocated.
hardcode_direct_absolute=no
# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
# into the resulting binary.
hardcode_minus_L=no
# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
# into the resulting binary.
hardcode_shlibpath_var=unsupported
# Set to "yes" if building a shared library automatically hardcodes DIR
# into the library and all subsequent libraries and executables linked
# against it.
hardcode_automatic=no
# Set to yes if linker adds runtime paths of dependent libraries
# to runtime path list.
inherit_rpath=no
# Whether libtool must link a program against all its dependency libraries.
link_all_deplibs=unknown
# Set to "yes" if exported symbols are required.
always_export_symbols=no
# The commands to list exported symbols.
export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
# Symbols that should not be listed in the preloaded symbols.
exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
# Symbols that must always be exported.
include_expsyms=""
# Commands necessary for linking programs (against libraries) with templates.
prelink_cmds=""
# Commands necessary for finishing linking programs.
postlink_cmds=""
# Specify filename containing input files.
file_list_spec=""
# How to hardcode a shared library path into an executable.
hardcode_action=immediate
# The directories searched by this compiler when creating a shared library.
compiler_lib_search_dirs="/usr/lib/gcc/x86_64-linux-gnu/7 /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu /usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib /lib/x86_64-linux-gnu /lib/../lib /usr/lib/x86_64-linux-gnu /usr/lib/../lib /usr/lib/gcc/x86_64-linux-gnu/7/../../.."
# Dependencies to place before and after the objects being linked to
# create a shared library.
predep_objects="/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o"
postdep_objects="/usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o"
predeps=""
postdeps="-lstdc++ -lm -lgcc_s -lc -lgcc_s"
# The library search path used internally by the compiler when linking
# a shared library.
compiler_lib_search_path="-L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.."
# ### END LIBTOOL TAG CONFIG: CXX
# ### BEGIN LIBTOOL TAG CONFIG: F77
# The linker used to build libraries.
-LD="/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64"
+LD="/usr/bin/ld -m elf_x86_64"
# How to create reloadable object files.
reload_flag=" -r"
reload_cmds="\$LD\$reload_flag -o \$output\$reload_objs"
# Commands used to build an old-style archive.
old_archive_cmds="\$AR \$AR_FLAGS \$oldlib\$oldobjs~\$RANLIB \$tool_oldlib"
# A language specific compiler.
CC="g77"
# Is the compiler the GNU compiler?
with_gcc=yes
# Compiler flag to turn off builtin functions.
no_builtin_flag=""
# Additional compiler flags for building library objects.
pic_flag=" -fPIC"
# How to pass a linker flag through the compiler.
wl="-Wl,"
# Compiler flag to prevent dynamic linking.
link_static_flag="-static"
# Does compiler simultaneously support -c and -o options?
compiler_c_o="yes"
# Whether or not to add -lc for building shared libraries.
build_libtool_need_lc=no
# Whether or not to disallow shared libs when runtime libs are static.
allow_libtool_libs_with_static_runtimes=no
# Compiler flag to allow reflexive dlopens.
export_dynamic_flag_spec="\$wl--export-dynamic"
# Compiler flag to generate shared objects directly from archives.
whole_archive_flag_spec="\$wl--whole-archive\$convenience \$wl--no-whole-archive"
# Whether the compiler copes with passing no objects directly.
compiler_needs_object="no"
# Create an old-style archive from a shared archive.
old_archive_from_new_cmds=""
# Create a temporary old-style archive to link instead of a shared archive.
old_archive_from_expsyms_cmds=""
# Commands used to build a shared archive.
archive_cmds="\$CC -shared \$pic_flag \$libobjs \$deplibs \$compiler_flags \$wl-soname \$wl\$soname -o \$lib"
archive_expsym_cmds="echo \\\"{ global:\\\" > \$output_objdir/\$libname.ver~
cat \$export_symbols | sed -e \\\"s/\\\\(.*\\\\)/\\\\1;/\\\" >> \$output_objdir/\$libname.ver~
echo \\\"local: *; };\\\" >> \$output_objdir/\$libname.ver~
\$CC -shared \$pic_flag \$libobjs \$deplibs \$compiler_flags \$wl-soname \$wl\$soname \$wl-version-script \$wl\$output_objdir/\$libname.ver -o \$lib"
# Commands used to build a loadable module if different from building
# a shared archive.
module_cmds=""
module_expsym_cmds=""
# Whether we are building with GNU ld or not.
with_gnu_ld="yes"
# Flag that allows shared libraries with undefined symbols to be built.
allow_undefined_flag=""
# Flag that enforces no undefined symbols.
no_undefined_flag=""
# Flag to hardcode $libdir into a binary during linking.
# This must work even if $libdir does not exist
hardcode_libdir_flag_spec="\$wl-rpath \$wl\$libdir"
# Whether we need a single "-rpath" flag with a separated argument.
hardcode_libdir_separator=""
# Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
# DIR into the resulting binary.
hardcode_direct=no
# Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes
# DIR into the resulting binary and the resulting library dependency is
# "absolute",i.e impossible to change by setting $shlibpath_var if the
# library is relocated.
hardcode_direct_absolute=no
# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
# into the resulting binary.
hardcode_minus_L=no
# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
# into the resulting binary.
hardcode_shlibpath_var=unsupported
# Set to "yes" if building a shared library automatically hardcodes DIR
# into the library and all subsequent libraries and executables linked
# against it.
hardcode_automatic=no
# Set to yes if linker adds runtime paths of dependent libraries
# to runtime path list.
inherit_rpath=no
# Whether libtool must link a program against all its dependency libraries.
link_all_deplibs=unknown
# Set to "yes" if exported symbols are required.
always_export_symbols=no
# The commands to list exported symbols.
export_symbols_cmds="\$NM \$libobjs \$convenience | \$global_symbol_pipe | \$SED 's/.* //' | sort | uniq > \$export_symbols"
# Symbols that should not be listed in the preloaded symbols.
exclude_expsyms="_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*"
# Symbols that must always be exported.
include_expsyms=""
# Commands necessary for linking programs (against libraries) with templates.
prelink_cmds=""
# Commands necessary for finishing linking programs.
postlink_cmds=""
# Specify filename containing input files.
file_list_spec=""
# How to hardcode a shared library path into an executable.
hardcode_action=immediate
# The directories searched by this compiler when creating a shared library.
compiler_lib_search_dirs=""
# Dependencies to place before and after the objects being linked to
# create a shared library.
predep_objects=""
postdep_objects=""
predeps=""
postdeps=""
# The library search path used internally by the compiler when linking
# a shared library.
compiler_lib_search_path=""
# ### END LIBTOOL TAG CONFIG: F77
Index: trunk/autom4te.cache/requests
===================================================================
--- trunk/autom4te.cache/requests (revision 579)
+++ trunk/autom4te.cache/requests (revision 580)
@@ -1,281 +1,281 @@
# This file was generated by Autom4te Sun Aug 20 23:09:08 UTC 2017.
# It contains the lists of macros which have been traced.
# It can be safely removed.
@request = (
bless( [
'0',
1,
[
'/usr/share/autoconf'
],
[
'/usr/share/autoconf/autoconf/autoconf.m4f',
'-',
'/usr/share/aclocal-1.15/internal/ac-config-macro-dirs.m4',
'/usr/share/aclocal/pkg.m4',
'/usr/share/aclocal-1.15/amversion.m4',
'/usr/share/aclocal-1.15/auxdir.m4',
'/usr/share/aclocal-1.15/cond.m4',
'/usr/share/aclocal-1.15/depend.m4',
'/usr/share/aclocal-1.15/depout.m4',
'/usr/share/aclocal-1.15/init.m4',
'/usr/share/aclocal-1.15/install-sh.m4',
'/usr/share/aclocal-1.15/lead-dot.m4',
'/usr/share/aclocal-1.15/make.m4',
'/usr/share/aclocal-1.15/missing.m4',
'/usr/share/aclocal-1.15/options.m4',
'/usr/share/aclocal-1.15/prog-cc-c-o.m4',
'/usr/share/aclocal-1.15/runlog.m4',
'/usr/share/aclocal-1.15/sanity.m4',
'/usr/share/aclocal-1.15/silent.m4',
'/usr/share/aclocal-1.15/strip.m4',
'/usr/share/aclocal-1.15/substnot.m4',
'/usr/share/aclocal-1.15/tar.m4',
'm4/libtool.m4',
'm4/ltoptions.m4',
'm4/ltsugar.m4',
'm4/ltversion.m4',
'm4/lt~obsolete.m4',
'configure.ac'
],
{
- 'LT_SYS_DLOPEN_SELF' => 1,
- 'AC_PROG_EGREP' => 1,
- 'LT_AC_PROG_EGREP' => 1,
- 'AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE' => 1,
- 'AC_LIBTOOL_LANG_CXX_CONFIG' => 1,
- 'AC_LIBTOOL_LINKER_OPTION' => 1,
- 'AM_AUX_DIR_EXPAND' => 1,
- 'AC_CONFIG_MACRO_DIR' => 1,
- 'AC_LTDL_PREOPEN' => 1,
- 'PKG_CHECK_MODULES' => 1,
- '_LT_AC_FILE_LTDLL_C' => 1,
- 'AM_DISABLE_STATIC' => 1,
- '_LT_PATH_TOOL_PREFIX' => 1,
- 'AC_LIBTOOL_DLOPEN_SELF' => 1,
- 'AC_CHECK_LIBM' => 1,
- '_LT_PROG_ECHO_BACKSLASH' => 1,
- 'LT_CMD_MAX_LEN' => 1,
- 'AC_LIBTOOL_LANG_F77_CONFIG' => 1,
- 'AC_LIBTOOL_OBJDIR' => 1,
- 'AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH' => 1,
- 'AC_DISABLE_STATIC' => 1,
- 'LTVERSION_VERSION' => 1,
- 'PKG_PROG_PKG_CONFIG' => 1,
- 'AM_SET_DEPDIR' => 1,
- 'm4_include' => 1,
- 'AC_DISABLE_FAST_INSTALL' => 1,
- '_LT_AC_LANG_RC_CONFIG' => 1,
- 'AM_SET_LEADING_DOT' => 1,
- 'AC_LIBTOOL_PROG_CC_C_O' => 1,
- 'PKG_INSTALLDIR' => 1,
+ 'AM_MISSING_HAS_RUN' => 1,
+ 'AC_PROG_NM' => 1,
+ 'AM_INIT_AUTOMAKE' => 1,
+ '_LT_AC_SYS_LIBPATH_AIX' => 1,
+ '_LT_AC_PROG_CXXCPP' => 1,
+ '_AM_IF_OPTION' => 1,
'_LT_AC_TAGVAR' => 1,
- 'AC_DEPLIBS_CHECK_METHOD' => 1,
- '_AM_AUTOCONF_VERSION' => 1,
- 'AC_LIBTOOL_PROG_LD_SHLIBS' => 1,
- '_AC_PROG_LIBTOOL' => 1,
- 'AC_LIBTOOL_PICMODE' => 1,
- 'AC_LIBTOOL_SYS_MAX_CMD_LEN' => 1,
- '_LT_PREPARE_SED_QUOTE_VARS' => 1,
- 'AC_PATH_MAGIC' => 1,
- 'LT_AC_PROG_GCJ' => 1,
- 'LT_PATH_NM' => 1,
- 'PKG_CHECK_VAR' => 1,
- 'AC_DISABLE_SHARED' => 1,
'_LT_AC_TAGCONFIG' => 1,
+ '_PKG_SHORT_ERRORS_SUPPORTED' => 1,
'_LT_COMPILER_OPTION' => 1,
+ '_LT_LINKER_BOILERPLATE' => 1,
+ 'AM_AUX_DIR_EXPAND' => 1,
+ '_LT_AC_LANG_C_CONFIG' => 1,
+ '_LT_AC_SYS_COMPILER' => 1,
+ 'AC_LIBTOOL_RC' => 1,
+ 'AC_LIBTOOL_SETUP' => 1,
+ 'AC_LIBTOOL_COMPILER_OPTION' => 1,
+ 'AC_LIBTOOL_DLOPEN' => 1,
+ 'AC_LIBTOOL_POSTDEP_PREDEP' => 1,
+ 'm4_pattern_forbid' => 1,
+ 'AC_LIBTOOL_SYS_OLD_ARCHIVE' => 1,
+ 'AC_LIBTOOL_PICMODE' => 1,
+ 'AC_DEFUN_ONCE' => 1,
+ '_LT_AC_LANG_CXX_CONFIG' => 1,
+ 'AC_LIBTOOL_DLOPEN_SELF' => 1,
+ '_LT_REQUIRED_DARWIN_CHECKS' => 1,
+ 'AC_PATH_MAGIC' => 1,
+ 'AC_ENABLE_STATIC' => 1,
+ '_AM_SET_OPTIONS' => 1,
+ 'AC_PATH_TOOL_PREFIX' => 1,
'AC_LIBTOOL_SYS_LIB_STRIP' => 1,
- 'AM_PROG_LD' => 1,
- '_m4_warn' => 1,
- 'AM_DEP_TRACK' => 1,
- 'LT_PROG_GCJ' => 1,
- 'AM_PROG_NM' => 1,
+ 'AC_PROG_LIBTOOL' => 1,
+ 'AM_PROG_LIBTOOL' => 1,
+ '_LT_LINKER_OPTION' => 1,
+ '_LT_AC_LOCK' => 1,
+ 'AM_MAKE_INCLUDE' => 1,
'AC_LTDL_ENABLE_INSTALL' => 1,
+ '_LT_PROG_ECHO_BACKSLASH' => 1,
'_LT_AC_LANG_F77' => 1,
- 'AM_INIT_AUTOMAKE' => 1,
- '_LT_AC_PROG_ECHO_BACKSLASH' => 1,
- 'include' => 1,
- 'LTOPTIONS_VERSION' => 1,
- 'AC_PROG_NM' => 1,
- 'AC_LIBTOOL_WIN32_DLL' => 1,
- 'AC_LIBTOOL_CXX' => 1,
- '_LT_CC_BASENAME' => 1,
- 'AC_LIBTOOL_F77' => 1,
- 'm4_pattern_forbid' => 1,
- '_AM_SET_OPTIONS' => 1,
- 'AM_SANITY_CHECK' => 1,
'_LT_AC_CHECK_DLFCN' => 1,
- 'AC_LIBTOOL_SYS_DYNAMIC_LINKER' => 1,
- 'AM_PROG_INSTALL_STRIP' => 1,
+ 'AC_LIBTOOL_LANG_F77_CONFIG' => 1,
+ 'AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE' => 1,
'_AM_SET_OPTION' => 1,
- 'AC_LIBTOOL_POSTDEP_PREDEP' => 1,
- '_LT_DLL_DEF_P' => 1,
- '_AM_DEPENDENCIES' => 1,
- 'AM_MISSING_HAS_RUN' => 1,
- 'AM_MAKE_INCLUDE' => 1,
- '_LT_AC_TRY_DLOPEN_SELF' => 1,
- 'PKG_CHECK_MODULES_STATIC' => 1,
- 'AC_PROG_LD_GNU' => 1,
- '_LT_WITH_SYSROOT' => 1,
- '_LT_AC_LANG_F77_CONFIG' => 1,
- '_LT_AC_LANG_CXX_CONFIG' => 1,
- 'AM_SUBST_NOTMAKE' => 1,
+ 'AC_LIBTOOL_SYS_HARD_LINK_LOCKS' => 1,
+ 'AC_LIBTOOL_PROG_COMPILER_NO_RTTI' => 1,
+ 'AM_DEP_TRACK' => 1,
+ 'PKG_CHECK_EXISTS' => 1,
+ 'AM_PROG_INSTALL_STRIP' => 1,
+ 'PKG_INSTALLDIR' => 1,
+ 'AM_SILENT_RULES' => 1,
+ 'PKG_PROG_PKG_CONFIG' => 1,
'AC_LIBTOOL_LANG_C_CONFIG' => 1,
- 'AM_ENABLE_STATIC' => 1,
- 'LT_PROG_GO' => 1,
+ 'LT_SUPPORTED_TAG' => 1,
+ 'AC_LIBTOOL_OBJDIR' => 1,
+ 'PKG_NOARCH_INSTALLDIR' => 1,
+ 'PKG_CHECK_VAR' => 1,
+ 'AC_LTDL_PREOPEN' => 1,
+ 'AC_LIBTOOL_PROG_CC_C_O' => 1,
+ 'AM_SANITY_CHECK' => 1,
+ 'LTVERSION_VERSION' => 1,
+ 'AC_LIBTOOL_SYS_DYNAMIC_LINKER' => 1,
+ 'LT_AC_PROG_EGREP' => 1,
+ '_LT_AC_LANG_GCJ' => 1,
'AM_MISSING_PROG' => 1,
- '_LT_AC_SYS_COMPILER' => 1,
+ 'AM_SUBST_NOTMAKE' => 1,
+ '_LT_AC_SHELL_INIT' => 1,
+ 'AM_OUTPUT_DEPENDENCY_COMMANDS' => 1,
+ 'AC_DISABLE_STATIC' => 1,
+ 'LT_PROG_GCJ' => 1,
+ '_LT_PROG_FC' => 1,
+ '_AM_MANGLE_OPTION' => 1,
+ '_LT_PROG_LTMAIN' => 1,
+ 'AC_PROG_LD_RELOAD_FLAG' => 1,
+ '_LT_AC_TRY_DLOPEN_SELF' => 1,
+ 'AM_PROG_CC_C_O' => 1,
+ 'AC_LIBTOOL_CXX' => 1,
+ 'LT_LANG' => 1,
'AC_CONFIG_MACRO_DIR_TRACE' => 1,
- '_LT_LINKER_OPTION' => 1,
+ 'LT_CMD_MAX_LEN' => 1,
+ 'AC_LIBTOOL_LINKER_OPTION' => 1,
+ 'AM_ENABLE_SHARED' => 1,
'AC_PROG_LD' => 1,
- 'AM_DISABLE_SHARED' => 1,
- 'AM_PROG_CC_C_O' => 1,
- 'AC_LIBTOOL_COMPILER_OPTION' => 1,
- '_AM_PROG_CC_C_O' => 1,
- '_LT_AC_LANG_CXX' => 1,
+ 'AC_CONFIG_MACRO_DIR' => 1,
+ 'AM_SET_LEADING_DOT' => 1,
'_LT_PROG_F77' => 1,
- 'AM_PROG_INSTALL_SH' => 1,
- 'PKG_NOARCH_INSTALLDIR' => 1,
- 'AM_OUTPUT_DEPENDENCY_COMMANDS' => 1,
- '_LT_PROG_CXX' => 1,
- 'AU_DEFUN' => 1,
- 'AM_CONDITIONAL' => 1,
- '_LT_AC_LANG_GCJ_CONFIG' => 1,
- 'AC_DEFUN_ONCE' => 1,
+ '_LT_WITH_SYSROOT' => 1,
'_AC_AM_CONFIG_HEADER_HOOK' => 1,
- 'LT_PATH_LD' => 1,
- 'AC_LIBTOOL_LANG_GCJ_CONFIG' => 1,
- 'AC_PROG_LIBTOOL' => 1,
- 'AC_DEFUN' => 1,
- '_AM_SUBST_NOTMAKE' => 1,
- '_LT_AC_LANG_C_CONFIG' => 1,
- '_LT_AC_SYS_LIBPATH_AIX' => 1,
- 'AC_LIBTOOL_PROG_COMPILER_NO_RTTI' => 1,
'AC_LIBTOOL_FC' => 1,
- 'AC_ENABLE_STATIC' => 1,
- '_LT_REQUIRED_DARWIN_CHECKS' => 1,
- '_LT_PROG_LTMAIN' => 1,
- '_LT_PROG_FC' => 1,
- 'AC_LTDL_OBJDIR' => 1,
- 'AM_RUN_LOG' => 1,
- '_LT_AC_LANG_GCJ' => 1,
- '_LT_AC_LOCK' => 1,
- '_PKG_SHORT_ERRORS_SUPPORTED' => 1,
- 'LT_LANG' => 1,
- '_LT_AC_SHELL_INIT' => 1,
- 'AC_ENABLE_FAST_INSTALL' => 1,
- 'AC_LIBTOOL_SETUP' => 1,
- 'AM_PROG_LIBTOOL' => 1,
- 'AC_LIBTOOL_RC' => 1,
+ 'AM_SET_DEPDIR' => 1,
'LT_INIT' => 1,
- 'LT_AC_PROG_RC' => 1,
- 'AC_PATH_TOOL_PREFIX' => 1,
- '_AM_PROG_TAR' => 1,
- 'AC_ENABLE_SHARED' => 1,
- 'AC_LIBTOOL_SYS_OLD_ARCHIVE' => 1,
- '_LT_LINKER_BOILERPLATE' => 1,
- '_LT_COMPILER_BOILERPLATE' => 1,
+ '_LT_CC_BASENAME' => 1,
+ 'LTSUGAR_VERSION' => 1,
'LTOBSOLETE_VERSION' => 1,
+ 'AM_ENABLE_STATIC' => 1,
+ 'AM_DISABLE_STATIC' => 1,
+ '_LT_AC_LANG_RC_CONFIG' => 1,
+ '_LT_PREPARE_SED_QUOTE_VARS' => 1,
+ '_LT_COMPILER_BOILERPLATE' => 1,
+ 'AC_CHECK_LIBM' => 1,
+ 'AM_SET_CURRENT_AUTOMAKE_VERSION' => 1,
+ 'AC_LTDL_OBJDIR' => 1,
+ 'include' => 1,
+ 'AC_LIBTOOL_GCJ' => 1,
+ 'LT_OUTPUT' => 1,
'AM_AUTOMAKE_VERSION' => 1,
- '_AM_OUTPUT_DEPENDENCY_COMMANDS' => 1,
+ 'AC_PROG_LD_GNU' => 1,
+ 'LT_PROG_GO' => 1,
+ 'm4_include' => 1,
+ 'AC_LIBTOOL_LANG_CXX_CONFIG' => 1,
'AC_LIBTOOL_CONFIG' => 1,
- 'AC_LIBTOOL_LANG_RC_CONFIG' => 1,
- 'AM_SILENT_RULES' => 1,
- 'PKG_CHECK_EXISTS' => 1,
- 'LT_LIB_M' => 1,
- 'LT_SUPPORTED_TAG' => 1,
+ 'AC_LIBTOOL_WIN32_DLL' => 1,
+ '_LT_AC_PROG_ECHO_BACKSLASH' => 1,
+ '_LT_DLL_DEF_P' => 1,
+ '_LT_AC_LANG_CXX' => 1,
+ 'LTOPTIONS_VERSION' => 1,
'_AM_CONFIG_MACRO_DIRS' => 1,
+ '_m4_warn' => 1,
+ 'PKG_CHECK_MODULES_STATIC' => 1,
+ '_LT_AC_FILE_LTDLL_C' => 1,
+ 'LT_PATH_LD' => 1,
+ '_AC_PROG_LIBTOOL' => 1,
+ 'PKG_CHECK_MODULES' => 1,
+ 'AM_PROG_NM' => 1,
+ '_AM_OUTPUT_DEPENDENCY_COMMANDS' => 1,
'm4_pattern_allow' => 1,
- 'AC_LIBTOOL_GCJ' => 1,
- 'AC_LIBTOOL_SYS_HARD_LINK_LOCKS' => 1,
- '_LT_AC_PROG_CXXCPP' => 1,
- 'AM_ENABLE_SHARED' => 1,
+ '_LT_PROG_CXX' => 1,
+ 'AC_DEPLIBS_CHECK_METHOD' => 1,
+ 'AC_LIBTOOL_LANG_GCJ_CONFIG' => 1,
+ '_AM_AUTOCONF_VERSION' => 1,
+ 'AC_LIBTOOL_F77' => 1,
+ 'LT_AC_PROG_SED' => 1,
+ 'AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH' => 1,
+ 'AM_CONDITIONAL' => 1,
+ 'AC_PROG_EGREP' => 1,
+ '_LT_AC_LANG_GCJ_CONFIG' => 1,
+ '_LT_PATH_TOOL_PREFIX' => 1,
'AC_LIBTOOL_PROG_COMPILER_PIC' => 1,
+ '_AM_PROG_TAR' => 1,
+ '_LT_AC_LANG_F77_CONFIG' => 1,
+ 'LT_AC_PROG_RC' => 1,
+ 'AM_PROG_LD' => 1,
+ 'AU_DEFUN' => 1,
+ 'AC_LIBTOOL_SYS_MAX_CMD_LEN' => 1,
+ '_AM_SUBST_NOTMAKE' => 1,
+ 'AC_ENABLE_SHARED' => 1,
+ 'LT_PATH_NM' => 1,
+ '_AM_PROG_CC_C_O' => 1,
+ 'AC_DEFUN' => 1,
+ 'AC_DISABLE_SHARED' => 1,
+ 'LT_SYS_DLOPEN_SELF' => 1,
+ 'AM_DISABLE_SHARED' => 1,
+ 'LT_LIB_M' => 1,
+ 'AC_LIBTOOL_PROG_LD_SHLIBS' => 1,
+ '_AM_DEPENDENCIES' => 1,
'LT_PROG_RC' => 1,
- 'LTSUGAR_VERSION' => 1,
- 'LT_OUTPUT' => 1,
- 'AC_PROG_LD_RELOAD_FLAG' => 1,
- 'LT_AC_PROG_SED' => 1,
- 'AM_SET_CURRENT_AUTOMAKE_VERSION' => 1,
- '_AM_MANGLE_OPTION' => 1,
- '_AM_IF_OPTION' => 1,
- 'AC_LIBTOOL_DLOPEN' => 1
+ 'AC_DISABLE_FAST_INSTALL' => 1,
+ 'AM_RUN_LOG' => 1,
+ 'AC_ENABLE_FAST_INSTALL' => 1,
+ 'LT_AC_PROG_GCJ' => 1,
+ 'AC_LIBTOOL_LANG_RC_CONFIG' => 1,
+ 'AM_PROG_INSTALL_SH' => 1
}
], 'Autom4te::Request' ),
bless( [
'1',
1,
[
'/usr/share/autoconf'
],
[
'/usr/share/autoconf/autoconf/autoconf.m4f',
'aclocal.m4',
'configure.ac'
],
{
- 'AM_MAINTAINER_MODE' => 1,
- 'AM_POT_TOOLS' => 1,
- 'AC_CANONICAL_HOST' => 1,
- '_LT_AC_TAGCONFIG' => 1,
- 'AM_ENABLE_MULTILIB' => 1,
- 'AM_XGETTEXT_OPTION' => 1,
- 'AM_PROG_LIBTOOL' => 1,
- 'AC_CONFIG_HEADERS' => 1,
- 'AH_OUTPUT' => 1,
- 'AC_REQUIRE_AUX_FILE' => 1,
- '_m4_warn' => 1,
- 'AM_PROG_CXX_C_O' => 1,
- 'AM_PROG_AR' => 1,
- 'AM_GNU_GETTEXT_INTL_SUBDIR' => 1,
- 'm4_sinclude' => 1,
- 'AC_FC_SRCEXT' => 1,
+ 'LT_CONFIG_LTDL_DIR' => 1,
'AC_CONFIG_SUBDIRS' => 1,
- 'AM_PROG_F77_C_O' => 1,
- '_AM_COND_ENDIF' => 1,
- 'AM_PROG_CC_C_O' => 1,
- 'AM_PROG_FC_C_O' => 1,
- 'AM_PROG_MKDIR_P' => 1,
- 'LT_INIT' => 1,
- 'AC_LIBSOURCE' => 1,
- 'AC_CANONICAL_TARGET' => 1,
- 'm4_include' => 1,
- 'AC_CANONICAL_BUILD' => 1,
- 'AC_CONFIG_LINKS' => 1,
+ 'AM_GNU_GETTEXT' => 1,
+ 'AC_DEFINE_TRACE_LITERAL' => 1,
+ 'm4_pattern_forbid' => 1,
+ 'AC_CONFIG_LIBOBJ_DIR' => 1,
+ '_AM_SUBST_NOTMAKE' => 1,
+ 'AM_PROG_AR' => 1,
'AM_PATH_GUILE' => 1,
+ 'AC_CONFIG_LINKS' => 1,
+ 'LT_INIT' => 1,
+ 'sinclude' => 1,
+ 'AM_PROG_CXX_C_O' => 1,
+ 'AC_FC_PP_DEFINE' => 1,
+ 'm4_sinclude' => 1,
+ 'AC_CONFIG_AUX_DIR' => 1,
+ 'AC_PROG_LIBTOOL' => 1,
'include' => 1,
- 'AM_INIT_AUTOMAKE' => 1,
- 'AM_AUTOMAKE_VERSION' => 1,
- 'LT_SUPPORTED_TAG' => 1,
+ 'AM_PROG_LIBTOOL' => 1,
'AM_NLS' => 1,
+ 'AC_CANONICAL_HOST' => 1,
+ 'AC_FC_FREEFORM' => 1,
+ 'AM_AUTOMAKE_VERSION' => 1,
+ 'AM_PROG_FC_C_O' => 1,
+ 'AM_MAINTAINER_MODE' => 1,
+ 'm4_include' => 1,
+ 'AC_SUBST_TRACE' => 1,
'AM_SILENT_RULES' => 1,
- '_AM_COND_IF' => 1,
- 'm4_pattern_forbid' => 1,
- 'AC_FC_PP_DEFINE' => 1,
- 'sinclude' => 1,
- 'LT_CONFIG_LTDL_DIR' => 1,
- '_AM_COND_ELSE' => 1,
- 'AM_MAKEFILE_INCLUDE' => 1,
+ 'AC_REQUIRE_AUX_FILE' => 1,
+ 'AC_CONFIG_FILES' => 1,
+ 'AC_INIT' => 1,
+ 'AM_ENABLE_MULTILIB' => 1,
'm4_pattern_allow' => 1,
- 'AC_PROG_LIBTOOL' => 1,
- 'AC_CONFIG_LIBOBJ_DIR' => 1,
- 'AC_CONFIG_AUX_DIR' => 1,
- 'AC_SUBST_TRACE' => 1,
- 'AM_CONDITIONAL' => 1,
- 'AC_FC_PP_SRCEXT' => 1,
'AC_SUBST' => 1,
+ 'AC_LIBSOURCE' => 1,
+ 'AM_INIT_AUTOMAKE' => 1,
+ 'AH_OUTPUT' => 1,
+ '_AM_COND_IF' => 1,
+ '_AM_COND_ENDIF' => 1,
+ 'AM_MAKEFILE_INCLUDE' => 1,
+ '_m4_warn' => 1,
+ 'AM_PROG_CC_C_O' => 1,
'AC_CANONICAL_SYSTEM' => 1,
- 'AC_FC_FREEFORM' => 1,
- 'AM_GNU_GETTEXT' => 1,
- 'AM_PROG_MOC' => 1,
+ 'AC_CANONICAL_TARGET' => 1,
+ 'AC_FC_SRCEXT' => 1,
+ 'AM_XGETTEXT_OPTION' => 1,
'AM_EXTRA_RECURSIVE_TARGETS' => 1,
- 'AC_CONFIG_FILES' => 1,
- 'AC_INIT' => 1,
- '_AM_SUBST_NOTMAKE' => 1,
+ 'AM_PROG_MKDIR_P' => 1,
+ 'AC_CONFIG_HEADERS' => 1,
+ 'AC_FC_PP_SRCEXT' => 1,
+ 'AM_PROG_MOC' => 1,
+ 'AM_GNU_GETTEXT_INTL_SUBDIR' => 1,
+ '_AM_COND_ELSE' => 1,
'_AM_MAKEFILE_INCLUDE' => 1,
- 'AC_DEFINE_TRACE_LITERAL' => 1
+ 'LT_SUPPORTED_TAG' => 1,
+ '_LT_AC_TAGCONFIG' => 1,
+ 'AM_PROG_F77_C_O' => 1,
+ 'AC_CANONICAL_BUILD' => 1,
+ 'AM_POT_TOOLS' => 1,
+ 'AM_CONDITIONAL' => 1
}
], 'Autom4te::Request' )
);
Index: trunk/doc/stat_methods.tex
===================================================================
--- trunk/doc/stat_methods.tex (revision 579)
+++ trunk/doc/stat_methods.tex (revision 580)
@@ -1,3788 +1,3790 @@
\documentclass[12pt,titlepage]{article}
\usepackage{fullpage}
\usepackage{amsmath}
\usepackage{makeidx}
\usepackage{epsfig}
\usepackage[hyperfootnotes=false]{hyperref}
\usepackage{longtable}
\usepackage[width=.85\textwidth]{caption}
\newcommand{\sub}[1]{\ensuremath{_{\mbox{\scriptsize \,#1}}}}
\newcommand{\supers}[1]{\ensuremath{^{\mbox{\scriptsize #1}}}}
\newcommand{\cname}[1]{\index{#1}\textsf{#1}}
\newcommand{\E}{\ensuremath{\mathbb{E}}}
\newcommand{\V}{\ensuremath{\mathbb{V}}}
\newcommand{\R}{\ensuremath{\mathbb{R}}}
\newcommand{\etal}{\emph{et al.}}
\newcommand{\hf}{\hat{f}_N}
\newcommand{\xf}{x\sub{fit}}
\newcommand{\scriptname}[1]{\texttt{#1}}
\newcommand{\at}{\tilde{a}\sub{fit}}
\newcommand{\bt}{\tilde{b}\sub{fit}}
\makeindex
\renewcommand\indexname{Functions and Classes}
\newenvironment{thinlist} {
\begin{list} {---} {
\setlength{\topsep}{0.075cm}
\setlength{\parsep}{0.075cm}
\setlength{\itemsep}{0.075cm}
}
} {\end{list}}
\begin{document}
\title{Statistical Methods in the NPStat Package}
\author{I. Volobouev, {\it i.volobouev@ttu.edu}}
\date{Version: 4.9.0 \hspace{1.5cm} Date: \today}
\maketitle
\tableofcontents
\newpage
\section{Preliminaries}
\label{sec:preliminaries}
The NPStat package provides a C++ implementation of
several nonparametric
statistical modeling tools together with
various supporting code. Nonparametric modeling becomes very
useful when there is little prior information available to justify an
assumption that the data belongs to a certain parametric family of
distributions. Even though nonparametric techniques are usually
more computationally intensive than parametric ones,
judicious use of fast algorithms in combination with
increasing power of modern computers often results in very practical
solutions --- at least until the ``curse of dimensionality'' starts
imposing its heavy toll.
It will be assumed that the reader of this note is reasonably
proficient in C++ programming, and that the meaning
of such words as ``class'', ``function'', ``template'',
``method'', ``container'', ``namespace'', {\it etc.}
is obvious from their context.
The latest version of the package code can be downloaded from
\href{http://npstat.hepforge.org/}{http://npstat.hepforge.org/}
Most classes and functions implemented in NPStat are placed in the
``npstat'' namespace. A~few functions and classes that
need Minuit2~\cite{ref:minuit} installation
in order to be meaningfully used belong
to the ``npsi'' namespace. Such functions and
classes can be found in the ``interfaces''
directory of the package.
When a C++ function, class, or template is mentioned in the text
for the first time, the header file which contains its declaration
is often mentioned as well. If the header file is not mentioned,
it is most likely ``npstat/stat/NNNN.hh'', where NNNN stands
for the actual name of the class or function.
An expression ``$I(Q)$'' is frequently employed in the text,
where $Q$ is some logical proposition.
$I(Q)$ is the proposition indicator function
defined as follows: $I(Q) = 1$ if $Q$ is true
and $I(Q) = 0$ if $Q$ is false. For example, a product of $I(0 \le x \le 1)$
with some other mathematical function is often used to
denote some probability density supported
on the $[0, 1]$ interval. Note that $I(\neg Q) = 1 - I(Q)$, where
$\neg Q$ is the logical negation of $Q$.
\section{Data Representation}
\label{sec:datarep}
For data storage, the NPStat software relies on the object serialization
and I/O capabilities provided
by the C++ Geners package~\cite{ref:geners}.
Geners supports persistence of all standard library containers
(vectors, lists, maps, {\it etc.}) including those introduced
in the C++11 Standard~\cite{ref:cpp11} (arrays, tuples, unordered sets
and maps). In addition to data structures supported by Geners, NPStat
offers several other persistent containers useful in statistical
data analysis: multidimensional arrays, homogeneous $n$-tuples, and
histograms.
Persistent multidimensional arrays are implemented with the \cname{ArrayND}
template (header file ``npstat/nm/ArrayND.hh'').
This is a~reasonably full-featured array class, with support for
subscripting (with and without bounds checking), vector space operations
(addition, subtraction,
multiplication by a scalar), certain tensor operations (index contraction,
outer product), slicing, iteration over subranges, linear and cubic
interpolation of array values
to fractional indices, {\it etc.} Arrays whose maximum
number of elements is known at the compile time can be efficiently
created on the stack.
Homogeneous $n$-tuples are two-dimensional tables in which the number of
columns is fixed while the number of rows can grow dynamically.
The stored object type is chosen when the $n$-tuple is created
(it is a template parameter). For
homogeneous $n$-tuples, storage type is the same for every column
which allows for more efficient optimization of memory allocation
in comparison with heterogeneous $n$-tuples.\footnote{The Geners
serialization package supports
several types of persistent heterogeneous $n$-tuples including
std::vector$<$std::tuple$<$...$> >$,
RowPacker variadic template
optimized for accessing large data samples row-by-row,
and ColumnPacker variadic template
optimized for data access column-by-column.}
Two persistent template classes are provided for homogeneous $n$-tuples:
\cname{InMemoryNtuple} for $n$-tuples which can completely fit inside
the computer memory (header file ``npstat/stat/InMemoryNtuple.hh'')
and \cname{ArchivedNtuple} for $n$-tuples
which can grow as large as the available disk space
(header file ``npstat/stat/ArchivedNtuple.hh''). Both of these
classes inherit from the abstract class \cname{AbsNtuple} which
defines all interfaces relevant for statistical analysis of the data
these $n$-tuples contain. Thus \cname{InMemoryNtuple} and \cname{ArchivedNtuple}
are completely interchangeable for use in various data analysis algorithms.
Arbitrary-dimensional\footnote{To be precise,
the number of dimensions for both histograms
and multidimensional arrays can not exceed
31 on 32-bit systems and 63 on 64-bit systems. However, your computer will
probably run out of memory long before this limitation
becomes relevant for your data analysis.} persistent
histograms are implemented with the \cname{HistoND}
template (header file ``npstat/stat/HistoND.hh''). Both uniform and non-uniform
placing of bin edges can be created using
axis classes \cname{HistoAxis} and \cname{NUHistoAxis}, respectively,
as \cname{HistoND} template parameters. The \cname{DualHistoAxis} class
can be employed for histograms in which only a fraction of axes
must have non-uniform bins.
Two binning schemes are supported
for data sample processing.
In the normal scheme, the bin count is incremented if the point coordinates
fall inside the given bin (the \cname{fill} method of the class).
In the centroid-preserving scheme,
all bins in the neighborhood of the sample point
are incremented in such a way that the center of mass of bin
increments coincides exactly with the point location
(the \cname{fillC} method)\footnote{For one-dimensional histograms,
this is called ``linear binning''~\cite{ref:linearbinning}.
NPStat supports this technique for uniformly binned
histograms of arbitrary dimensionality.}.
The objects used as
histogram bin contents and the objects used as
weights added to the bins when
the histogram is filled are not required to have
anything in common; the only requirement is that a~meaningful
binary function (typically, operator+=) can be defined for them.
This level of abstraction allows, for example, for using vectors and tensors
as histogram bin contents or for implementing profile histograms with
the same \cname{HistoND} template. As the bin contents are implemented
with the \cname{ArrayND} class, all features of \cname{ArrayND}
(vector space operations, slicing, interpolation, {\it etc.}) are available
for them as well. Overflows are stored in separate arrays with
three cells in each dimension: underflow, overflow, and the central part
covered by the regular histogram bins.
\section{Descriptive Statistics}
\label{sec:descriptivestats}
A number of facilities are included in the NPStat package for calculating
standard descriptive sample statistics such as mean, covariance matrix,
{\it etc}. Relevant functions, classes, and templates are described below.
\subsection{Functions and Classes for Use with Point Clouds}
\cname{arrayStats} (header file ``npstat/stat/arrayStats.hh'') --- This
function estimates the population mean ($\mu$), standard deviation ($\sigma$),
skewness ($s$),
and kurtosis ($k$) for a one-dimensional set of points. A numerically sound
two-pass algorithm is used.
% Internally, calculations are performed in long double precision.
The following formulae are implemented ($N$ is the number of points
in the sample):
\begin{equation}
\mu = \frac{1}{N} \sum_{i=0}^{N-1} x_i
\label{eq:samplemean}
\end{equation}
\begin{equation}
\sigma = \sqrt{\frac{1}{N - 1} \sum_{i=0}^{N-1} (x_i - \mu)^2}
\label{eq:samplestdev}
\end{equation}
\begin{equation}
s = \frac{N}{(N - 1) (N - 2) \sigma^3} \sum_{i=0}^{N-1} (x_i - \mu)^3
\end{equation}
\begin{equation}
g_2 = N \frac{\sum_{i=0}^{N-1} (x_i - \mu)^4}{\left( \sum_{i=0}^{N-1} (x_i - \mu)^2 \right)^2} - 3
\label{eq:kurtosisg2}
\end{equation}
\begin{equation}
k = \frac{N - 1}{(N - 2) (N - 3)} ((N + 1) g_2 + 6) + 3
\label{eq:kurtosis}
\end{equation}
Note that the population kurtosis estimate is defined in such a way that its
expectation value for a sample drawn from the Gaussian distribution equals
3 rather than 0. For more details on the origin of these formulae
consult Ref.~\cite{ref:sampleskewkurt}.
\cname{empiricalCdf} (header file ``npstat/stat/StatUtils.hh'') --- This
function evaluates the empirical cumulative distribution function (ECDF) for
a sample of points. The points must be sorted
in the increasing order by the user before
the function call is made.
The ECDF is defined as follows. Suppose, the $N$ sorted point values are
$\{x_0, x_1, ..., x_{N-1}\}$ and all $x_i$ are distinct ({\it i.e.,} there
are no ties). Then
\begin{equation}
\mbox{ECDF}(x) = \left\{ \begin{array}{ll}
0 & \mbox{if\ } x \le x_0, \\
\frac{1}{N} \left( \frac{x-x_i}{x_{i+1}-x_i}+i+1/2 \right) & \mbox{if\ } x_i < x \le x_{i+1} \ \mbox{and} \ x < x_{N-1}, \\
1 & \mbox{if\ } x \ge x_{N-1}.
\end{array}
\right.
\label{eq:ecdf}
\end{equation}
When all sample points are distinct, $\mbox{ECDF}(x)$ is discontinuous
at $x = x_0$ and $x = x_{N-1}$ and continuous for all other $x$.
If the sample has more than one point
with the same $x$, say $x_k$ and
$x_{k+1}$ where $k > 0$ and $k < N - 2$, $x_k$ also becomes
a point of discontinuity.
$\mbox{ECDF}(x_k)$ will be set to $\frac{1}{N} (k + 1/2)$, the value
coincident with the function left limit.
\cname{empiricalQuantile} (header file ``npstat/stat/StatUtils.hh'') --- This
is the empirical quantile function, $\mbox{EQF}(y)$, which provides
an inverse to $\mbox{ECDF}(x)$. For any $x$ from the interval
$[x_0, x_{N-1}]$, $\mbox{EQF}(\mbox{ECDF}(x)) = x$ (up to numerical
round-off errors). If $x < x_0$ then $\mbox{EQF}(\mbox{ECDF}(x)) = x_0$ and if
$x > x_{N-1}$ then $\mbox{EQF}(\mbox{ECDF}(x)) = x_{N-1}$.
\cname{MultivariateSumAccumulator} and \cname{MultivariateSumsqAccumulator}
--- These classes are intended for calculating means and covariance matrices
for multivariate data samples in a~numerically stable manner. The
classes can be
used inside user-implemented cycles over sets of points or with
\cname{cycleOverRows} and other similar methods of the \cname{AbsNtuple}
class. Two passes over the data
are necessary for calculating the covariance matrix,
first with
\cname{MultivariateSumAccumulator} and second with
\cname{MultivariateSumsqAccumulator}. The multivariate
mean is initially found from
\begin{equation}
\overline{{\bf x}} = \frac{1}{N} \sum_{i=0}^{N-1} {\bf x}_i
\end{equation}
and then an estimate of the population covariance matrix is calculated as
\begin{equation}
V = \frac{1}{N - 1} \
\sum_{i=0}^{N-1} ({\bf x}_i - \overline{{\bf x}}) ({\bf x}_i - \overline{{\bf x}})^T
\label{eq:covar}
\end{equation}
\cname{MultivariateWeightedSumAccumulator} and
\cname{MultivariateWeightedSumsqAccumulator} --- These classes are
intended for calculating means and covariance matrices
for multivariate data samples in which points enter with non-negative
weights:
\begin{equation}
\overline{{\bf x}} = \frac{\sum_{i=0}^{N-1} w_i {\bf x}_i}{\sum_{i=0}^{N-1} w_i},
\label{eq:weightedmean}
\end{equation}
\begin{equation}
V = \frac{N_{eff}}{N_{eff} - 1} \
\frac{\sum_{i=0}^{N-1} w_i ({\bf x}_i - \overline{\bf x}) ({\bf x}_i - \overline{\bf x})^T}{\sum_{i=0}^{N-1} w_i},
\label{eq:weightedcov}
\end{equation}
where $N_{eff}$ is the effective number of entries in a weighted
sample\footnote{$N_{eff}$ is also known as Kish's effective sample size. Note that this
estimate of $V$ makes sense only if the weights and point locations are statistically independent
from each other. If the weights are functions of the observed ${\bf x}_i$ then there
appears to be no general estimate that is independent of the weighting function.}:
\begin{equation}
N_{eff} = \frac{\left( \sum_{i=0}^{N-1} w_i \right)^2}{\sum_{i=0}^{N-1} w_i^2}.
\label{eq:neff}
\end{equation}
Two passes over the data are required, first with
\cname{MultivariateWeightedSumAccumulator} and then with
\cname{MultivariateWeightedSumsqAccumulator}.
\cname{StatAccumulator} --- Persistent class which determines
the minimum, the maximum, the mean, and the standard
deviation of a~sample of values using a single-pass algorithm.
It can be used when the two-pass calculation
is either impossible or impractical for
some reason. This class can be conveniently employed as
a bin content type for \cname{HistoND} (this turns \cname{HistoND}
into a profile histogram).
\cname{StatAccumulator}
maintains a running average which is updated
after accumulating $2^K$ points, $K = 0, 1, 2, ...$ The
numerical precision of the standard deviation determined
in this manner is not as good as in the two-pass method but it
is usually much better than that expected
from a~naive implementation which simply accumulates sample moments about 0
and then evaluates $\sigma^2$ according to
$\frac{N}{N - 1} (\overline{{\bf x}^2} - {\overline{\bf x}}^2)$ (this formula
can suffer from a severe subtractive cancellation problem).
\cname{WeightedStatAccumulator} --- Similar class for calculating
mean and standard deviation of a~sample of weighted values using
a single-pass algorithm.
\cname{StatAccumulatorArr} --- This class provides functionality similar
to an array of \cname{StatAccumulator} objects but with a more convenient
interface for accumulating sample values (cycling over the input values
is performed by the class itself).
\cname{StatAccumulatorPair} --- Two \cname{StatAccumulator} objects
augmented by the sum of cross terms which allows for subsequent
calculation of covariance. The covariance is estimated using
a formula which can potentially suffer from subtractive
cancellation\footnote{This problem is partially alleviated
by using long double type for internal calculations.}.
Therefore, this class should not be used to process large samples of points in
which, for one of the variables or for both of them,
the absolute value of the mean can be much larger than the
standard deviation. This class is persistent.
\cname{CrossCovarianceAccumulator} --- To accumulate the sums for
covariance matrix calculation according to formula~\ref{eq:covar}
in $d$-dimensional space,
one has to keep track of $d (d + 1)/2$ sums of squares and cross terms.
Sometimes only a small part of the complete covariance matrix is of interest.
In this case $d$ can often be split into subspaces of dimensionalities
$d_1$ and $d_2$, $d = d_1 + d_2$, so that the interesting part of
covariance matrix is dimensioned $d_1 \times d_2$ (plus $d$ diagonal terms).
For large values
of $d$, accumulating $d + d_1 \times d_2$ sums instead of $d (d + 1)/2$
can mean a big difference in speed and memory consumption, especially
in case $d_1 \ll d_2$ (or $d_1 \gg d_2$).
The \cname{CrossCovarianceAccumulator} allows
its users to do just that: it accumulates cross terms between two
arrays with $d_1$ and $d_2$ elements but not the cross terms between
elements of the same array. The covariances are estimated by a single-pass
method using a~formula which can suffer from subtractive
cancellation\footnotemark[5]. Use with care.
\cname{SampleAccumulator} --- This class stores all values
it gets in an internal buffer. Whenever mean, standard deviation,
or some quantile function values are needed for the accumulated sample,
they are calculated from the stored values using numerically sound techniques.
Mean and standard deviation are calculated according to
Eqs.~\ref{eq:samplemean} and~\ref{eq:samplestdev}.
Covariance and correlations can be calculated for a pair of
\cname{SampleAccumulator} objects with the same number of stored elements
by corresponding methods of the class.
Empirical CDF and empirical quantile calculations
are handled by calling \cname{empiricalCdf} and \cname{empiricalQuantile}
functions internally, preceded by data sorting.
This class is persistent and can be used as the bin content template
parameter for \cname{HistoND}.
\cname{WeightedSampleAccumulator} --- Similar to \cname{SampleAccumulator},
but intended for calculating various statistics for samples
of weighted points.
Kendall's $\tau$ and Spearman's $\rho$
rank correlation coefficients can be estimated
by functions \cname{sampleKendallsTau} and \cname{sampleSpearmansRho},
respectively. These functions are declared in the header files
``npstat/stat/kendallsTau.hh'' and ``npstat/stat/spearmansRho.hh'',
respectively.
\subsection{Functions for Use with Binned Data}
\cname{histoMean} (header file ``npstat/stat/histoStats.hh'') --- This
function calculates the histogram center of mass according to
Eq.~\ref{eq:weightedmean} with bin contents
used as weights for bin center coordinates.
Note that the code of this function does not enforce non-negativity of all bin
values, and the result of~\ref{eq:weightedmean} will not necessarily
make a lot of sense in case negative weights are present.
In order to check that all bins are non-negative
and that there is at least one positive bin, you can call the \cname{isDensity}
method of the \cname{ArrayND} class on the histogram
bin contents. The \cname{histoMean}
function is standalone and not a member of the \cname{HistoND} class
because Eq.~\ref{eq:weightedmean} is not going to make sense for all
possible bin types.
\cname{histoCovariance} (header file ``npstat/stat/histoStats.hh'') --- This
function estimates the population covariance matrix for histogrammed data
according to Eq.~\ref{eq:weightedcov}, with bin contents
used as weights for bin center coordinates.
As for \cname{histoMean}, results
produced by \cname{histoCovariance}
will not make much sense in case negative bin values are present.
\cname{arrayCoordMean} (header file ``npstat/stat/arrayStats.hh'') --- Similar
to \cname{histoMean} but the bin values are provided in an \cname{ArrayND}
object and bin locations are specified by additional arguments.
Only uniform bin spacing is supported by this function
(unlike \cname{histoMean} which simply gets its bin centers from
the argument histogram).
\cname{arrayCoordCovariance} (header file ``npstat/stat/arrayStats.hh'')
--- Similar to \cname{histoCovariance} but the bin values are provided
in an \cname{ArrayND} object and bin locations are specified by additional
arguments. Uniform binning only.
\cname{arrayShape1D} (header file ``npstat/stat/arrayStats.hh'') --- Estimates
population mean, standard deviation, skewness, and kurtosis for
one-dimensional histogrammed data with equidistant bins.
Bin values $b_i$ used as weights are provided in an \cname{ArrayND}
object and bin locations $x_i$ are specified by additional arguments.
The mean and the standard deviations squared
are calculated according to Eqs.~\ref{eq:weightedmean}
and~\ref{eq:weightedcov} reduced to one dimension.
The skewness is estimated as
\begin{equation}
s = \frac{N_{eff}^2}{(N_{eff} - 1) (N_{eff} - 2) \sigma^3} \
\frac{\sum_{i=0}^{N_b-1} b_i (x_i - \mu)^3}{\sum_{i=0}^{N_b-1} b_i},
\end{equation}
with $N_{eff}$ defined by Eq.~\ref{eq:neff}. The kurtosis is found from Eq.~\ref{eq:kurtosis} in which $N$
is replaced by $N_{eff}$ and, instead of Eq.~\ref{eq:kurtosisg2},
$g_2$ is determined from
\begin{equation}
g_2 = \sum_{i=0}^{N_b-1} b_i \ \frac{\sum_{i=0}^{N_b-1} b_i (x_i - \mu)^4}{\left( \sum_{i=0}^{N_b-1} b_i (x_i - \mu)^2 \right)^2} - 3.
\end{equation}
\cname{arrayQuantiles1D} (header file ``npstat/stat/arrayStats.hh'')
--- This function treats
one-dimensional arrays as histograms and determines the values
of the quantile function for a~given set of cumulative density values
(after normalizing the histogram area to 1). Each bin is considered
to have uniform probability density between its edges. If you need
to perform this type of calculation more than once with the same
array, it will be more efficient to construct a \cname{BinnedDensity1D}
object and then to use its \cname{quantile} method instead of calling
the \cname{arrayQuantiles1D} function multiple times.
\cname{arrayEntropy} (header file ``npstat/stat/arrayStats.hh'')
--- This function calculates
\begin{equation}
\label{eq:entropy}
H = -\frac{1}{N_b} \sum_{i=0}^{N_b-1} b_i \ln b_i
\end{equation}
This formula is appropriate for arrays that tabulate probability density
values on uniform grids. To convert the result into the actual entropy
of the density, multiply it by the volume of the distribution support.
\subsection{ArrayND Projections}
The following set of classes works with \cname{ArrayND} class
and assists in making array ``projections''.
Projections reduce array dimensionality
by calculating certain array properties over projected indices.
For example, one might want to create a one-dimensional array, $a$,
from a three-dimensional array, $b$, by summing all array elements
with the given second index: $a_j = \sum_{i, k} b_{ijk}$ (this means
that the first and the third indices of the array are ``projected''). This
and other similar operations can be performed by the \cname{project}
method of the \cname{ArrayND} class. What is done with the
array elements when the projection is performed can be specified
by the ``projector'' argument of the \cname{project} method. In particular,
this argument can be an object which performs some statistical
analysis of the projected elements.
\cname{ArrayMaxProjector} (header file ``npstat/stat/ArrayProjectors.hh'')
--- For each value of the array indices which are not projected,
finds the maximum array element for all possible values of
projected indices.
\cname{ArrayMinProjector} (header file ``npstat/stat/ArrayProjectors.hh'')
--- Finds the minimum array element for all possible values of
projected indices.
\cname{ArraySumProjector} (header file ``npstat/stat/ArrayProjectors.hh'')
--- Sums all array values in the iteration over projected indices.
\cname{ArrayMeanProjector} (header file ``npstat/stat/ArrayProjectors.hh'')
--- Calculates the mean array value over projected indices.
\cname{ArrayMedianProjector} (header file ``npstat/stat/ArrayProjectors.hh'')
--- Calculates the median array value over projected indices.
\cname{ArrayStdevProjector} (header file ``npstat/stat/ArrayProjectors.hh'')
--- Calculates the standard deviation of array values encountered
during the iteration over projected indices.
\cname{ArrayRangeProjector} (header file ``npstat/stat/ArrayProjectors.hh'')
--- Calculates the ``range'' of array values over projected indices.
``Range'' is defined here as the difference between 75\supers{th} and
25\supers{th} percentiles of the sample values divided by the
distance between 75\supers{th} and 25\supers{th} percentiles of
the Gaussian distribution with $\sigma = 1$. Therefore, ``range'' can
be used as a robust estimate of the standard deviation.
All ``projectors'' are derived either from \cname{AbsArrayProjector} class
in case the calculated quantity depends on the array indices
(header file ``npstat/nm/AbsArrayProjector.hh'')
or from \cname{AbsVisitor} class in case it is sufficient
to know just the element values (header file ``npstat/nm/AbsVisitor.hh'').
Naturally, the user
can supply his/her own projector implementations derived
from these base classes for use
with the \cname{project} method of the \cname{ArrayND} class.
\section{Statistical Distributions}
\label{sec:distros}
A number of univariate and multivariate statistical distributions
are supported by the NPStat package. All implementations of
univariate continuous distributions share a common interface and can be
used interchangeably in a variety of statistical algorithms.
A similar approach has been adopted for univariate discrete and
multivariate continuous distributions.
\subsection{Univariate Continuous Distributions}
All classes which represent univariate continuous distributions
inherit from the \cname{AbsDistribution1D} abstract base class. These classes
must implement methods \cname{density} (probability density function),
\cname{cdf} (cumulative distribution function), \cname{exceedance}
(exceedance probability is just 1 $-$ cdf, but direct application
of this formula is often
unacceptable in numerical calculations due to subtractive cancellation),
and \cname{quantile} (the quantile function, inverse of cdf). For
a large number of statistical distributions, the probability density
function looks like $\frac{1}{\sigma}\, p\left( \frac{x - \mu}{\sigma} \right)$, where
$\mu$ and $\sigma$ are the distribution location and scale parameters, respectively.
Distributions of this kind should inherit from the base class
\cname{AbsScalableDistribution1D} which handles proper
scaling and shifting of the argument (this base class itself inherits
from \cname{AbsDistribution1D}, and it is declared in the same
header file ``npstat/stat/AbsDistribution1D.hh'').
Some of the standard univariate continuous distributions implemented
in NPStat are listed in Table~\ref{table:distros1d}. A few special
distributions less frequently encountered in the statistical
literature are described in more detail in the following subsections.
Of course, user-developed classes inheriting from \cname{AbsDistribution1D}
or \cname{AbsScalableDistribution1D} can also be employed with all
NPStat functions and classes that take an instance of
\cname{AbsDistribution1D} as one of their parameters.
\begin{longtable}{|c|c|c|}
% \captionsetup{width=.9\textwidth}
\caption{Continuous univariate distributions included in NPStat.
$P_n(x)$ are the Legendre polynomials.
Parameters $\mu$ and $\sigma$ are not shown for scalable distributions.
When not given explicitly, the normalization constant $\cal{N}$
ensures that
$\int_{-\infty}^{\infty} p(x) dx = 1$. Most of the classes
listed in this table are declared in the header file
``npstat/stat/Distributions1D.hh''. If the distribution
is not declared in that header then it has a dedicated header
with the same name, {\it e.g.}, ``npstat/stat/TruncatedDistribution1D.hh''
for \cname{TruncatedDistribution1D}.}
\label{table:distros1d} \\
\hline \multicolumn{1}{|c|}{Class Name} &
\multicolumn{1}{c|}{$p(x)$} &
\multicolumn{1}{c|}{Scalable?} \\ \hline \hline
\endfirsthead
\multicolumn{3}{c}%
{\tablename\ \thetable{} --- continued from the previous page} \\
\hline \multicolumn{1}{|c|}{Class Name} &
\multicolumn{1}{c|}{$p(x)$} &
\multicolumn{1}{c|}{Scalable?} \\ \hline \hline
\endhead
\multicolumn{3}{|r|}{{Continued on the next page}} \\ \hline
\endfoot
\hline
\endlastfoot
\cname{Uniform1D} & $I(0 \le x \le 1)$ & yes \\ \hline
\cname{IsoscelesTriangle1D} & $(1 - |x|) I(-1 \le x \le 1)$ & yes \\ \hline
\cname{Exponential1D} & $e^{-x} I(x \ge 0)$ & yes \\ \hline
\cname{Quadratic1D} & $(1 + a P_1(2 x - 1) + b P_2(2 x - 1)) \,I(0 \le x \le 1)$ & yes \\ \hline
\cname{LogQuadratic1D} & ${\cal{N}} \exp (a P_1(2 x - 1) + b P_2(2 x - 1)) \,I(0 \le x \le 1)$ & yes \\ \hline
\cname{Gauss1D} & $\frac{1}{\sqrt{2 \pi}} e^{-x^2/2}$ & yes \\ \hline
\cname{GaussianMixture1D} & $\frac{1}{\sqrt{2 \pi}} \sum_i \frac{w_i}{\sigma_i} e^{-(x-\mu_i)^2/(2 \sigma_i^2)}, \ \mbox{where} \ \sum_i w_i = 1$ & yes \\ \hline
\cname{TruncatedGauss1D} & $\frac{\cal{N}}{\sqrt{2 \pi}} e^{-x^2/2} \, I(-n_{\sigma} \le x \le n_{\sigma})$ & yes \\ \hline
\cname{MirroredGauss1D} & \begin{minipage}{0.52\linewidth}
\vskip-3mm \begin{eqnarray*}
I(0 \le x \le 1)\, \frac{1}{\sqrt{2 \pi} s} \sum_{i=-\infty}^{\infty} &
\left[ e^{-(x - m + 2\,i)^2/(2 s^2)} + \right.\\
& \left.e^{-(x + m + 2\,i)^2/(2 s^2)}\right],
\end{eqnarray*} \vskip-1mm
where $0 \le m \le 1$ and $s > 0$ \newline \vskip-8mm $\mbox{}$ \end{minipage} & yes \\ \hline
\cname{BifurcatedGauss1D} & \begin{minipage}{0.52\linewidth}
\vskip-2mm \begin{eqnarray*}
{\cal{N}} & \left[e^{-x^2/(2 \alpha)} I(-n_{\sigma,L} \le x < 0) \, + \right.\\
& \left. e^{-x^2/(2 (1 - \alpha))} I(0 \le x < n_{\sigma,R})\right]
\end{eqnarray*} \vskip0mm \end{minipage} & yes \\ \hline
\cname{UGaussConvolution1D} & $\frac{1}{\sqrt{2 \pi} (b - a)} \, e^{-x^2/2} * I(a \le x \le b)$ & yes \\ \hline
\cname{SymmetricBeta1D} & ${\cal{N}} \, (1 - x^2)^p \, I(-1 < x < 1)$ & yes \\ \hline
\cname{Beta1D} & ${\cal{N}} \, x^{\alpha - 1} (1 - x)^{\beta - 1} \, I(0 < x < 1)$ & yes \\ \hline
\cname{Gamma1D} & ${\cal{N}} \, x^{\alpha - 1} e^{-x} \, I(x > 0)$ & yes \\ \hline
\cname{Pareto1D} & $\alpha x^{-\alpha - 1} I(x \ge 1)$ & yes \\ \hline
\cname{Huber1D} & ${\cal{N}} \, [e^{-x^2/2} I(|x| \le a) + e^{\, a (a/2 - |x|)} (1 - I(|x| \le a))] $ & yes \\ \hline
\cname{Cauchy1D} & $\pi^{-1} (1 + x^2)^{-1}$ & yes \\ \hline
\cname{StudentsT1D} & ${\cal{N}} \, (1 + x^2/N_{dof})^{-(N_{dof} + 1)/2}$ & yes \\ \hline
\cname{Moyal1D} & $\frac{1}{\sqrt{2 \pi}} e^{-(x + e^{-x})/2}$ & yes \\ \hline
\cname{Logistic1D} & $e^{-x} \,(1 + e^{-x})^{-2}$ & yes \\ \hline
\cname{Tabulated1D} & \begin{minipage}{0.52\linewidth}
\vskip1mm
Defined by a table of equidistant values on
the [0, 1] interval, interpolated by
a~polynomial (up to cubic). The first table
point is at $x = 0$ and the last is at $x = 1$. \newline \vskip-8mm $\mbox{}$
% Normalization is computed automatically.
\end{minipage} & yes \\ \hline
\cname{BinnedDensity1D} & \begin{minipage}{0.52\linewidth}
\vskip1mm
Defined by a table of $N$ equidistant values
on the [0, 1] interval, with optional linear
interpolation. The first table point is at
$x = 1/(2 N)$ and the last is at $x = 1 - 1/(2 N)$.
Useful for converting 1-d histograms into distributions.
\end{minipage} & yes \\ \hline
\cname{QuantileTable1D} & \begin{minipage}{0.52\linewidth}
\vskip1mm
Defined by a table of $N$ equidistant quantile function values
on the [0, 1] interval with linear interpolation between these values
(so that density looks like a histogram with equal area bins).
The first table point is at
$x = 1/(2 N)$ and the last is at $x = 1 - 1/(2 N)$.
Useful for converting data samples into distributions
by sampling empirical quantiles.
\end{minipage} & yes \\ \hline
\cname{DistributionMix1D} & $\sum_i w_i p_i(x), \ \mbox{where} \ \sum_i w_i = 1$ & no \\ \hline
\cname{RatioOfNormals} & \begin{minipage}{0.52\linewidth}
\vskip1mm
Ratio of two correlated normal random variables, as described in~\cite{ref:ratioofnormals}.
\end{minipage} & no \\ \hline
\cname{LeftCensoredDistribution} & $f \, p\sub{other}(x) + (1 - f) \, \delta(x - x_{-\infty})$ & no \\ \hline
\cname{RightCensoredDistribution} & $f \, p\sub{other}(x) + (1 - f) \, \delta(x - x_{+\infty})$ & no \\ \hline
\cname{TruncatedDistribution1D} & ${\cal{N}} \, p\sub{other}(x) \, I(x\sub{min} \le x \le x\sub{max})$ & no \\ \hline
\cname{TransformedDistribution1D} & $p\sub{other}(y) \left| \frac{dy}{dx} \right|$ for monotonic $y(x)$ & no \\
\end{longtable}
\subsection{Johnson Curves}
The density functions of the Johnson
distributions~\cite{ref:johnson, ref:johnsonbook, ref:hahnshap}
are defined as follows:
\begin{equation}
S_{U}\ \mbox{(unbounded)}:\ \ p(x) = \frac{\delta}{\lambda \sqrt{2 \pi \left(1 +
\left(\frac{x - \xi}{\lambda}\right)^{2}\right)}} \,e^{-\frac{1}{2}
\left(\gamma + \delta\,\mbox{\scriptsize sinh}^{-1}\left(\frac{x-\xi}
{\lambda}\right)\right)^{2}}
\label{eq:johnsu}
\end{equation}
\begin{equation}
S_{B}\ \mbox{(bounded)}:\ \ \ p(x) = \frac{\delta}{\lambda \sqrt{2 \pi}
\left(\frac{x - \xi}{\lambda}\right)\left(1 - \frac{x - \xi}{\lambda}\right)}
\,e^{-\frac{1}{2}\left( \gamma + \delta \log \left( \frac{x-\xi}
{\xi + \lambda - x}\right) \right)^{2}} I(\xi < x < \xi + \lambda)
\label{eq:johnsb}
\end{equation}
They are related to the normal distribution, $N(\mu, \sigma)$,
by simple variable
transformations. Variable $z$ distributed according to $N(0, 1)$
can be
obtained from Johnson's variates $x$ by transformation
$z = \gamma + \delta f\left(\frac{x-\xi}{\lambda}\right)$:
\begin{displaymath}
\begin{array}{rlll}
f(y) = & \mbox{sinh}^{-1}(y) & & S_{U}\ \mbox{curves}\\
f(y) = & \log \left(\frac{y}{1-y}\right) & (0 < y < 1) & S_{B}\ \mbox{curves}
\end{array}
\end{displaymath}
Both densities become arbitrarily close to the lognormal density (for which
variable $z = \gamma + \delta \log (x - \xi)$ is normally distributed)
in the limit $\gamma \rightarrow \infty$
and to the normal distribution in the limit $\delta \rightarrow \infty$
($\xi$ and $\lambda$ have to be adjusted accordingly). Johnson's
parameterization of the lognormal density is
\begin{equation}
p(x) = \frac{\delta}{\sqrt{2 \pi} (x - \xi)} e^{-\frac{1}{2} (\gamma + \delta \log (x - \xi))^2} I(x > \xi)
\end{equation}
Together with their limiting cases, Johnson's $S_{U}$ and $S_{B}$
distributions attain {\it all possible values of skewness and kurtosis.}
Unfortunately, parameters $\xi, \lambda, \gamma$, and $\delta$
of $S_{U}$ and $S_{B}$ in
Eqs.~\ref{eq:johnsu} and~\ref{eq:johnsb} have no direct
relation to each other.
Crossing the lognormal boundary requires a discontinuous change in
the parameter values which is
rather inconvenient for practical data fitting purposes. This
problem can be alleviated by reparameterizing the functions
in terms of mean $\mu$, standard deviation $\sigma$,
skewness $s$, and kurtosis $k$, so that the corresponding
curve type and the original parameters $\xi, \lambda, \gamma, \delta$
are determined numerically. Examples of Johnson's density functions
are shown in Fig.~\ref{johns_exa}.
\begin{figure}[t]
\begin{center}
\includegraphics[width=0.6\textwidth]{johnson_examples.pdf}
\caption{ Black: $s = 0, ~k = 3$, Gaussian. Red: $s = -1.5, ~k = 10$, $S_{U}$.
Blue: $s = 0.5, ~ k = 2$, $S_{B}$.
For all three curves, the mean is 0 and the standard deviation is 1.}
\label{johns_exa}
\end{center}
\end{figure}
Johnson's $S_{U}$ and $S_{B}$ curves are implemented in NPStat with classes
\cname{JohnsonSu} and \cname{JohnsonSb}, respectively (header file
``npstat/stat/JohnsonCurves.hh''). Both of these
distributions are parameterized by $\mu$, $\sigma$, $s$, and $k$,
with an automatic internal conversion into $\xi, \lambda, \gamma, \delta$.
An original algorithm was developed to perform this conversion,
based in part on ideas from Refs.~\cite{ref:draper, ref:hill}.
The lognormal distribution parameterized by $\mu$, $\sigma$, and $s$
is implemented by the \cname{LogNormal} class (header file
``npstat/stat/Distributions1D.hh''). The class \cname{JohnsonSystem}
(header file ``npstat/stat/JohnsonCurves.hh'') can be used when automatic
switching between $S_{U}$, $S_{B}$,
lognormal, and Gaussian distributions is desired.
\subsection{Composite Distributions}
Composite distributions are built out of two or more
component distributions. One of these component distributions is
arbitrary while
all others must have a density supported on the interval
[0, 1]. Suppose, $G_k(x), \ k = 1, 2, 3, ...$ are cumulative
distribution functions with corresponding densities $g_i(x)$
proportional to $I(0 \le x \le 1)$.
Then, if $H(x)$ is a cumulative distribution function
with density $h(x)$, $F_1(x) = G_1(H(x))$ is also a cumulative
distribution function with density $f_1(x) = h(x) \, g_1(H(x))$.
Similarly, $F_2(x) = G_2(F_1(x))$ is a cumulative distribution with density
$f_2(x) = f_1(x) g_2(F_1(x)) = h(x) \, g_1(H(x)) g_2(G_1(H(x)))$.
We can now construct $F_3(x) = G_3(F_2(x))$ and so on. This sequence can
be terminated after an arbitrary number of steps.
Note that $f_1(x) = h(x)$ in case $g_1(x)$ is a uniform
probability density. Small deviations from uniformity in $g_1(x)$
will lead to corresponding small changes in $f_1(x)$.
The resulting density model can be quite flexible, even if
the component distributions $H(x)$ and $G_1(x)$ are
simple. Therefore, data samples with complicated sets
of features can be modeled as follows: construct
an approximate model $h(x)$ first, even if it does not fit
the sample quite right.
Then transform the data points $x_i$ to the [0, 1] interval
according to $y_i = H(x_i)$. The density of
points $y_i$\footnote{This density is called ``relative density''
in the statistical literature~\cite{ref:handcock}. Note
that $h(x)$ should be selected in such a way that the ratio between
the unknown population density of the sample under study and $h(x)$ should
be bounded for all $x$. If it is not, the relative density
will usually be unbounded, and its subsequent representation
by polynomials or log-polynomials will not lead to a consistent
estimate. Johnson curves often work reasonably well as $h(x)$.}
can now be
fitted to another parametric distribution (including composite one)
or it can be modeled by nonparametric techniques~\cite{ref:kdetransform}.
Due to the elimination of the boundary bias, the LOrPE density
estimation method described in Section~\ref{sec:lorpe}
becomes especially useful in the latter approach.
Once the appropriate $G(y)$ is constructed (parametrically
or not), the resulting composite density will provide
a good fit to the original set of points $x_i$\footnote{There is,
of course, a close connection between this density modeling
approach and a number of goodness-of-fit techniques based on
the comparison of the empirical cdf
with the cdf of the fitted density. For example, using
Legendre polynomials to model $\log (g_1(x))$, as in the
\cname{LogQuadratic1D} density, directly
improves the test criterion used in the Neyman's
smooth test for goodness-of-fit~\cite{ref:thas, ref:smoothtest}.}.
The composite distributions are implemented in NPStat
with the \cname{CompositeDistribution1D} class.
One composite distribution can be used to construct
another, thus allowing for composition chains of
arbitrary length, as described at the beginning of
this subsection. Several pre-built distributions of this type
are included in the NPStat package (they are declared in the header file
``npstat/stat/CompositeDistros1D.hh''). Flexible
models potentially capable of fitting wide varieties
of univariate data samples are implemented by the \cname{JohnsonLadder}
and \cname{BinnedCompositeJohnson} classes. In both of these,
Johnson curves are used as $H(x)$. \cname{JohnsonLadder}
takes an arbitrarily long sequence of parametric \cname{LogQuadratic1D}
distributions for $G_k(x)$ while \cname{BinnedCompositeJohnson}
uses a single nonparametric \cname{BinnedDensity1D} as $G_1(x)$.
\subsection{Univariate Discrete Distributions}
Classes which represent univariate discrete distributions
inherit from the \cname{AbsDiscreteDistribution1D} abstract base class.
The interface defined by this base class differs in a number of ways
from the \cname{AbsDistribution1D} interface.
Instead of the method \cname{density}
used with arguments of type ``double'',
discrete distributions have the method \cname{probability} defined for
the arguments of type ``long int''.
The methods \cname{cdf} and \cname{exceedance}
have the same signatures as corresponding methods of continuous distributions,
but the \cname{quantile} function returns long integers. The
method \cname{random} generates long integers as well.
Discrete univariate distributions which can be trivially shifted should
inherit from the \cname{ShiftableDiscreteDistribution1D} base class which
handles the shift operation. There is, however, no operation analogous
to scaling of continuous distributions.
Univariate discrete distributions implemented
in NPStat are listed in Table~\ref{table:discrdistros1d}.
\begin{table}[ht!]
\caption{Discrete univariate distributions included in NPStat.
The location parameter is not shown explicitly for shiftable distributions.
Classes listed in this table are declared in the header file
``npstat/stat/DiscreteDistributions1D.hh''.}
\label{table:discrdistros1d}
\begin{center}
\noindent\begin{tabular}{|c|c|c|} \hline
Class Name & $p(n)$ & Shiftable? \\ \hline\hline
\cname{DiscreteTabulated1D} & \begin{minipage}{0.52\linewidth}
\vskip1mm
Defined by a table of probability values.
Normalization is computed automatically.
\end{minipage} & yes \\ \hline
\cname{Poisson1D} & $\frac{\lambda^n}{n!} e^{-\lambda}$ & no \\ \hline
\end{tabular}
\end{center}
\end{table}
Mixtures of discrete univariate distributions with finite support
can be implemented using the function \cname{pooledDiscreteTabulated1D}
(header file ``npstat/stat/DiscreteDistributions1D.hh'').
\subsection{Multivariate Continuous Distributions}
All classes which represent multivariate continuous distributions
inherit from the \cname{AbsDistributionND} abstract base class.
These classes must implement methods \cname{density} (probability
density function) and \cname{unitMap} (mapping from the unit
$d$-dimensional cube, $U_d$, into the density support region).
Densities that can be shifted and scaled in each coordinate separately
should be derived from the \cname{AbsScalableDistributionND} base class
(which itself inherits from \cname{AbsDistributionND}).
Classes representing densities that look like $p({\bf x}) = \prod q(x_i)$,
where $q(x)$ is some one-dimensional density,
should be derived from the \cname{HomogeneousProductDistroND} base class
(the three base classes just mentioned
are declared in the ``npstat/stat/AbsDistributionND.hh'' header file).
Simple classes inheriting from \cname{AbsDistributionND}
are listed in Table~\ref{table:distrosnd}.
\begin{table}[ht!]
\caption{Continuous multivariate distributions included in NPStat.
These distributions are predominantly intended for use as multivariate
density estimation kernels.
Shifts and scale factors are not shown for scalable distributions.
Here, ``scalability'' means
the ability to adjust the shift and scale parameters in each
dimension, not the complete bandwidth matrix.
When not given explicitly, the normalization constant $\cal{N}$
ensures that
$\int p({\bf x}) d{\bf x} = 1$. All of these distributions
are declared in the header file ``npstat/stat/DistributionsND.hh''
with exception of \cname{ScalableGaussND} which has its own header.}
\label{table:distrosnd}
\begin{center}
\noindent\begin{tabular}{|c|c|c|} \hline
Class Name & $p({\bf x})$ & Scalable? \\ \hline\hline
\cname{ProductDistributionND} & $\prod_{i=1}^{d} p_i(x_i)$ &
\begin{minipage}{0.13\linewidth}
depends on components
\end{minipage} \\ \hline
\cname{UniformND} & $\prod_{i=1}^{d} I(0 \le x_i \le 1)$ & yes \\ \hline
\cname{ScalableGaussND} & $(2 \pi)^{-d/2} e^{-|{\bf x}|^2/2}$ & yes \\ \hline
\cname{ProductSymmetricBetaND} & ${\cal{N}} \prod_{i=1}^{d} (1 - x_i^2)^p I(-1 < x_i < 1)$ & yes \\ \hline
\cname{ScalableSymmetricBetaND} & ${\cal{N}} (1 - |{\bf x}|^2)^p I(|{\bf x}| < 1)$ & yes \\ \hline
\cname{ScalableHuberND} & ${\cal{N}} \, [e^{-|{\bf x}|^2/2} I(|{\bf x}| \le a) + e^{\, a (a/2 - |{\bf x}|)} (1 - I(|{\bf x}| \le a))]$ & yes \\ \hline
\cname{RadialProfileND} & \begin{minipage}{0.52\linewidth}
\vskip1mm
Arbitrary centrally-symmetric density.
Defined by its radial profile:
a table of equidistant values on
the [0, 1] interval, interpolated by
a~polynomial (up to cubic). The first table
point is at $|{\bf x}| = 0$ and the last is at
$|{\bf x}| = 1$. For $|{\bf x}| > 1$ the density is 0.
Normalization is computed automatically.
\end{minipage} & yes \\ \hline
\cname{BinnedDensityND} & \begin{minipage}{0.52\linewidth}
\vskip1mm
Defined by a table of values
on $U_d$, equidistant in each dimension,
with optional multilinear
interpolation. In each dimension,
the first table point is at
$x_i = 1/(2 N_i)$ and the last is at
$x_i = 1 - 1/(2 N_i)$.
Useful for converting multivariate
histograms into distributions.
\end{minipage}
& yes \\ \hline
\end{tabular}
\end{center}
\end{table}
\subsection{Copulas}
For any continuous multivariate density $p({\bf x}|{\bf a})$
in a $d$-dimensional space $X$ of random variables ${\bf x} \in X$
depending on a vector of parameters ${\bf a}$,
we define $d$ marginal densities $p_i(x_i|{\bf a})$, $i = 1, \ldots, d$ by
$$
p_i(x_i|{\bf a}) \equiv \int p(x_1, \ldots, x_d |{\bf a}) \mathop{\prod_{j=1}^d}_{j \ne i} d x_j
$$
with their corresponding cumulative distribution functions
$$
F_i(x_i|{\bf a}) \equiv \int_{-\infty}^{x_i} p_i(\tau|{\bf a}) d \tau .
$$
For each point ${\bf x} \in X$, there is a corresponding
point ${\bf y}$ in a unit $d$-dimensional
cube $U_d$ such that $y_i(x_i) \equiv F_i(x_i|{\bf a})$, $i = 1, \ldots, d$. The {\it copula density}
is defined on $U_d$ by
\begin{equation}
c({\bf y}({\bf x})|{\bf a}) \equiv \frac{p({\bf x}|{\bf a})}{\prod_{i=1}^{d} p_i(x_i|{\bf a})}.
\label{eq:copuladef}
\end{equation}
Copula density (as well as its corresponding multivariate
distribution function $C({\bf y}|{\bf a})$, or just {\it copula})
contains all information
about mutual dependence of individual variables $x_i$.
It can be shown that all copula marginals are uniform and, conversely,
that any distribution on $U_d$ whose marginals are uniform
is a copula~\cite{ref:copulabook}.
Naturally, when the copula and the marginals of some multivariate density
are known, the density itself is expressed by
\begin{equation}
p({\bf x}|{\bf a}) = c({\bf y}({\bf x})|{\bf a}) \prod_{i=1}^{d} p_i(x_i|{\bf a}).
\end{equation}
Note that $c({\bf y}|{\bf a}) = 1$ for all ${\bf y}$ if and only if all $x_i$
are independent and the density $p({\bf x}|{\bf a})$ is fully
factorizable.
The NPStat package allows its users to model multivariate continuous
distributions using copula and marginals with the aid of
\cname{CompositeDistributionND} class. An object of this class is
normally constructed out of user-provided copula and marginals.
\cname{CompositeDistributionND} object can also be constructed
from histogram bins. Several standard copulas are
implemented: Gaussian, Student's-$t$, and
Farlie-Gumbel-Morgenstern~\cite{ref:copulabook}.
The corresponding class names are \cname{GaussianCopula},
\cname{TCopula}, and \cname{FGMCopula} (these classes are
declared in the header file ``npstat/stat/Copulas.hh'').
Empirical multivariate copula densities can be constructed
as follows. Let's assume that there are no coincident
point coordinates in each dataset variable. We can sort all
$N$ elements of the data set in the increasing order in each coordinate
separately and assign to each data point $i$ a multi-index
$\{m_{i0}, ..., m_{i,d-1}\}$. For each dimension $k$, $m_{ik}$
represents the number of the point $i$ in the sequence ordered
by the dimension $k$ coordinate, so that
$0 \le m_{ik} < N$. The empirical copula density, $\mbox{ECD}({\bf x})$,
is then defined by
\begin{equation}
\mbox{ECD}({\bf x}) = \sum_{i=0}^{N-1} \prod_{k=0}^{d-1} \delta\left(x_k - \frac{m_{ik} + 1/2}{N}\right),
\end{equation}
where $\delta(x)$ is the Dirac delta function. To get a better idea about
locations of these delta functions, think of
an $N \times N \times ... \times N$ uniform grid in $d$ dimensions on
which $N$ points are placed in such a way that no two
points share the same coordinate in any of the dimensions.
In two dimensions, this is like the placement of chess pieces
in the eight queens puzzle~\cite{ref:eightqueens}
which is simplified to use rooks instead of queens~\cite{ref:eightrooks}.
In the NPStat package, empirical copula densities can be approximated by
histograms defined on $U_d$. Construction of empirical copula histograms
can be performed by functions \cname{empiricalCopulaHisto}
(header file ``npstat/stat/empiricalCopulaHisto.hh'') and
\cname{empiricalCopulaDensity}
(header file ``npstat/stat/empiricalCopula.hh''). The integrated empirical copulas can be
constructed on uniform grids by the \cname{calculateEmpiricalCopula}
function (header ``npstat/stat/empiricalCopula.hh'').
The Spearman's rank correlation coefficient, $\rho$, can
be estimated from two-dimensional empirical copulas using functions
\cname{spearmansRhoFromCopula} and \cname{spearmansRhoFromCopulaDensity}
declared in the header file ``npstat/stat/spearmansRho.hh''.
These functions evaluate the following integrals numerically:
\begin{align}
\rho &= 12 \int_0^1 \int_0^1 C({\bf y}|{\bf a}) dy_0 dy_1 - 3 & \mbox{\ } & \mbox{used by \textsf{spearmansRhoFromCopula}} \\
\rho &= 12 \int_0^1 \int_0^1 c({\bf y}|{\bf a}) y_0 y_1 dy_0 dy_1 - 3 & \mbox{\ } & \mbox{used by \textsf{spearmansRhoFromCopulaDensity}}
\end{align}
While these two formulas are identical mathematically~\cite{ref:copulabook},
the approximations used
in numerical integration and differentiation will usually lead to slightly
different results returned by these functions for some copula and its
corresponding density.
The Kendall's rank correlation coefficient, $\tau$,
can be estimated from the empirical copulas using the
function \cname{kendallsTauFromCopula} declared in
the header ``npstat/stat/kendallsTau.hh''.
This function evaluates the following formula numerically:
\begin{equation}
\tau = 4 \int_0^1 \int_0^1 C({\bf y}|{\bf a}) c({\bf y}|{\bf a}) dy_0 dy_1 - 1.
\end{equation}
The mutual information between variables can be estimated from
copula densities represented on uniform grids according
to Eq.~\ref{eq:entropy}\footnote{Mutual information is
simply the negative of the copula entropy~\cite{ref:copulaentropy}.}.
Decomposition of multivariate densities into the marginals and the
copula is useful not only for a subsequent analysis of mutual
dependencies of the variates but also for implementing nonparametric
density interpolation, as described in the next section.
\section{Nonparametric Interpolation of Densities}
\label{sec:densityinterpolation}
There are numerous problems in High Energy Physics in which construction of some
probability density requires extensive simulations and, due to the CPU
time limitations, can only be performed for a~limited number of
parameter settings. This is typical, for example, for ``mass
templates'' which depend on such parameters as the pole mass of the
particle under study, sample background fraction, detector jet energy
scale, {\it etc}. It is often desirable to have the capability to
evaluate such a density for arbitrary parameter values.
It is sometimes possible to address this problem by postulating
an explicit parametric
model and fitting that model to the sets of simulated distributions.
However, complex dependence of the distribution shapes on the parameter
values often leads to infeasibility of this approach. Then it becomes
necessary to interpolate the densities without postulating a concrete
model.
\subsection{Interpolation of Univariate Densities using Quantile Functions}
\label{sec:interpolatebyquant}
As it was shown in~\cite{ref:read}, a general
interpolation of one-dimensional distributions which
leads to very natural-looking results
can be achieved by
interpolating the {\it quantile function} (defined as the
inverse of the cumulative distribution function). While study~\cite{ref:read}
considers linear interpolation in one-dimensional parameter space, it is obvious that
similar weighted average interpolation can be easily constructed in
multivariate parameter settings and with higher order interpolation
schemes. In general, the interpolated quantile function for
an~arbitrary parameter value ${\bf a}$ is expressed by
\begin{equation}
\label{eq:quantileinterp1d}
q(y|{\bf a}) = \sum_{j=1}^m w_j q(y|{\bf a}_j),
\end{equation}
where the weighted quantile functions are summed at the $m$ ``nearby''
parameter settings ${\bf a}_j$ for which the distribution was
explicitly constructed. The weights $w_j$ are normalized by
$\sum_{j=1}^m w_j = 1$. Their precise values depend on the location
of ${\bf a}$ w.r.t. nearby ${\bf a}_j$ and on the interpolation scheme used.
Simple high-order interpolation schemes can be constructed
in one-dimensional parameter space by using Lagrange interpolating
polynomials
to determine the weights\footnote{To determine the interpolated value of a function using
Lagrange interpolating polynomials, weights are assigned to function
values at a set of nearby points according to a rule
published by Lagrange in 1795. The weights depend only on the
abscissae of the interpolated points and on the coordinate
at which the function value is evaluated but not on the
interpolated function values.}. In $r$-dimensional parameter
space, rectangular grids can be utilized with multilinear or
multicubic interpolation. For
example, in the case of multilinear
interpolation, the weights can be calculated as follows:
\begin{thinlist}
\item Find the hyperrectangular parameter grid
cell inside which the value of ${\bf a}$ falls.
\item Shift and scale this cell so that it becomes a hypercube
with diagonal vertices at $(0, 0, ..., 0)$ and $(1, 1, ..., 1)$.
\item Let's designate the shifted and scaled value of ${\bf a}$ by ${\bf z}$,
with components $(z_1, z_2, ..., z_r)$.
If we were to interpolate towards ${\bf z}$
a scalar function $f$ defined at the
vertices with coordinates $(c_1, c_2, \ldots, c_r)$,
where all $c_k$ values are now either 0 or 1,
then we would use the formula
\begin{equation}
\label{eq:multilinear}
f(z_1, z_2, ..., z_r) = \sum_{\substack{
c_1 \in \,\{0, 1\}\\
c_2 \in \,\{0, 1\}\\
\cdot \cdot \cdot \cdot \cdot \cdot \cdot \cdot \cdot \\
c_r \in \,\{0, 1\}\\
}} f(c_1, c_2, ..., c_r) \prod_{k=1}^r z_k^{c_k} (1 - z_k)^{1 - c_k}
\end{equation}
which is obviously linear in every $z_k$ and has
correct function values when evaluated at the vertices.
In complete analogy, we define the weights for the quantile functions
constructed at the vertices with coordinates $(c_1, c_2, \ldots, c_r)$ to be
$w(c_1, c_2, \ldots, c_r) = \prod_{k=1}^r z_k^{c_k} (1 - z_k)^{1 - c_k}$.
Naturally, there are $m = 2^r$ weights total.
\end{thinlist}
The quantile interpolation of univariate distributions is implemented
in the NPStat package with the \cname{InterpolatedDistribution1D} class.
A collection of quantile functions is assembled incrementally together
with their weights (formula for calculating the weights is to be supplied
by the user).
For calculating the interpolated density at point $x$,
the equation $x = q(y|{\bf a})$,
with $q(y|{\bf a})$ from~\ref{eq:quantileinterp1d},
is solved numerically for $y$. The density is then evaluated by
numerically differentiating the interpolated quantile function:
$p(x|{\bf a}) = \left( \frac{\partial q(y|{\bf a})}{\partial y} \right)^{-1}$.
A class with a simpler interface implementing linear weight assignments
between nearby points in a 1-d parameter space is called \cname{InterpolatedDistro1D1P}.
Less reliable but computationally faster linear interpolation of densities (instead of
quantile functions) is implemented for 1-d parameter spaces by the
class \cname{VerticallyInterpolatedDistro1D1P}. This class allows
the user to perform interpolation in a transformed space (for example,
the transform can center all distributions at 0 and rescale them to
the same width) in order to alleviate deficiencies of ``vertical''
interpolation.
\subsection{Interpolation of Multivariate Densities}
\label{sec:interpolatemultivar}
The procedure described in the previous section can be
generalized to interpolate multivariate distributions.
Let $p({\bf x}|{\bf a})$ be a multivariate probability density
in a $d$-dimensional space $X$ of random variables ${\bf x} \in X$.
${\bf a}$ is a vector of parameters, and
$\int_{X} p({\bf x}|{\bf a}) d {\bf x} = 1$
for every ${\bf a}$\footnote{Compared to interpolation of
arbitrary functions, preserving this normalization is one of the
major complications in interpolating multivariate densities.}.
For a~``well-behaved'' density (Riemann-integrable, {\it etc}),
it is always possible to construct a one-to-one
mapping from the space $X$
into the unit $d$-dimensional cube, $U_d$, using a sequence of one-dimensional
conditional cumulative distribution functions. These functions are defined
as follows:
$$
\begin{array}{c}
F_{1}(x_1|x_2, x_3, \ldots, x_d, {\bf a}) \equiv \int_{-\infty}^{x_1} p(z_1, x_2, x_3, \ldots, x_d | {\bf a}) d z_1 /
\int_{-\infty}^{\infty} p(z_1, x_2, x_3, \ldots, x_d | {\bf a}) d z_1
\\
F_{2}(x_2 | x_3, \ldots, x_d, {\bf a}) \equiv \int_{-\infty}^{x_2} F_{1}(\infty | z_2, x_3, \ldots, x_d, {\bf a}) d z_2 /
\int_{-\infty}^{\infty} F_{1}(\infty | z_2, x_3, \ldots, x_d, {\bf a}) d z_2
\\
F_{3}(x_3 | \ldots, x_d, {\bf a}) \equiv \int_{-\infty}^{x_3} F_{2}(\infty | z_3, \ldots, x_d, {\bf a}) d z_3 /
\int_{-\infty}^{\infty} F_{2}(\infty | z_3, \ldots, x_d, {\bf a}) d z_3
\\
.\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .\ .
\\
F_{d}(x_d | {\bf a}) \equiv \int_{-\infty}^{x_d} F_{d-1}(\infty | z_d, {\bf a}) d z_d /
\int_{-\infty}^{\infty} F_{d-1}(\infty | z_d, {\bf a}) d z_d
\end{array}
$$
Naturally, $F_{k}(\infty | x_{k+1}, \ldots, x_d, {\bf a})$ is just renormalized $p({\bf x}|{\bf a})$
in which the first $k$ components of ${\bf x}$ are integrated out ({\it i.e.,} marginalized).
The mapping from ${\bf x} \in X$ into ${\bf y} \in U_d$ is defined by $y_i = F_i(x_i | \ldots), \ i = 1, \ldots, d$. In terms of conditional cumulative distribution functions,
$$
p({\bf x}|{\bf a}) = \prod_{i=1}^{d}{\frac{\partial F_i(x_i | \ldots)}{\partial x_i}}.
$$
This method is not unique and other mappings
from $X$ to $U_d$ are possible. What makes this particular construction useful
is that the inverse mapping, from $U_d$ into $X$, can be
easily constructed as well: we simply solve the equations
\begin{equation}
\label{eq:invertcondcdf}
y_i = F_i(x_i | \ldots)
\end{equation}
in the reverse order, starting from dimension $d$ and going back
to dimension $1$. Each equation in this sequence {\it has only one unknown}
and therefore it can be efficiently solved numerically
(and sometimes algebraically) by a variety
of standard root finding techniques. The solutions of these equations,
$x_i = q_i(y_i | x_{i+1}, \ldots, x_d, {\bf a}) \equiv F_i^{-1}(y_i | \ldots)$, are
the {\it conditional quantile functions} (CQFs). Note that
\begin{equation}
\label{eq:invderiv}
p({\bf x}|{\bf a}) = \left( \prod_{i=1}^{d}{\frac{\partial q_i(y_i | \ldots)}{\partial y_i}} \right)^{-1}.
\end{equation}
Now, if the CQFs are known for some parameter values
${\bf a}_1$ and ${\bf a}_2$,
interpolation towards ${\bf a} = (1 - \lambda) {\bf a}_1 + \lambda {\bf a}_2$ is made
in the same manner as described in Section~\ref{sec:interpolatebyquant}:
$x_i = (1 - \lambda) q_i(y_i | x_{i+1}, \ldots, x_d, {\bf a}_1) + \lambda q_i(y_i | x_{i+1}, \ldots, x_d, {\bf a}_2)$.
If the CQFs
are known on a~grid in the parameter space, we have to use
an appropriate interpolation technique (multilinear, multicubic, {\it etc})
in that space in order to assign
the weights to the CQFs at the nearby grid points. In general,
the interpolated CQFs are defined by
a weighted average
\begin{equation}
\label{eq:interpq}
q_i(y_i | x_{i+1}, \ldots, x_d, {\bf a}) = \sum_{j=1}^m w_j q_i(y_i | x_{i+1}, \ldots, x_d, {\bf a}_j),
\end{equation}
where the sum is performed over $m$ nearby parameter points,
weights $w_j$ are normalized by $\sum_{j=1}^m w_j = 1$, and their
exact values depend on the parameter grid chosen and the interpolation method
used.
Basically, it is the whole mapping from ${\bf y}$ into ${\bf x}$ which gets
interpolated in this method.
The CQF interpolation results look
very natural, but the process is rather CPU-intensive:
for each ${\bf x}$ we need to solve $d$ one-dimensional nonlinear equations
\begin{equation}
\label{eq:interpsolve}
x_i = q_i(y_i | x_{i+1}, \ldots, x_d, {\bf a}), \ \ \ \ i = d, \ldots, 1
\end{equation}
in order to determine ${\bf y}$ of the interpolated mapping.
In this process, each call to evaluate $q_i(y_i | x_{i+1}, \ldots, x_d, {\bf a})$
triggers $m$ calls to evaluate $q_i(y_i | x_{i+1}, \ldots, x_d, {\bf a}_j)$.
Depending on implementation details, each of these $m$ calls
may in turn trigger root finding in an equation
like~\ref{eq:invertcondcdf}.
Once ${\bf y}$ is found for the interpolated CQFs,
density is determined by numerical
evaluation of~\ref{eq:invderiv}
in which $q_i(y_i | \ldots)$ are given by~\ref{eq:interpq}.
In practice, finite steps of the parameter grids will cause
dependence of the interpolation results on the order in which
conditional quantiles are evaluated. If choosing some
particular order is considered undesirable and increased CPU
loads are acceptable, all $d!$ possible permutations
should be averaged.
In the NPStat package, multivariate densities whose mapping
from the multidimensional unit cube into the density support
region is implemented via CQFs return ``true'' when
their virtual function \cname{mappedByQuantiles} is called.
User-developed implementations of
multivariate densities should follow this convention as well.
In particular, mapping of
the densities represented by the \cname{BinnedDensityND} class
(lookup tables on hyperrectangular
grids with multilinear interpolation) is performed by CQFs,
as well as mapping of all fully factorizable densities.
When the fidelity of the model is less critical or when the
correlation structure of the distribution is more stable w.r.t.
parameter changes than its location and scales, much faster
multivariate density interpolation can be performed by decomposing
the density into the copula and the marginals. The support
of the copula density $c({\bf y}({\bf x})|{\bf a})$ defined
in Eq.~\ref{eq:copuladef}
is always $U_d$ and it does not depend on the
parameter ${\bf a}$. This suggests that the marginals and the copula can
be interpolated separately, using quantile function interpolation
for the marginals and the standard weighted average interpolation
for the copula.
The NPStat package uses the same data structure to perform both
CQF-based and copula-based interpolation of multivariate densities.
Multilinear interpolation is supported on a rectangular parameter
grid (not necessarily equidistant).
The distributions at the grid points are collected together
using the \cname{GridInterpolatedDistribution} template,
while method-specific
differences are isolated inside a policy class which serves as
the \cname{GridInterpolatedDistribution} template parameter.
Currently, \cname{UnitMapInterpolationND} or
\cname{CopulaInterpolationND} classes can serve as such a~parameter
which results in CQF-based or copula-based interpolation, respectively.
\section{Nonparametric Density Estimation}
\label{sec:densityestimation}
The problem of estimating population probability density function
from a finite sample drawn from this population is ubiquitous
in data analysis practice. It is often the case that
there are no substantial reasons for choosing a particular parametric
density model but certain assumptions,
such as continuity
of the density together with some number of derivatives or absence
of narrow spatial features, can still be justified. There is
a~number of approaches by which assumptions of this kind can be introduced
into the statistical model. These approaches are collectively known
as ``nonparametric
density estimation'' methods~{\cite{ref:silverman, ref:izenmann, ref:kde}}. The NPStat
package provides an efficient implementation of one of
such methods, kernel density estimation (KDE), together with its extension
to polynomial density models and densities with bounded support.
This extension is called ``local orthogonal polynomial expansion'' (LOrPE).
\subsection{Kernel Density Estimation (KDE)}
\label{sec:kde}
Suppose we have an i.i.d.~sample of measurements $x_i$, $i = 0, 1,
..., N-1$ from a univariate probability density $p(x)$. The empirical
probability density function (EPDF) for this sample is defined by
\begin{equation}
\mbox{EPDF}(x) = \frac{1}{N} \sum_{i=0}^{N-1} \delta (x - x_i),
\label{eq:edf}
\end{equation}
where $\delta(x)$ is the Dirac delta function. $\mbox{EPDF}(x)$
can itself be considered an estimate of $p(x)$. However, this
estimate can be substantially improved if some additional information
about $p(x)$ is available. For example, we can often assume that
$p(x)$ is continuous together with its first few derivatives or
that it can have at most a few modes. In such cases a convolution
of $\mbox{EPDF}(x)$ with a kernel function, $K(x)$, often provides
a much better estimate of the population density. $K(x)$ itself is
usually chosen to be a symmetric continuous density with a location
and scale parameter, so that the resulting estimate looks
like
\begin{equation}
\hat{p}\sub{KDE}(x|h) = \frac{1}{N h} \sum_{i=0}^{N-1} K \left(\frac{x - x_i}{h}\right).
\label{eq:kde}
\end{equation}
In the context of
density estimation, parameter $h$ is usually referred to as
``bandwidth''.
Use of the Gaussian distribution or one of the distributions from the
symmetric beta family as $K(x)$ is very common. In fact, it is so common
that beta family kernels have their own names in the density estimation
literature. These names are listed in Table~\ref{table:betakernels}.
\begin{table}[h!]
\caption{One-dimensional kernels from the symmetric beta family.
All these kernels look like
${\cal{N}} \, (1 - x^2)^p \, I(-1 < x < 1)$, where ${\cal{N}}$
is the appropriate normalization factor. In the
NPStat package, their formulae are implemented by the
\cname{SymmetricBeta1D} function.}
\label{table:betakernels}
\begin{center}
\noindent\begin{tabular}{|c|c|} \hline
Kernel name & Power $p$ \\ \hline\hline
Uniform (also called ``boxcar'') & 0 \\ \hline
Epanechnikov & 1 \\ \hline
Biweight (also called ``quartic'') & 2 \\ \hline
Triweight & 3 \\ \hline
Quadweight & 4 \\ \hline
% Quintweight & 5 \\ \hline
\end{tabular}
\end{center}
\end{table}
In the limit $N \rightarrow \infty$ and with proper choice of $h$ so that
$h \rightarrow 0$ and $N h \rightarrow \infty$,
$\hat{p}\sub{KDE}(x)$ becomes a~consistent
estimate of $p(x)$ in terms of integrated squared error (ISE):
\begin{equation}
\mbox{ISE}(h) = \int_{-\infty}^{\infty} (\hat{p}\sub{KDE}(x|h) - p(x))^2 dx, \ \ \ \ \ \lim_{N \rightarrow \infty} \mbox{ISE}(h) = 0.
\end{equation}
For the method analysis purposes, it is useful to understand
what happens when we reconstruct known densities $p(x)$.
Here, another measure of distance between the true
density and its estimator becomes indispensable,
called ``mean integrated squared error'' (MISE):
\begin{equation}
\mbox{MISE}(h) = E(\mbox{ISE}(h)) = E \left( \int_{-\infty}^{\infty} (\hat{p}\sub{KDE}(x|h) - p(x))^2 dx \right),
\label{eq:mise}
\end{equation}
where $E(...)$ stands for the expectation value over samples of $N$ points
drawn from $p(x)$.
While other distance measures can be defined
(and may be more relevant for your problem),
$\mbox{MISE}(h)$ is usually
the easiest to analyze
mathematically\footnote{Note that both ISE and MISE are not dimensionless and,
therefore, not invariant under scaling transformations.
It is OK to compare different $\hat{p}\sub{KDE}(x|h)$ with each other if
the underlying $p(x)$ is the same, but ISE or MISE comparison for different $p(x)$
requires certain care if meaningful results are to be obtained.}.
A typical goal of such an analysis consists in
finding the value of $h$ which minimizes $\mbox{MISE}(h)$ for
a~sample of given size, and a~significant amount of effort
has been devoted to such bandwidth optimization studies
(see, {\it e.g.,} Refs.~\cite{ref:bwopt, ref:loaderbw} for a review).
A large fraction of these studies employs a~simple MISE approximation valid for large
values of $N$ known as ``AMISE'' (asymptotic MISE). This approximation
includes just the two leading terms known as ``bias'' which increases with
increasing bandwidth and ``variance'' which decreases with increasing bandwidth,
so that the bandwidth optimization procedure is reduced to finding
the best bias-variance trade-off.
For subsequent discussion, it will be useful to introduce the concept
of kernel order. Let's define the functional
\begin{equation}
\mu_{j}(f) = \int_{-\infty}^{\infty} x^j f(x) dx
\label{eq:mufunct}
\end{equation}
which is the $j$-th moment of $f(x)$ about 0.
Then it is said that the kernel $K(x)$ is of order $m$ if
\begin{equation}
\mu_0(K) = 1, \ \mu_j(K) = 0 \ \mbox{for} \ j = 1, ..., m-1, \ \mbox{and} \ \mu_m(K) \ne 0.
\end{equation}
It can be shown that, for one-dimensional
KDE, the rate of AMISE convergence to 0 is proportional
to $N^{-\frac{2 m}{2 m + 1}}$ (see, for example, section~2.8
of~\cite{ref:kernsmooth}). Therefore, kernels with high values
of $m$ should be preferred for large samples. At the same time,
only in the case $m = 2$ can the kernels be everywhere non-negative
({\it i.e.,} bona fide densities).
When $m > 2$ (so called ``high-order'' kernels), in order to have
$\mu_2(K) = 0$, $K(x)$ must become negative somewhere.
Therefore, negative values of $\hat{p}\sub{KDE}(x|h)$ also
become possible, and a mechanism for dealing with this problem
must be specified. In NPStat, this problem is normally taken care
of by setting negative values of $\hat{p}\sub{KDE}(x|h)$ to 0
with subsequent renormalization of the density estimate so that its
integral is 1.
It is instructive to consider $p(x)$, $\mbox{EPDF}(x)$,
and $\hat{p}\sub{KDE}(x|h)$ in the frequency
domain\footnote{The Fourier transform of a
probability density is called ``characteristic function'' of the distribution.}. There, the Fourier
transform of $K(x)$ acts as a low-pass filter which suppresses the sampling
noise present in $\mbox{EPDF}(x)$ so that the result, $\hat{p}\sub{KDE}(x|h)$,
has a better match
to the spectrum of $p(x)$. By Parseval's identity, this
leads to the reduction of the ISE. High-order kernels allow
for a sharper frequency cutoff in the filter. Fortunately, the precise shape
of $p(x)$ frequency spectrum
is relatively
unimportant; it is only important that its high frequency components
decay ``fast enough'' so that their suppression together with the
noise does not cause a significant distortion of $\hat{p}\sub{KDE}(x|h)$
in comparison with $p(x)$. Powerful automatic bandwidth selection rules
can be derived from this type of analysis if certain
assumptions about the $p(x)$ spectrum decay rate
at high frequencies are satisfied~\cite{ref:chiu2, ref:chiumultivar}.
With a few modifications, the ideas described above can be translated
to multivariate settings. The kernel function becomes a multivariate
density, and the bandwidth parameter becomes, in general, the bandwidth
matrix (for example, the covariance matrix in the case of multivariate
Gaussian kernel).
The NPStat package calculates $\hat{p}\sub{KDE}(x|h)$ on an equidistant
grid in one or more dimensions.
Initially, the data sample is histogrammed using a finely
binned histogram and then convoluted with a kernel using
Discrete Fast Fourier Transform (DFFT).
The number of histogram bins, $N_b$, should be selected
taking into account the following considerations:
\begin{thinlist}
\item The bins should be sufficiently small so that no ``interesting''
detail will be missed due to discretization of the density.
\item The bins should be sufficiently small so that the expected optimal
MISE is significantly larger than the ISE due to
density discretization. Detailed exposition of this
requirement can be found in Ref.~\cite{ref:discretizedkde}.
\item DFFT should be efficient for this number of bins. It is best to use
$N_b = 2^k$ bins in each dimension, where $k$ is a positive integer.
\end{thinlist}
The computational complexity of this method is
${\cal O}(N) + {\cal O}(N_b \ln N_b)$ which
is usually much better than the ${\cal O}(N \times N_b)$ complexity of
a ``naive'' KDE implementation. However, for large sample
dimensionalities $N_b$ can become very large which limits the
applicability of this technique. There are other computational
methods (not yet in NPStat) which can work efficiently for
high-dimensional samples~\cite{ref:fastkde1, ref:fastkde2}.
The following NPStat functions and classes can be used to perform
KDE and to assist in bandwidth selection:
\cname{amiseOptimalBwGauss} (header file ``npstat/stat/amiseOptimalBandwidth.hh'') --- calculates AMISE-optimal bandwidth for
fixed-bandwidth KDE with Gaussian kernel as well as high-order kernels
derived from Gaussian. The following formula is implemented:
\begin{equation}
h\sub{AMISE} = \left( \frac{R(K) (m!)^2}{2 m \mu_{m}^2(K) R(p^{(m)}) N} \right)^{\frac{1}{2 m + 1}}.
\label{eq:amisebw}
\end{equation}
In this formula, $R(f)$ denotes the functional
$R(f) = \int_{-\infty}^{\infty} f^2(x) dx$ defined
for any square-integrable function $f$, $\mu_{m}(f)$
is the functional defined in Eq.~\ref{eq:mufunct},
$K$ is the kernel,
$m$ is the kernel order, $p^{(m)}$ is the $m$-th derivative of the
reconstructed density\footnote{Of course, in
practice the reconstructed density is usually unknown.
It is expected that a~look-alike density will be substituted.
}, and $N$ is the number of points in the sample.
The expected AMISE corresponding to this bandwidth is calculated from
\begin{equation}
\mbox{AMISE}(h\sub{AMISE}) = \frac{(2 m + 1) R(K)}{2 m \, h\sub{AMISE} \,N}.
\label{eq:bestamise}
\end{equation}
For the origin and further discussion
of these formulae consult, for example, section~2.8 in
Ref.~\cite{ref:kernsmooth}. It is assumed that high-order kernels are
generated according to Eq.~\ref{eq:effk}.
\cname{amiseOptimalBwSymbeta} (header file ``npstat/stat/amiseOptimalBandwidth.hh'')
--- calculates AMISE-optimal bandwidth
and corresponding expected AMISE according to
Eqs.~\ref{eq:amisebw} and~\ref{eq:bestamise}
for fixed-bandwidth KDE with kernels from symmetric beta family as well
as with high-order kernels derived from symmetric beta distributions.
\cname{amisePluginBwGauss} (header file ``npstat/stat/amiseOptimalBandwidth.hh'')
--- Gaussian $p(x)$ is substituted
in Eqs.~\ref{eq:amisebw} and~\ref{eq:bestamise} and the corresponding
quantities are found for the Gaussian kernel as well as high-order kernels
derived from Gaussian.
\cname{amisePluginBwSymbeta} (header file ``npstat/stat/amiseOptimalBandwidth.hh'')
--- Gaussian $p(x)$ is substituted
in Eqs.~\ref{eq:amisebw} and~\ref{eq:bestamise} and the corresponding
quantities are found for the kernels from the symmetric beta family as well
as high-order kernels derived from symmetric beta distributions.
\cname{miseOptimalBw} and \cname{gaussianMISE} methods of the
\cname{GaussianMixture1D} class --- these methods calculate
MISE-optimal bandwidth and the corresponding exact MISE for
Gaussian mixture densities estimated with the Gaussian kernel
(or high-order kernels derived from the Gaussian) according
to the formulae from Ref.~\cite{ref:bwgaussmix}.
\cname{ConstantBandwidthSmoother1D} --- This class implements fixed-bandwidth
KDE for one-dimensional samples of points using Gaussian kernels
or kernels from the symmetric beta family (including high-order
kernels which are generated internally). Boundary effects can be
alleviated by data mirroring. Kernel convolutions are performed
by DFFT after sample discretization.
\cname{ConstantBandwidthSmootherND} --- This class implements fixed-bandwidth
KDE for multivariate histograms. Arbitrary density implementations
which inherit from \cname{AbsDistributionND} can be used as weight
functions for generating high-order kernels. Boundary effects can be
alleviated by data mirroring.
\cname{JohnsonKDESmoother} --- This class constructs adaptive bandwidth KDE estimates for
one-dimensional samples. It operates in several steps:
\begin{thinlist}
\item The sample is discretized using centroid-preserving binning.
\item Population mean, $\mu$, standard deviation, $\sigma$,
skewness, $s$, and kurtosis, $k$, are
estimated by the \cname{arrayShape1D} function.
\item A distribution from the Johnson system with these
values of $\mu$, $\sigma$, $s$, and $k$ is used as a template
for the \cname{amiseOptimalBwSymbeta} (or \cname{amiseOptimalBwGauss})
function which calculates optimal constant bandwidth
according to Eq.~\ref{eq:amisebw}.
\item A pilot density estimate is built by KDE
(using \cname{LocalPolyFilter1D} class) with the bandwidth determined
in the previous step.
\item The sample is smoothed by the \cname{variableBandwidthSmooth1D}
function (described below) using this pilot estimate.
\end{thinlist}
This method achieves better MISE
convergence rate without using high-order kernels (so that density
truncation below zero is unnecessary). Give this method a serious
consideration if you are working with one-dimensional samples,
expect to reconstruct a unimodal distribution, and do not have to worry
about boundary effects.
\cname{KDECopulaSmoother} --- multivariate KDE in which extra care
is applied in order to ensure that the estimation result
is a bona fide copula ({\it i.e.,} that all of its marginals
are uniform).
This class should normally be used to smooth empirical copula densities
constructed by \cname{empiricalCopulaHisto} or
\cname{empiricalCopulaDensity}.
The bandwidth can be supplied by the user or
it can be chosen by cross-validation. In general,
\cname{LOrPECopulaSmoother} will produce better results
in this context, so \cname{KDECopulaSmoother} should
be used only in case the slower speed of \cname{LOrPECopulaSmoother}
is deemed unacceptable.
\cname{KDEFilterND} --- A collection of \cname{KDEFilterND} objects which utilize
common workspace and DFFT engine can be employed to perform cross-validation
calculations and bandwidth scans. Such a collection is used, for example,
by the \cname{KDECopulaSmoother} class described above.
If you already know the bandwidth,
the \cname{ConstantBandwidthSmootherND} class
will likely be more convenient to use than this one.
\cname{variableBandwidthSmooth1D} --- implements one-dimensional
variable kernel adaptive KDE (which should not be confused
with {\it local kernel}, or {\it balloon}, method --- see section~2.10 in
Ref.~\cite{ref:kernsmooth} for further discussion). In general, this
approach consists in assigning different bandwidth values to each sample
point. In the \cname{variableBandwidthSmooth1D} function, this assignment
is performed according to the formula
\begin{equation}
h_i = \frac{c}{{\hat \rho}^{\alpha}(x_i)},
\end{equation}
where ${\hat \rho(x)}$ is a~pilot density estimate constructed, for example,
by fixed-bandwidth KDE.
Boundary kernel adjustments are performed automatically.
The normalization constant $c$ is
determined so that the geometric mean
of $h_i$ is equal to a~user-provided bandwidth. There are
certain reasons to believe that the choice of power
parameter $\alpha = 1/2$ is a~particularly good one~\cite{ref:abramson}.
\cname{simpleVariableBandwidthSmooth1D} --- a high-level driver function for
variable kernel adaptive KDE which uses kernels from the symmetric beta
family and automatically generates a~pilot density estimate employing
AMISE plugin bandwidth.
\subsection{Local Orthogonal Polynomial Expansion (LOrPE)}
\label{sec:lorpe}
Local Orthogonal Polynomial Expansion (LOrPE) can be viewed as
a convenient method
for creating kernels with desired properties (including high-order
kernels) and for eliminating the KDE boundary bias\footnote{If you
are familiar with orthogonal series density estimators (OSDE), you can
also view LOrPE as a~localized version of OSDE.}.
LOrPE amounts to constructing a truncated expansion of the EPDF
defined by Eq.~\ref{eq:edf}
into orthogonal polynomial series near each point $x\sub{fit}$
where we want to build the initial density estimate:
\begin{equation}
\label{eq:expansion}
\hat{f}\sub{LOrPE}(x|h) = \sum_{k=0}^{M}c_{k}(x\sub{fit}, h) P_{k}\left(\frac{x - \xf}{h}\right).
\end{equation}
The polynomials $P_{k}(x)$ are built to satisfy the normalization condition
\begin{equation}\label{eq:norm0}
\frac{1}{h} \int_{a}^{b} P_{j}\left(\frac{x - \xf}{h}\right)P_{k}\left(\frac{x - \xf}{h}\right) K\left(\frac{x - \xf}{h}\right)dx = \delta_{jk},
\end{equation}
which is equivalent to
\begin{equation}
\label{eq:norm}
\int_{(a - x\sub{fit})/h}^{(b - x\sub{fit})/h} P_{j}(y)P_{k}(y)K(y)dy = \delta_{jk},
\end{equation}
where $\delta_{jk}$ is the Kronecker delta,
$K(x)$ is a suitably chosen kernel function,
and $[a, b]$ is the support interval of the estimated density.
For commonly used kernels from
the beta family (Epanechnikov, biweight, triweight, {\it etc}.), condition (\ref{eq:norm})
generates the normalized Gegenbauer polynomials (up to a common multiplicative
constant) at points $\xf$ sufficiently deep inside the support
interval, provided $h$ is small enough to guarantee that
$(a-\xf)/h\leq -1$ and $(b-\xf)/h\geq 1$.
If $\xf$ is
sufficiently close to the boundaries of the density
support $[a,b]$ then the polynomial system will vary
depending on $\xf$, and the notation
$P_{k}(\cdot,\xf)$ would be more appropriate. However, in the subsequent
text this dependence on $\xf$ will be suppressed in order to simplify
the notation.
The expansion coefficients $c_{k}(x\sub{fit}, h)$
are determined by the usual scalar product of the expanded function with
$P_{k}$:
\begin{equation}\label{eq:convol}
c_{k}(x\sub{fit}, h) = \frac{1}{h} \int \mbox{EPDF}(x) P_{k}\left( \frac{x - x\sub{fit}}{h} \right)K\left( \frac{x - x\sub{fit}}{h} \right)dx,
\end{equation}
which, after substituting $\mbox{EPDF}(x)$ from~\ref{eq:edf}, leads to
\begin{equation}\label{eq:emp-ck}
c_{k}(x\sub{fit}, h) = \frac{1}{Nh}\sum_{i=0}^{N-1}P_{k}\left( \frac{x_i - x\sub{fit}}{h} \right) K\left( \frac{x_i - x\sub{fit}}{h} \right).
\end{equation}
Note that the coefficients $c_{k}(x\sub{fit}, h)$ calculated according to
Eq.~\ref{eq:convol} can be naturally
interpreted as localized expectation values of orthogonal polynomials
$P_{k}\left( \frac{x - x\sub{fit}}{h} \right)$ w.r.t. probability
density $\mbox{EPDF}(x)$ in which localization weights are given by
$\frac{1}{h} K\left( \frac{x - x\sub{fit}}{h} \right)$.
The density estimate at $x\sub{fit}$ is defined by
\begin{equation}
\hat{p}\sub{LOrPE}(x\sub{fit}|h) = \max\{0, \hat{f}\sub{LOrPE}(x\sub{fit}|h)\}.
\label{eq:trunc}
\end{equation}
In general, LOrPE does not produce a bona fide density (in this respect it is similar to the
orthogonal series estimator), and after calculating
the density for all $x\sub{fit}$ values one has to perform the overall renormalization.
Equation~\ref{eq:expansion} can be usefully generalized as follows:
\begin{equation}\label{eq:lorpe}
\hat{f}\sub{LOrPE}(x|h) = \sum_{k=0}^{\infty} g(k) c_{k}(x\sub{fit}, h) P_{k}\left( \frac{x - x\sub{fit}}{h} \right).
\end{equation}
Here, $g(k)$ is a ``taper function''. Normally, $g(0) = 1$ and
there is an integer $M$ such that $g(k) = 0$ for any $k > M$. The
taper function suppresses
high order terms gradually instead of using a
sharp cutoff at $M$.
When evaluated at points $x\sub{fit}$
which are sufficiently far away from the density support
boundaries and if $K(x)$ is an even function, Eq.~\ref{eq:lorpe}
is equivalent to a~kernel density estimator with the effective
kernel
\begin{equation}\label{eq:effk}
K\sub{eff}(x) = K(x) \sum_{j=0}^{\infty} g(2 j) P_{2 j}(0) P_{2 j}(x).
\end{equation}
Moreover, if $g(k)$ is a step function,
{\it i.e.,}~$g(k) = 1$ for all $k \le M$ and $g(k) = 0$ for all $k > M$,
it can be shown that the effective kernel is of order $M+1$ if $M$
is odd and $M+2$ if $M$ is
even~\cite{ref:placida}\footnote{For such kernels the sum in Eq.~\ref{eq:effk}
can be
reduced to a simple algebraic form via the Christoffel-Darboux identity.
However, the resulting formula is not easy to evaluate in a numerically
stable manner near $x = 0$.}.
A slightly different modification of LOrPE is based on the following
identity:
\begin{equation}\label{eq:deltasum}
\frac{1}{h} \sum_{j=0}^{\infty} P_{j}\left(\frac{x - x_i}{h}\right)P_{j}\left(0\right) K\left(\frac{x - x_i}{h}\right) = \delta(x - x_i).
\end{equation}
Substituting this into Eq.~\ref{eq:edf}, we obtain
\begin{equation}
\mbox{EPDF}(x) = \frac{1}{N h} \sum_{i=0}^{N-1} \sum_{j=0}^{\infty} P_{j}\left(\frac{x - x_i}{h}\right)P_{j}\left(0\right) K\left(\frac{x - x_i}{h}\right).
\end{equation}
The modified density estimate is obtained from this expansion by introducing a taper:
\begin{equation}
\hat{\hat{f}}\sub{LOrPE}(x\sub{fit}|h) = \frac{1}{N h} \sum_{i=0}^{N-1} \sum_{j=0}^{\infty} g(j) P_{j}\left(\frac{x\sub{fit} - x_i}{h}\right)P_{j}\left(0\right) K\left(\frac{x\sub{fit} - x_i}{h}\right)
\label{eq:lorpe2}
\end{equation}
and then truncation is handled just like in Eq.~\ref{eq:trunc}.
For even kernels and points $x\sub{fit}$ sufficiently far away from the density support
boundaries, Eq.~\ref{eq:lorpe2} is equivalent to Eq.~\ref{eq:lorpe}
evaluated at $x = x\sub{fit}$. However, this is no longer true near the boundaries.
Perhaps, the easiest way to think about it is that, in Eq.~\ref{eq:lorpe2},
an effective kernel is placed at the location of each data point
(and, in general, shapes of these effective kernels
depend on the data point location $x_i$).
All these kernels are then summed to obtain the density estimates
at all $x\sub{fit}$.
On the other hand, in Eq.~\ref{eq:lorpe} the effective kernel is placed
at the location of each point at which the density estimate is made (so
that the effective kernel shape depends on $x\sub{fit}$).
This kernel generates the weights for each data point which are summed to
obtain the estimate. The difference between these two approaches
is usually ignored for KDE
(simple KDE is unable to deal with boundary bias anyway),
but for LOrPE substantially different results can be obtained near
the boundaries.
It is not obvious {\it a priori} which density estimate is better:
$\hat{\hat{f}}\sub{LOrPE}$ from Eq.~\ref{eq:lorpe2}
or $\hat{f}\sub{LOrPE}$ from Eq.~\ref{eq:lorpe},
although some preliminary experimentation with simple distributions
does indicate that
$\hat{f}\sub{LOrPE}$ typically results in smaller MISE.
The integral of the $\hat{\hat{f}}\sub{LOrPE}$ estimate
on the complete density support interval is automatically 1
which is not true for $\hat{f}\sub{LOrPE}$. On the other
hand, $\hat{f}\sub{LOrPE}$ admits an appealing interpretation
in terms of the local density expansion~\ref{eq:lorpe} in which
the localized expectation values of the orthogonal polynomials
$P_k$ are matched to their observed values
in the data sample (this also leads to automatic matching
of localized distribution moments about $x\sub{fit}$).
One-dimensional KDE with fixed kernel $K(x)$ has only one important parameter
which regulates the amount of smoothing:
bandwidth $h$. LOrPE has two such parameters: bandwidth $h$ and the highest
polynomial order $M$ (or, in general, the shape of the taper function).
It is intuitively obvious that polynomial modeling
should result in a smaller bias than KDE for densities
with several (at least $M$) continuous
derivatives, and that a~proper balance of $h$ and $M$ should
result in a better estimator overall.
LOrPE calculations remain essentially unchanged
in multivariate settings: the only difference is switching to multivariate
orthogonal polynomial systems.
Even though LOrPE is equivalent to KDE far away from the density support
boundaries, LOrPE does not suffer from the boundary bias because
Eq.~\ref{eq:norm} automatically adjusts the shape of
orthogonal polynomials near the boundary. This makes LOrPE
applicable in a~wider set of problems than KDE.
In addition to just making better estimates of densities with
a~sharp cutoff at the boundary, LOrPE fixes the main
problem with some existing KDE-based methods which are rarely
used in practice due to their severe degradation from
the boundary bias. Examples of such methods include transformation
kernel density estimation
(see, for example, section 2.10.3 of~\cite{ref:kernsmooth})
in which the transformation target is the uniform distribution,
as well as separate estimation
of the marginals and the copula for multivariate densities.
Unfortunately, LOrPE improvements over KDE do not come without
a price in terms of the algorithmic complexity of the method.
The density estimate can no longer be represented as a simple
convolution of the sample EPDF and a kernel. Because of this,
DFFT-based calculations are no longer sufficient.
In the LOrPE implementation within NPStat, simple algorithms
are used instead which perform pointwise convolutions. Their
computational complexity scales as ${\cal O}(N) + {\cal O}(N_b N_s)$, where
$N_b$ is the number of bins in the sample discretization
histogram and $N_s$ is the number of bins of the same length
(area, volume, {\it etc}) inside the kernel support. For
large bandwidth values (or for kernels with infinite support)
this essentially becomes ${\cal O}(N) + {\cal O}(N_b^2)$ which
can be significantly slower than the KDE implementation based on DFFT.
The following NPStat classes and functions can be used to perform LOrPE
and to assist in choosing the bandwidth:
\cname{LocalPolyFilter1D} --- LOrPE for one-dimensional samples.
A numerical Gram-Schmidt procedure is used
to build polynomials defined by Eq.~\ref{eq:norm0}
on an equidistant grid. A linear filter which combines
formulae~\ref{eq:convol} and~\ref{eq:lorpe} is then
constructed for each grid point $\xf$ from the density
support region (the same ``central'' filter is used for
all $\xf$ points far away from the support boundaries).
The \cname{filter} method of the class can then be used to
build density estimates defined by Eq.~\ref{eq:lorpe} with $x = \xf$
from sample histograms. Alternatively, the \cname{convolve} method
can be used to make estimates according to Eq.~\ref{eq:lorpe2}.
If necessary, subsequent truncation of the reconstructed
densities below 0 together with renormalization should be performed
by the user.
\cname{WeightTableFilter1DBuilder}
(header file ``npstat/stat/WeightTableFilter1DBuilder.hh'') ---
Helper class designed to work with \cname{LocalPolyFilter1D}.
This helper constructs linear filters out of arbitrary scanned weights
utilizing orthogonal polynomials. If it is necessary to introduce exclusion
regions into the data (for example, for the purpose of interpolating
background density from sidebands), constructor of this class is the place
where this can be done.
\cname{NonmodifyingFilter1DBuilder}
(header file ``npstat/stat/WeightTableFilter1DBuilder.hh'') ---
Helper class designed to work with \cname{LocalPolyFilter1D}.
This filter does not change the data ({\it i.e.}, this filter is
represented by the unit matrix).
\cname{getBoundaryFilter1DBuilder}
(header file ``npstat/stat/AbsFilter1DBuilder.hh'') --- This
function can be used to construct various filters that inherit
from \cname{AbsBoundaryFilter1DBuilder} class. These filters differ
by the kernel adjustments they perform near the density support boundaries.
Many filters of this kind are declared in the
``npstat/stat/Filter1DBuilders.hh'' header file.
\cname{PolyFilterCollection1D} --- A collection of \cname{LocalPolyFilter1D}
objects which can be used, for example, in bandwidth scans or in
cross-validation calculations.
\cname{LocalPolyFilterND} --- similar to \cname{LocalPolyFilter1D} but
intended for estimating multivariate densities.
\cname{SequentialPolyFilterND} --- similar to \cname{LocalPolyFilterND}
but each dimension is processed sequentially using 1-d filtering.
Employs a collection of \cname{LocalPolyFilter1D} objects, one for
each dataset dimension.
\cname{LOrPECopulaSmoother} --- multivariate LOrPE in which extra care
is applied in order to ensure that the estimation result
is a bona fide copula ({\it i.e.,} that all of its marginals
are uniform).
This class should normally be used to smooth empirical copula densities
constructed by \cname{empiricalCopulaHisto} or
\cname{empiricalCopulaDensity}.
The bandwidth can be supplied by the user or
it can be chosen by cross-validation. Less reliable but faster
calculations of this type can be performed with the
\cname{KDECopulaSmoother} class described in Section~\ref{sec:kde}.
\cname{SequentialCopulaSmoother} --- similar to \cname{LOrPECopulaSmoother}
but each dimension is processed sequentially using 1-d filtering.
\cname{NonparametricCompositeBuilder} --- a high-level API for estimating
multivariate densities by applying KDE or LOrPE separately to
each marginal and to the copula. This class builds
\cname{CompositeDistributionND} objects from collections of sample points.
\cname{symbetaLOrPEFilter1D} (header file
``npstat/stat/LocalPolyFilter1D.hh'') --- a convenience utility
for building one-dimensional LOrPE filters using kernels from
the symmetric beta family (including the Gaussian).
\cname{lorpeMise1D} (header file ``npstat/stat/lorpeMise1D.hh'')
--- calculates LOrPE MISE for an arbitrary known density
according to Eq.~\ref{eq:mise}. The support of the density is
split into $M$ subintervals. It is assumed that the sample
points are distributed in these subintervals according to the
multinomial distribution. The covariance matrix of this
distribution is then propagated to the density estimation
result. The trace of the propagated covariance, multiplied by the width of the subinterval,
is added to the integrated squared bias in order to obtain the MISE. Of course, this method
reproduces Eq.~\ref{eq:mise} exactly only in the limit $M \rightarrow \infty$,
while in practice the $O(M^3)$ computational complexity of the error propagation
formulae (based on conventional matrix multiplication) will necessitate a~reasonable
choice of finite~$M$. I suggest choosing $M$ in such a manner
that the ISE introduced by the discretization of the density is
significantly smaller than the estimated MISE.
\subsection {Density Estimation with Bernstein Polynomials}
\label{sec:bernstein}
Density representation by Bernstein polynomial series
is an alternative
approach which can be used to alleviate the boundary bias problem of KDE.
Bernstein polynomials are defined as follows:
\begin{equation}
b_{m,n}(x) = C_n^m x^m (1 - x)^{n - m},
\label{eq:bpoly}
\end{equation}
where $m = 0, 1, ..., n$ and $C_n^m$ are the binomial coefficients:
\begin{equation}
C_n^m = \frac{n!}{m! (n-m)!}.
\end{equation}
In the density estimation context, Bernstein polynomials are often
generalized to non-integer values of $n$ and $m$ in which case
$C_n^m$ is replaced by
$\frac{\Gamma(n + 1)}{\Gamma(m + 1) \Gamma(n - m +1)}$. Up to
normalization, this representation is equivalent to the
beta distribution with parameters $\alpha = m + 1$ and $\beta = n - m + 1$.
For notational simplicity, I will use the term ``Bernstein polynomials''
even if $n$ and $m$ are not integer.
There are two substantially different variations of this density estimation
method. In the first scheme~\cite{ref:betakern},
Bernstein polynomials are used
as variable-shape kernels in a KDE-like formula:
\begin{equation}
\hat{f}\sub{B}(x|n) = \frac{n + 1}{N} \sum_{i=0}^{N-1} b_{m(x), n}(x_i),
\label{eq:betakernels}
\end{equation}
where it is assumed
that the reconstructed density is supported on the [0, 1] interval
(naturally, this interval can be shifted and scaled as needed).
The requirement of asymptotic estimator consistency does not
fix $m(x)$ uniquely, and this mapping can be chosen in a~variety
of ways. In NPStat, the following relationship is implemented:
\begin{equation}
m(x) = \left\{ \begin{array}{ll}
c & \mbox{ if } x (n - 2 s) + s \leq c \\
x (n - 2 s) + s & \mbox{ if } c < x (n - 2 s) + s < n - c \\
n - c & \mbox{ if } x (n - 2 s) + s \geq n - c
\end{array} \right.
\end{equation}
This formula reproduces the simple mapping $m(x) = x n$ considered in
Ref.~\cite{ref:betakern} in case $s = 0$ and $c = 0$. The offset parameter
$s$ plays the role of effective Bernstein polynomial degree used
at $x = 0$ and regulates the amount of boundary bias. Meaningful
values of $s$ lie between $-1$ and 0. As $s \rightarrow -1$, the mean
of the generalized Bernstein polynomial kernels tends to $x$ so that
the estimator
becomes uniformly unbiased for linear density functions.
At the same time, the width of
the kernel tends to 0 at the boundary which leads to an increase in the estimator
variance. As it makes little sense to use kernels whose width
is smaller than the discretization bin size, the cutoff parameter $c$
was introduced. This cutoff effectively limits kernel width from below
in a manner which preserves asymptotic consistency of the estimator
(the useful range of $c$ values is also [-1, 0]).
In addition to appropriate selection of the main
bandwidth parameter $n$, proper choice of parameters $s$ and $c$
can significantly improve estimator convergence at the boundary.
In the second variation of this density estimation technique~\cite{ref:babu},
the polynomials are chosen
based on the location of the observed points:
\begin{equation}
\hat{\hat{f}}\sub{B}(x|n) = \frac{n + 1}{N} \sum_{i=0}^{N-1} b_{m(x_i), n}(x).
\label{eq:bern2}
\end{equation}
For any $m$ and $n$,
$\int_0^1 b_{m,n}(x) dx = \frac{1}{n + 1}$, so this particular estimate
is a bona fide density. Due to the reasons that will be mentioned later
in this subsection,
it can be advantageous to keep integer $m$ and $n$ in this approach.
As in the case of $x$-dependent kernel shape, there is
some amount of freedom in the $m(x_i)$ assignment. For asymptotic
consistency we must require that $m(x_i)/n \rightarrow x_i$ as
$n \rightarrow \infty$. However, such
assignments are not unique. One can choose, for example,
$m(x_i) = \lfloor x_i (n + 1) \rfloor$ as in Ref.~\cite{ref:babu}
(the symbol $\lfloor \cdot \rfloor$ stands for the ``floor''
function), but it can also be useful to assign more than one polynomial
to $x_i$. The following scheme is implemented in NPStat in addition to
the $m(x_i)$ assignment just mentioned. First, an
integer $k$ is found such that $k + 0.5 \leq x_i (n + 1) < k + 1.5$.
Then, if $0 \leq k < n$,
\begin{equation}
m(x_i) = \left\{ \begin{array}{ll}
k & \mbox{ with weight } k + 1.5 - x_i (n + 1) \\
k + 1 & \mbox{ with weight } x_i (n + 1) - k - 0.5
\end{array} \right.
\end{equation}
If $k < 0$ then $m(x_i) = 0$ with weight 1, and if $k \geq n$ then $m(x_i) = n$
with weight~1.
The polynomial weighting schemes actually implemented in the code are
slightly more complicated as they take into account data binning.
With integer values of $m$ and $n$, density estimates constructed according
to Eq.~\ref{eq:bern2} (or its weighted version just described) have an important
property of being positive doubly stochastic. What it means is that not only
$\int_0^1 \hat{\hat{f}}\sub{B}(x|n) dx = 1$
but also a sum of an arbitrary number of separate $\hat{\hat{f}}\sub{B}(x|n)$
estimators will be flat in $x$
as long as the sum of $x_i$ values used to build all these estimators
is itself flat. If the $x_i$ values are flat
between 0 and 1 then assigned $m$ values will be flat
between 0 and $n$ (inclusive). Then double stochasticity
follows directly from the partition
of unity property of Bernstein polynomials: $\sum_{m=0}^n b_{m,n}(x) = \sum_{m=0}^n C_n^m x^m (1 - x)^{n - m} = [x + (1 - x)]^n = 1$.
A collection of positive double stochastic estimators can be used for copula
filtering by sequentially applying these estimators in each dimension (with the help
of \cname{SequentialPolyFilterND} class).
If the initial data set processed by this sequence represents a copula density
(for example, in case it is an empirical copula density histogram), the result
is also guaranteed to be a copula density.
In NPStat, any density estimator
implemented via the \cname{LocalPolyFilter1D} class
can be turned into the closest (in some sense) doubly stochastic estimator by
calling the \cname{doublyStochasticFilter} method of that class.
Non-negative estimators will be converted into non-negative
doubly stochastic estimators
using an iterative procedure similar to the one described in~\cite{ref:sinkhorn}
while filters with negative entries will be converted into generalized doubly
stochastic filters according to the method described in~\cite{ref:khoury}.
The following facilities are provided by NPStat for estimating
densities with Bernstein polynomials and beta distribution kernels:
\cname{BetaFilter1DBuilder} --- Constructs linear filters for
\cname{LocalPolyFilter1D} class according to Eq.~\ref{eq:betakernels}.
\cname{BernsteinFilter1DBuilder} --- Constructs linear filters for
\cname{LocalPolyFilter1D} according to Eq.~\ref{eq:bern2}. These
filters are intended for use with the \cname{convolve} method
of \cname{LocalPolyFilter1D} class rather than the \cname{filter} method.
\cname{betaKernelsBandwidth} --- This function estimates optimal
bandwidth (order $n$ of the generalized Bernstein polynomial)
for Eq.~\ref{eq:betakernels}
according to the AMISE calculation
presented in~\cite{ref:betakern}. This bandwidth estimate should not be taken
very seriously for realistic sample sizes, as finite sample performance
of Bernstein polynomial methods is not well understood.
It is worth mentioning that there are other ways to create
doubly stochastic filters. For example, the filter which
represents the discretized Green's function for the homogeneous
1-d heat equation with the Neumann boundary conditions is doubly
stochastic. This particular filter can be constructed
with the help of the \cname{symbetaLOrPEFilter1D} function,
using Gaussian kernel and specifying 0 for the degree of the
LOrPE polynomial as well as \textsf{BM\_FOLD} for the boundary
handling option. Subsequently, the \cname{convolve} method of this filter
should be utilized to smooth the data.
\subsection {Using Cross-Validation for Choosing the Bandwidth}
\label{sec:crossval}
Cross-validation is a technique for adaptive bandwidth selection
applicable to both KDE and LOrPE. Two types of cross-validation
are supported by NPStat: least squares and pseudo-likelihood.
The least squares cross-validation is based on the following idea.
The MISE from Eq.~\ref{eq:mise} can be written as
\begin{eqnarray*}
\mbox{MISE}(h) & = & E \left( \int_{-\infty}^{\infty} (\hat{p}(x|h) - p(x))^2 dx \right) \\ & = & E \left( \int_{-\infty}^{\infty} \hat{p}^2(x|h) dx \right) - 2 E \left( \int_{-\infty}^{\infty} \hat{p}(x|h) p(x) dx \right) +
\int_{-\infty}^{\infty} p^2(x) dx.
\label{eq:lsqcv}
\end{eqnarray*}
The last term, $\int_{-\infty}^{\infty} p^2(x) dx$, does not depend on $h$,
so minimization of MISE is equivalent to minimization of $B(h) \equiv E \left( \int_{-\infty}^{\infty} \hat{p}^2(x|h) dx - 2 \int_{-\infty}^{\infty} \hat{p}(x|h) p(x) dx \right).$ Of course, $p(x)$ itself is unknown. However, it can be
shown (as in section 3.4.3 of~\cite{ref:silverman})
that an unbiased estimator of $B(h)$ can be constructed as
\begin{equation}
\mbox{LSCV}(h) = \int_{-\infty}^{\infty} \hat{p}^2(x|h) dx - \frac{2}{N} \sum_{i=0}^{N-1} \hat{p}_{-1,i}(x_i, h),
\label{eq:lscv}
\end{equation}
where $\hat{p}_{-1,i}(x, h)$ is a ``leaving-one-out'' density estimator
to which the point at $x_i$ does not contribute. For example, in the
case of KDE this estimator is defined by
\begin{equation}
\hat{p}_{-1,i}(x|h) = \frac{1}{(N-1) \, h} \sum_{\stackrel{j=0}{j \ne i}}^{N-1} K \left(\frac{x - x_j}{h}\right).
\end{equation}
Minimization of $\mbox{LSCV}(h)$ can lead to a reasonable bandwidth estimate,
$h\sub{LSCV}$. However, as $h\sub{LSCV}$ is itself a random quantity,
its convergence towards the bandwidth that optimizes MISE, $h\sub{MISE}$,
is known to be rather slow. Moreover, $\mbox{LSCV}(h)$ can have
multiple minima, so its minimization is best carried out by simply
scanning $h$ within a certain range in the proximity of some value
$h^{*}$ suggested by plug-in methods. For more information on the
issues related to the least squares cross-validation see
Refs.~\cite{ref:silverman, ref:kernsmooth}.
The pseudo-likelihood cross-validation (also sometimes called
likelihood cross-validation) is based on maximizing the ``leaving-one-out''
likelihood:
\begin{equation}
\mbox{PLCV}(h) = \prod_{i=0}^{N-1} \hat{p}_{-1,i}(x_i, h).
\end{equation}
The criterion of maximum $\mbox{PLCV}(h)$
can be obtained by minimizing an~approximate
Kullback-Leibler distance between the density and its
estimate~\cite{ref:silverman}.
Maximizing $\mbox{PLCV}(h)$ is only appropriate in certain
situations. In particular, whenever $\hat{p}_{-1,i}(x_i, h)$
becomes 0 even for a~single point, this criterion fails
to produce meaningful results. Its use is also problematic
for densities with infinite support due to the strong
influence fluctuations in the distribution tails
exert on $\mbox{PLCV}(h)$. Because of these problems, NPStat
implements a ``regularized'' version of $\mbox{PLCV}(h)$
defined by
\begin{equation}
\mbox{RPLCV}(h) = \prod_{i=0}^{N-1} \max \left(\hat{p}_{-1,i}(x_i, h),\, \frac{\hat{p}\sub{self,$i$}(x_i, h)}{N^{\alpha}}\right),
\label{eq:rplcv}
\end{equation}
where $\alpha$ is the regularization parameter chosen by the user
($\alpha = 1/2$ usually works reasonably well) and
$\hat{p}\sub{self,$i$}(x, h)$ is the contribution of the data point at $x_i$
into the original density estimator. For KDE, this contribution is
\begin{equation}
\hat{p}\sub{self,$i$}(x|h) = \frac{1}{N h} K \left(\frac{x - x_i}{h}\right).
\end{equation}
If the bandwidth is fixed, $\hat{p}\sub{self,$i$}(x_i|h) = K(0)/(Nh)$
for every point $x_i$.
$\hat{p}\sub{self,$i$}(x_i, h)$ changes from one point to another for
LOrPE and for variable-bandwidth KDE.
Cross-validation in the NPStat package is implemented for discretized
KDE and LOrPE density estimators.
It is assumed that the optimal bandwidth corresponds
to the maximum of some quantity, as in the case of $\mbox{RPLCV}(h)$.
All classes which perform cross-validation for univariate densities
inherit from the abstract base class \cname{AbsBandwidthCV1D}. For
multivariate densities, the corresponding base class is \cname{AbsBandwidthCVND}
(both of these base classes are declared in the header file
``npstat/stat/AbsBandwidthCV.hh''). The following concrete classes
can be used:
\cname{BandwidthCVLeastSquares1D} --- implements Eq.~\ref{eq:lscv} for
KDE and LOrPE in 1-d.
\cname{BandwidthCVLeastSquaresND} --- implements Eq.~\ref{eq:lscv} for
multivariate KDE and LOrPE.
\cname{BandwidthCVPseudoLogli1D} --- implements Eq.~\ref{eq:rplcv} for
KDE and LOrPE in 1-d.
\cname{BandwidthCVPseudoLogliND} --- implements Eq.~\ref{eq:rplcv} for
multivariate KDE and LOrPE.
\noindent The cross-validation classes are used internally by such high-level
classes as \cname{KDECopulaSmoother}, \cname{LOrPECopulaSmoother}, and
\cname{SequentialCopulaSmoother}.
\subsection {The Nearest Neighbor Method}
Using NPStat tools, a simple density estimation algorithm similar to
the $k$-nearest neighbor method~\cite{ref:silverman}
can be implemented for one-dimensional samples as follows:
\begin{thinlist}
\item Discretize the data using a finely binned histogram.
\item Convert this histogram into a distribution by constructing a \cname{BinnedDensity1D} object.
\item For any point $x$ at which a density estimate is desired,
calculate the corresponding cumulative distribution value $y = F(x)$.
\item For some interval $\Delta$, $0 < \Delta < 1$, estimate the density
at $x$ by
\begin{equation}
\hat{p}\sub{NN}(x) = \frac{\Delta}{q(y + \Delta/2)\, - \,q(y - \Delta/2)},
\end{equation}
where $q(y)$ is the quantile function: $q(F(x)) = x$.
This formula assumes $y - \Delta/2 \ge 0$ and $y + \Delta/2 \le 1$.
If $y + \Delta/2 > 1$ then the $\hat{p}\sub{NN}(x)$ denominator should
be replaced by $q(1)\, - \,q(1 - \Delta)$, and if $y - \Delta/2 < 0$
then the denominator should become $q(\Delta)\, - \,q(0)$.
\end{thinlist}
In this approach, the parameter $\Delta$ plays the same role as the $k/N$ ratio
in the standard $k$-nearest neighbor method.
For best results, $\Delta$ should scale with the number
of sample points as $N^{-1/5}$,
and the optimal constant of proportionality depends on
the estimated density itself~\cite{ref:silverman}.
The $k$-nearest neighbor method (and its modification just described)
is not recommended for estimating
densities with infinite support as it leads to a diverging
density integral.
For multivariate samples, a similar estimate can be constructed with
the help of \cname{HistoNDCdf} class. Its method \cname{coveringBox}
can be used to find the smallest $d$-dimensional box with the given center
and fixed proportions which encloses the desired sample fraction.
\section{Nonparametric Regression}
\label{sec:npregression}
``Regression'' refers to a form of data analysis
in which the behavior of the dependent variable, called ``response'',
is deduced as a function of the independent variable, called ``predictor'',
from a sample of observations.
The response values are considered random ({\it e.g.}, contaminated by
noise) while the predictor values are usually assumed to be deterministic.
The analysis purpose is thus to determine the location parameter
of the response distribution
(mean, median, mode, {\it etc}) as a function of the predictor.
In the NPStat algorithms, the response is always assumed to be a
univariate quantity while the predictor can be either univariate or
multivariate. In the discussion below, predictor will be denoted
by ${\bf x}$, response by $y$, $\mu({\bf x})$ will be used
to describe the response location function, and $\hat{\mu}({\bf x})$
will denote an~estimator of $\mu({\bf x})$.
``Nonparametric regression'' refers to a form of regression analysis
in which no global parametric model is postulated for $\mu({\bf x})$.
Instead, for every ${\bf x}\sub{fit}$, $\mu({\bf x})$ is described
in the vicinity of ${\bf x}\sub{fit}$ by a relatively
simple model which is fitted using sample points located nearby.
Further discussion of $\mu({\bf x})$ estimation depends critically
on the assumptions which can be made about the distribution of
response values.
\subsection{Local Least Squares}
With the additional assumption of Gaussian error distribution ({\it i.e.,}
$y_i = \mu({\bf x}_i) + \epsilon_i$, where $\epsilon_i$ are normally distributed
with mean 0 and standard deviation $\sigma_i$), the
model fitting can be efficiently performed by the method of local
least squares. In this method, $\hat{\mu}({\bf x})$ is found by
minimizing the quantity:
\begin{equation}
\chi^2({\bf x}\sub{fit}, h) = \sum_{i=0}^{N-1} \left(\frac{y_i - \hat{\mu}({\bf x}_i|{\bf x}\sub{fit},h)}{\sigma_i}\right)^2 K_h({\bf x}_i - {\bf x}\sub{fit}).
\label{eq:localleastsq}
\end{equation}
Here, $h$ refers to one or more parameters which determine
the extent of the kernel $K_h({\bf x})$.
In NPStat, $\hat{\mu}({\bf x}|{\bf x}\sub{fit},h)$ is usually decomposed
into orthogonal polynomials. In the case of univariate predictor,
\begin{equation}
\hat{\mu}(x|x\sub{fit},h) = \sum_{k=0}^{M} \hat{a}_k(x\sub{fit},h) P_k\left(\frac{x - x\sub{fit}}{h}\right),
\label{eq:regmodel}
\end{equation}
where polynomials $P_k(x)$ are subject to normalization condition~\ref{eq:norm0}.
The expansion coefficients $\hat{a}_k(x\sub{fit},h)$ are determined from
the equations $\partial \chi^2(x\sub{fit},h)/ \partial \hat{a}_k = 0$
which leads to $M + 1$ simultaneous equations for $k = 0, 1, ..., M$:
\begin{equation}
\sum_{i=0}^{N-1} \frac{1}{\sigma_i^2} \left( y_i - \sum_{j=0}^{M} \hat{a}_j(x\sub{fit},h) P_j\left(\frac{x_i - x\sub{fit}}{h}\right) \right) K\left(\frac{x_i - x\sub{fit}}{h}\right) P_k\left(\frac{x_i - x\sub{fit}}{h}\right) = 0.
\label{eq:regsystem}
\end{equation}
If the predictor values $x_i$ are specified on a regular grid of points
then the discretized version of Eq.~\ref{eq:norm0} is just
\begin{equation}
\frac{h_g}{h} \sum_{i=0}^{N-1} P_j\left(\frac{x_i - x\sub{fit}}{h} \right) K\left(\frac{x_i - x\sub{fit}}{h}\right) P_k\left(\frac{x_i - x\sub{fit}}{h}\right) = \delta_{jk},
\end{equation}
where $h_g$ is the distance between any two adjacent values of $x_i$.
This leads to a particularly simple solution for $\hat{a}_k(x\sub{fit},h)$
if the model can be assumed at least locally homoscedastic
({\it i.e.,} if all $\sigma_i$ are the same in the vicinity of $x\sub{fit}$):
\begin{equation}
\hat{a}_k(x\sub{fit},h) = \frac{h_g}{h} \sum_{i=0}^{N-1} y_i K\left(\frac{x_i - x\sub{fit}}{h}\right) P_k\left(\frac{x_i - x\sub{fit}}{h}\right).
\label{eq:regconvol}
\end{equation}
Substituting this into Eq.~\ref{eq:regmodel}, one gets
\begin{equation}
\hat{\mu}(x\sub{fit} | h) = \frac{h_g}{h} \sum_{i=0}^{N-1} \sum_{k=0}^{M} y_i K\left(\frac{x_i - x\sub{fit}}{h}\right) P_k\left(\frac{x_i - x\sub{fit}}{h}\right) P_k(0).
\label{eq:effreg}
\end{equation}
If $K(x)$ is an even function and $x$ is far away from
the boundaries of the interval on which the regression is performed,
Eq.~\ref{eq:effreg} is equivalent to the well-known Nadaraya-Watson estimator,
\begin{equation}
\hat{\mu}_{NW}(x\sub{fit} | h) = \frac{\sum_{i=0}^{N-1} y_i K\sub{eff}\left(\frac{x_i - x\sub{fit}}{h}\right)}{\sum_{i=0}^{N-1} K\sub{eff}\left(\frac{x_i - x\sub{fit}}{h}\right)},
\end{equation}
with the effective kernel given by
\begin{equation}
K\sub{eff}(x) = \sum_{k=0}^{M} P_{k}(0) P_{k}(x) K(x).
\end{equation}
Just as in the case of LOrPE, a taper function can be introduced
for this kernel which leads to Eq.~\ref{eq:effk}.
If the predictor values $x_i$ are arbitrary or if the model can not
be considered homoscedastic even locally, there is no
simple formula which solves the linear system~\ref{eq:regsystem}.
In this case the equations must be solved numerically.
To perform this calculation, NPStat calls appropriate routines
from LAPACK~\cite{ref:lapack}.
% Modeling the response by orthogonal polynomial series
% (as opposed to monomial expansion) helps to prevent ill-conditioning
% of the system and significantly improves the numerical stability
% of the solutions.
Generalization of local least squares methods to multivariate predictors
is straightforward: one simply switches to multivariate kernels
and polynomial systems.
The following NPStat classes can be used to perform local least
squares regression of locally homoscedastic polynomial models
on regular grids:
\cname{LocalPolyFilter1D}, \cname{LocalPolyFilterND}, and \cname{SequentialPolyFilterND} --- these
classes have already been mentioned in Section~\ref{sec:lorpe}.
It turns out that LOrPE of discretized data essentially amounts to
local least squares regression on histogram bin contents. To see that,
compare Eqs.~\ref{eq:regconvol} and~\ref{eq:convol}.
Up to an overall normalization
constant, Eq.~\ref{eq:regconvol} is just a discretized version of Eq.~\ref{eq:convol}.
In fact, all three ``PolyFilter'' classes actually perform local least squares regression
which, in the signal analysis terminology, is a linear filtering procedure. If the result
is to be treated as a density, it has to be truncated below zero and renormalized by the user.
\cname{QuadraticOrthoPolyND} --- this class supports a finer interface to the
local least squares regression functionality on a grid than \cname{LocalPolyFilterND},
but only for polynomials up to second degree. In addition to the response
itself, this class can be used to calculate the gradient
and the hessian of the local response surface defined by
Eq.~\ref{eq:regmodel}\footnote{This can be useful, for example,
for summarizing properties of log-likelihoods defined on grids
in the space of estimated parameters for some parametric
statistical models.}.
The predictor/response data can be
provided by a method of some class (callback)
which is sometimes more convenient
than using just a~grid of points.
\cname{LocalQuadraticLeastSquaresND} --- this class fits local
least squares regression models for arbitrary multivariate predictor values
(no longer required to be on a grid).
The models can be heteroscedastic, as in the most general case of Eq.~\ref{eq:regsystem}.
The polynomials can be at most quadratic. Calculation of the gradient
and the hessian of the local response surface is supported.
\subsection{Local Logistic Regression}
\label{sec:locallog}
For regressing binary response variables,
NPStat implements a method known as ``local quadratic logistic regression''
(LQLR). This method is a trivial extension of the local linear
logistic regression originally proposed in~\cite{ref:localreg}.
In this type of analysis, ``response'' is the probability of
success in a Bernoulli trial, estimated as a function of one or more
predictors. Due to the manner in which it is often used, this probability
will be called ``efficiency'' for the remainder of this section.
In the LQLR model, the efficiency dependence on ${\bf x}$ is represented by
\begin{equation}
\epsilon({\bf x}) = \frac{1}{1 + e^{-P({\bf x})}}
\end{equation}
where $P({\bf x})$ is a multivariate quadratic polynomial whose
coefficients are determined at each predictor value ${\bf x}\sub{fit}$
by maximizing the local log-likelihood:
\begin{equation}
L({\bf x}\sub{fit}, h) = \sum_{i=0}^{N-1} K_h({\bf x}_{i} - {\bf x}\sub{fit}) \left[y_i \ln (\hat{\epsilon}({\bf x}_{i})) + (1 - y_i) \ln (1 - \hat{\epsilon}({\bf x}_{i})) \right].
\label{eq:lqlrlogli}
\end{equation}
Here,
$K_h({\bf x}_{i} - {\bf x}\sub{fit})$ is a~suitable localizing kernel which decays to 0
when ${\bf x}_{i}$ is far from ${\bf x}\sub{fit}$, and $y_i$ are the observed values
of the Bernoulli trial: 1 if the point ``passes'' and 0 if it ``fails''.
The local log-likelihood~\ref{eq:lqlrlogli} is very similar to the one
implemented in the Locfit package~\cite{ref:locfit}, the only difference
is that orthogonal polynomials are used in NPStat to construct
$P({\bf x})$ series instead of monomials.
Setting partial derivatives of $L({\bf x}\sub{fit}, h)$ with respect to
polynomial coefficients to 0 results in a system of nonlinear
equations for these coefficients. Solving such a system of equations
does not appear to be any easier than dealing with the original log-likelihood
optimization problem by applying, say, the Levenberg-Marquardt
algorithm~\cite{ref:lvm}.
NPStat includes facilities for efficient calculation of the LQLR
log-likelihood together with
its gradient with respect to $P({\bf x})$ expansion coefficients.
This code does not rely on any specific optimization solver,
and can be easily interfaced to a~number of different external
optimization tools. The relevant classes are:
\cname{LogisticRegressionOnKDTree} (header file ``npstat/stat/LocalLogisticRegression.hh'') --- this class calculates the log-likelihood
from Eq.~\ref{eq:lqlrlogli} in the assumption that the kernel $K_h({\bf x})$
has finite support. In this case iterating over all $N$ points in the sample
becomes rather inefficient: there is no reason to cycle over values of
${\bf x}_{i}$ far away from ${\bf x}\sub{fit}$ because
$K_h({\bf x}_{i} - {\bf x}\sub{fit})$ is identically zero for all such points.
To automatically restrict the iterated range, the predictor values
are arranged into a space-partitioning data structure known
as $k$-$d$~tree~\cite{ref:kdtree} which is implemented in NPStat with the
\cname{KDTree} class (header file ``npstat/nm/KDTree.hh'').
\cname{LogisticRegressionOnGrid} (header file ``npstat/stat/LocalLogisticRegression.hh'') --- this class calculates the log-likelihood
from Eq.~\ref{eq:lqlrlogli} in the assumption that the predictor values
are histogrammed. Two identically binned histograms must be available: the one
with all values of $y_i$ (``the denominator'') and the one which collects
only those ${\bf x}_{i}$ for which $y_i = 1$ (``the numerator''). Naturally,
only the bins sufficiently close to ${\bf x}\sub{fit}$ are
processed when the LQLR log-likelihood is evaluated for these histograms.
The ``interfaces'' directory of the NPStat package includes two
high-level driver functions for fitting LQLR response
surfaces (header file
``npstat/interfaces/minuitLocalRegression.hh'').
These functions employ a general-purpose
optimization package Minuit~\cite{ref:minuit} for maximizing
the log-likelihood. The names of the functions are
\cname{minuitUnbinnedLogisticRegression}
(intended for use with \cname{LogisticRegressionOnKDTree})
and \cname{minuitLogisticRegressionOnGrid}
(for use with \cname{LogisticRegressionOnGrid}).
To use these functions, the Minuit
package has to be compiled and linked together
with the user code which provides the data
and calls the functions.
\subsection{Local Quantile Regression}
\label{sec:localqreg}
The method of least squares allows us to solve the
problem of response mean determination in the regression
context. By recasting calculation of the sample
mean as a minimization problem, we have gained the
ability to condition the mean
on the value of the predictor. In a~similar manner,
the method of least absolute deviations can be used
to determine conditional median.
In the method of least squares,
the expression $S(f) = \sum_{i} f(y_i - \hat{\mu}({\bf x}_i))$
is minimized, with $f(t) = t^2$. The method of least absolute
deviations differs only by setting $f(t) = |t|$.
Moreover, just like the problem of determination
of response mean can be localized
by introducing a kernel in the predictor space
(resulting in local least squares, as in Eq.~\ref{eq:localleastsq}),
the problem of response median determination
can be subjected to the same localization treatment.
As a method of determination of response location,
local median regression is extremely robust (insensitive to outliers).
Not only the median but an arbitrary
distribution quantile can also be determined in this manner.
The corresponding function to use is
\begin{equation}
f_q(t) = q \,t \,I(t \ge 0) - (1 - q) \,t \,I(t < 0),
\end{equation}
where $q$ is the cumulative distribution value of interest, $0 < q < 1$.
You can easily convince yourself of the validity of this statement
as follows. For a sample of points ${y_0, ..., y_{N-1}}$,
define $t = y - y_q$, where $y_q$ is a parameter on which $S(f_q)$ depends.
The condition for the minimum of $S(f_q)$, $\frac{d S(f_q)}{d y_q} = 0$,
is then equivalent to $\frac{d S(f_q)}{d t} = 0$.
As $\frac{d f_q(t)}{d t} = q \,I(t > 0) - (1 - q) \,I(t < 0)$, the minimum
of $S(f_q)$ is reached when
\begin{equation}
q \sum_{i=0}^{N-1} I(y_i > y_q) - (1 - q) \sum_{i=0}^{N-1} I(y_i < y_q) = 0.
\label{eq:qmin}
\end{equation}
This equation is solved when the number of sample points for which
$y_i > y_q$ is $(1 - q) N$ and the number of sample points for which
$y_i < y_q$ is $q N$. But this is precisely the definition of
the sample quantile which, in the limit $N \rightarrow \infty$, becomes
the population quantile of interest.
Unfortunately, solving Eq.~\ref{eq:qmin} numerically
in the regression context is usually significantly more
challenging than solving the corresponding $\chi^2$ minimization
problem. For realistic finite samples, $\frac{d S(f_q)}{d y_q}$ is not
a continuous function, and Eq.~\ref{eq:qmin} can have multiple solutions.
This basically rules out the use of standard gradient-based methods
for $S(f_q)$ minimization.
The following NPStat classes facilitate the solution of the local
quantile regression problem:
\cname{QuantileRegression1D} --- calculates the expression to
be minimized (also called the ``loss function'') for a single univariate
predictor value $x\sub{fit}$. This expression looks as follows:
\begin{equation}
S\sub{LQR}(x\sub{fit}) = \sum_{i=0}^{N-1} f_q(y_i - \hat{y}_q(x_i|x\sub{fit})) w_i,
\end{equation}
where weights $w_i$ are provided by the user (for example, these
weights can be calculated as $w_i = K((x_i - x\sub{fit})/h)$, but more sophisticated
weighting schemes can be applied as well). The quantile dependence on the
predictor is modeled by
\begin{equation}
\hat{y}_q(x|x\sub{fit}) = \sum_{k=0}^{M} \hat{a}_k(x\sub{fit}) P_k\left(\frac{x - x\sub{fit}}{h}\right),
\label{eq:quantpred}
\end{equation}
where $P_k(x)$ are either Legendre or Gegenbauer polynomials. The use of Legendre
polynomials is appropriate for a global fit of the regression curve, while
Gegenbauer polynomials are intended for use in combination with symmetric beta
kernels that generate localizing weights~$w_i$. The expansion coefficients
$\hat{a}_k(x\sub{fit})$ are to be determined by minimizing $S\sub{LQR}(x\sub{fit})$.
\cname{QuantileRegressionOnKDTree} (header file ``npstat/stat/LocalQuantileRegression.hh'')
--- calculates the quantile regression
loss function for a multivariate predictor (the class method
which returns it is called \cname{linearLoss}):
\begin{equation}
S\sub{LQR}({\bf x}\sub{fit}, h) = \sum_{i=0}^{N-1} f_q(y_i - \hat{y}_q({\bf x}_i|{\bf x}\sub{fit})) K_h({\bf x}_i - {\bf x}\sub{fit}).
\label{eq:quantregnd}
\end{equation}
The quantile dependence on the predictor is modeled by an expansion similar to
Eq.~\ref{eq:quantpred} in which multivariate orthogonal polynomials
(up to second order) are generated by $K_h({\bf x})$ used as the weight
function. The expansion coefficients are to be determined for each
${\bf x}\sub{fit}$ separately by minimizing
$S\sub{LQR}({\bf x}\sub{fit}, h)$. The predictor values are arranged into
a~$k$-$d$~tree structure for reasons similar to those mentioned when
the \cname{LogisticRegressionOnKDTree} class was described.
\cname{QuantileRegressionOnHisto} (header file ``npstat/stat/LocalQuantileRegression.hh'')
--- calculates loss function~\ref{eq:quantregnd}
in the assumption that the response is histogrammed on a regular
grid of predictor values (so that the input histogram dimensionality is larger
by one than the dimensionality of the predictor variable).
\cname{CensoredQuantileRegressionOnKDTree} (header ``npstat/stat/CensoredQuantileRegression.hh'')
--- this class constructs an
appropriate quantile regression loss function in case some of the response
values are unavailable due to censoring ({\it i.e.,} there is a cutoff
from above or from below on the response values).
For example, this situation can occur in modeling of jet response of a
particle detector when jets with visible energy below certain cutoff
are not reconstructed due to limitations in the clustering procedure.
It is assumed that the censoring efficiency ({\it i.e.,} the fraction
of points not affected by the cutoff) can be determined as a~function
of the predictor by other techniques, like the local logistic regression
described in section~\ref{sec:locallog}. It is also assumed
that the cutoff value is known as a~function of the predictor, and that
the presence of this cutoff is the only reason for inefficiency.
The appropriate loss function in this case is
\begin{equation}
S\sub{CLQR}({\bf x}\sub{fit}, h) = \sum_{i=0}^{N\sub{p}-1} \left[f_q(y_i - \hat{y}_q({\bf x}_i|{\bf x}\sub{fit})) + \frac{1 - \epsilon_i}{\epsilon_i} g_q(\epsilon_i, y_{\mbox{\scriptsize cut},i} - \hat{y}_q({\bf x}_i|{\bf x}\sub{fit}))\right] K_h({\bf x}_i - {\bf x}\sub{fit}),
\label{eq:clqr}
\end{equation}
where the summation is performed only over the $N\sub{p}$ points
in the sample surviving the cutoff. $\epsilon_i$ is the censoring
efficiency for the given ${\bf x}_i$. The function
$g_q(\epsilon, t)$ is defined differently for right-censored (R-C) samples
in which surviving points are below the cutoff and left-censored (L-C) samples
in which surviving points are above the cutoff:
\begin{equation}
g_{q,\mbox{\scriptsize R-C}}(\epsilon, t) = I(q < \epsilon) f_q(t) + I(q \ge \epsilon) \left(\frac{q - \epsilon}{1 - \epsilon} f_q(t) + \frac{1 - q}{1 - \epsilon} f_q(+\infty) \right)
\label{eq:clqrrc}
\end{equation}
\begin{equation}
g_{q,\mbox{\scriptsize L-C}}(\epsilon, t) = I(q > 1 - \epsilon) f_q(t) + I(q \le 1 - \epsilon) \left( \frac{1 - \epsilon - q}{1 - \epsilon} f_q(t) + \frac{q}{1 - \epsilon} f_q(-\infty) \right)
\label{eq:clqrlc}
\end{equation}
Formulae~\ref{eq:clqr}, \ref{eq:clqrrc}, and~\ref{eq:clqrlc} were inspired
by Ref.~\cite{ref:localquantreg}. They can be understood as follows. Consider, for example,
a right-censored sample. If this sample was not censored, the value of the
response cumulative distribution function at $y_{\mbox{\scriptsize cut},i}$
would be equal $\epsilon_i$.
For each point with response $y_i$ below the cutoff,
there are $(1 - \epsilon_i)/\epsilon_i$ unobserved points above the cutoff (and $1/\epsilon_i$ points total).
Before localization, points below
the cutoff contribute the usual amount $f_q(y_i - \hat{y}_q({\bf x}_i|{\bf x}\sub{fit}))$
into $S\sub{CLQR}({\bf x}\sub{fit}, h)$ (compare with
Eq.~\ref{eq:quantregnd}). To the points above cutoff, we assign
the value of response which equals either
$y_{\mbox{\scriptsize cut},i}$ or $+\infty$, in such a manner that the estimate
$\hat{y}_q({\bf x}_i|{\bf x}\sub{fit})$ is ``pushed'' in the
right direction and by the right amount when it crosses $y_{\mbox{\scriptsize cut},i}$.
If the estimated quantile is less than the efficiency, all unobserved points
are assigned the response value $y_{\mbox{\scriptsize cut},i}$
(this corresponds to the term $I(q < \epsilon) f_q(t)$ in the Eq.~\ref{eq:clqrrc}). This is
because we know that the correct value of $\hat{y}_q({\bf x}_i|{\bf x}\sub{fit})$
should be below $y_{\mbox{\scriptsize cut},i}$, so the penalty for placing
$\hat{y}_q({\bf x}_i|{\bf x}\sub{fit})$ above the cutoff is generated
by all points, including the unobserved ones. If the chosen quantile is
larger than the efficiency, the only thing we know is that the correct value of
$\hat{y}_q({\bf x}_i|{\bf x}\sub{fit})$ should be inside the interval
$(y_{\mbox{\scriptsize cut},i}, +\infty)$. There is no reason to prefer
any particular value from this interval, so the overall contribution of
sample point $i$ for which $\hat{y}_q({\bf x}_i|{\bf x}\sub{fit}) \in (y_{\mbox{\scriptsize cut},i}, +\infty)$
into $S\sub{CLQR}({\bf x}\sub{fit}, h)$ must not depend on
$\hat{y}_q({\bf x}_i|{\bf x}\sub{fit})$\footnote{If most points ${\bf x}_i$
are like that for some ${\bf x}\sub{fit}$ value, the fit becomes
unreliable. Consider increasing the bandwidth of the localization
kernel or avoid such situations altogether. You should not expect
to obtain good results for high (low) $q$ values and all possible
${\bf x}\sub{fit}$ in a right (left)-censored sample.}.
We do, however, want to prevent $\hat{y}_q({\bf x}_i|{\bf x}\sub{fit})$ from leaving
this interval. This is achieved by placing so many points at $y_{\mbox{\scriptsize cut},i}$
that the fraction of sample points (including both observed and unobserved ones)
at or below $y_{\mbox{\scriptsize cut},i}$ is exactly $q$, and this is precisely
what the term proportional to $I(q \ge \epsilon)$ does in Eq.~\ref{eq:clqrrc}.
Similar reasoning applied to a left-censored sample leads to Eq.~\ref{eq:clqrlc}.
Naturally, in the computer program, $-\infty$ and $+\infty$ in Eqs.~\ref{eq:clqrrc} and~\ref{eq:clqrlc}
should be replaced by suitable user-provided numbers which are known to be below and above,
respectively, all possible response values. In order to avoid deterioration
in $S\sub{CLQR}({\bf x}\sub{fit}, h)$ numerical precision, these numbers should not be
very different from the minimum and maximum observed response.
\cname{CensoredQuantileRegressionOnHisto} (header ``npstat/stat/CensoredQuantileRegression.hh'')
--- this class calculates
the loss function~\ref{eq:clqr}
in the assumption that all information about response, efficiency, and cutoffs is
provided on a~regular grid in the space of predictor values.
The ``interfaces'' directory of the NPStat package includes several high-level
driver functions for performing local quantile regression. These driver functions
use the simplex minimization method of the Minuit package to perform local fitting of the
quantile curve expansion coefficients.
\cname{minuitLocalQuantileRegression1D} function uses \cname{QuantileRegression1D}
class internally to perform local quantile regression with one-dimensional predictors.
This driver function can be used, for example, to construct Neyman belts from
numerical simulations of some statistical estimator.
A similar function, \cname{weightedLocalQuantileRegression1D}, can be used
to perform local quantile regression with one-dimensional predictors when
the points are weighted.
The \cname{minuitQuantileRegression} driver function can use one
of the \cname{QuantileRegressionOnKDTree}, \cname{QuantileRegressionOnHisto},
\cname{CensoredQuantileRegressionOnKDTree}, or
\cname{CensoredQuantileRegressionOnHisto} classes
(all of which inherit from a common base)
to perform local quantile regression with multivariate predictors.
The function \cname{minuitQuantileRegressionIncrBW} has similar
functionality but it can also automatically increase the localization kernel
bandwidth so that not less than a certain predefined fraction of the whole
sample participates in the local quantile determination
for each ${\bf x}\sub{fit}$.
\subsection{Iterative Local Least Trimmed Squares}
The NPStat package includes an implementation of an iterative
local least trimmed
squares (ILLTS) algorithm applicable when predictor/response
values are supplied on a regular grid of points in a multivariate
predictor space (think image denoising). The algorithm operation
consists of the following steps:
\begin{enumerate}
\item Systems of orthogonal polynomials up to user-defined degree $M$
are constructed using weight functions $K_{h,-j}({\bf x})$.
These weight functions are defined using symmetric
finite support kernels in which the point in the kernel
center together with $j - 1$ other points are set to 0.
Imagine, for example, a $5 \times 5$ grid in two dimensions.
Start with the uniform $K_h({\bf x})$ kernel which is 1 at every
grid point. The $K_{h,-1}({\bf x})$ weight function
is produced from $K_h({\bf x})$ by setting the central grid
cell to 0. 24 weight functions of type $K_{h,-2}({\bf x})$
are produced from $K_{h,-1}({\bf x})$ by setting one other
grid cell to 0, in addition to the central one. 276
weight functions of type $K_{h,-3}({\bf x})$ are produced
from $K_{h,-1}({\bf x})$ by choosing 2 out of 24 remaining
cells which are set to 0, and so on.
\item Local least squares regression is performed using all
polynomial systems generated by $K_{h,-j}({\bf x})$ weights
for all possible positions of the kernel inside the predictor
grid (sliding window)\footnote{Edge effects are taken into
account by constructing special polynomial systems
which use boundary kernels similar to
$K_{h,-j}({\bf x})$ but with different placement of zeros.}.
For each kernel position,
we find the polynomial system which produces the best
$\chi^2$ calculated over grid points for
which the weight function is not 0. For this polynomial
system, we determine the value
$\Delta = |y_c - y_{c,\mbox{\scriptsize fit}}|$ for the
kernel center, where $y_c$ is the response value in
the data sample and $y_{c,\mbox{\scriptsize fit}}$ is
the response value produced by the local least squares fit.
\item The position of the kernel is found for which
$\Delta$ is the largest in the whole data sample.
\item The response for the position found
in the previous step is adjusted by setting it
to the value fitted at the kernel center.
\item $\Delta$ is recalculated for all kernel positions affected by
the adjustment performed in the previous step.
\item The previous three steps are repeated until some stopping
criterion is satisfied. For example, the requirement that
the largest $\Delta$ in the grid becomes small (below certain
cutoff) can serve as such a stopping criterion.
\end{enumerate}
The ILLTS algorithm works best if the fraction of outliers in the
sample is relatively small and the response errors are homoscedastic.
ILLTS is expected to be more efficient (in the statistical sense) than the local
quantile regression. If the spectrum of response errors is not known
in advance, it becomes very instructive to plot the history
of $\Delta$ values for which response adjustments were performed.
This plot often exhibits two characteristic ``knees'' which
correspond to the suppression of outliers and suppression of ``normal''
response noise. The procedure can be stopped somewhere
between these knees and followed up by normal local
least squares on the adjusted sample, perhaps utilizing
different bandwidth.
Unfortunately, computational complexity of the ILLTS
algorithm is increasing exponentially
with increasing $j$, so only very small values of $j$ are
practical. The inability to use high values of $j$ can
be partially compensated for by choosing smaller bandwidth
values and performing more iteration cycles. More cycles
result in larger {\it effective} bandwidth --- think, for example,
what happens when you perform local least squares multiple
times. For certain kernels like Gaussian or Cauchy
({\it i.e.,} stable distributions) and polynomial degree $M = 0$ (local
constant fit),
this is exactly equivalent to choosing larger bandwidth.
For other types of kernels not only the effective bandwidth increases
with the number of passes
but also the effective kernel shape gets modified\footnote{ILLTS adds
the dimension of time (adjustment cycle number) to the
solution of robust regression problem. It would be interesting to explore
this, for example, by introducing time-dependent bandwidth or
by studying the connection between the ILLTS updating scheme and
the numerous updating schemes employed in solving
partial differential equations.}.
The top-level API function for running the ILLTS algorithm is called
\cname{griddedRobustRegression}. The $K_{h,-1}({\bf x})$ weights
can be used by supplying an object of \cname{WeightedLTSLoss} type
as its loss calculator argument, and $K_{h,-2}({\bf x})$ weights
are used by choosing \cname{TwoPointsLTSLoss} instead. A simple
stopping criterion based on the $\Delta$ value, local least trimmed
squares $\chi^2$, or the number of adjustment cycles can be specified
with an object of \cname{GriddedRobustRegressionStop} type.
The \cname{griddedRobustRegression} implementation
is rather general, and can accept user-developed loss calculators and
stopping criteria.
\subsection{Organizing Regression Results}
It is usually desirable to calculate the regression
surface on a reasonably fine predictor grid and save the
result of this calculation for subsequent
fast lookup. A general interface for such a lookup is provided
by the \cname{AbsMultivariateFunctor} class
(header file ``npstat/nm/AbsMultivariateFunctor.hh''). This class
can be used to represent results of both parametric and
nonparametric fits.
Persistent classes \cname{StorableInterpolationFunctor} and
\cname{StorableHistoNDFunctor} derived from \cname{AbsMultivariateFunctor}
are designed to represent
nonparametric regression results. Both of these classes
assume that the regression was
performed on a (hyper)rectangular grid of predictor
points. \cname{StorableInterpolationFunctor}
supports multilinear interpolation and extrapolation of results,
with flexible extrapolation (constant or linear) for each
dimension beyond the grid boundaries. This class essentially
combines the \cname{AbsMultivariateFunctor} interface with
the functionality of the \cname{LinInterpolatedTableND} class
discussed in more detail in Section~\ref{sec:utilities}.
The class \cname{StorableHistoNDFunctor} allows for constant,
multilinear, and multicubic interpolation\footnote{Multicubic
interpolation is supported for uniform grids only.}
inside the grid boundaries, and for constant extrapolation outside
the boundaries (the extrapolated response value is set to its value at
the closest boundary point). Both \cname{StorableInterpolationFunctor}
and \cname{StorableHistoNDFunctor} support arbitrary transformations
of the response variable via a user-provided functor. If a
transformation was initially applied to the
response values in order to simplify subsequent modeling,
this is a good place to perform the inverse.
\section{Unfolding with Smoothing}
\label{sec:unfolding}
In particle physics, the term {\it unfolding} is used to describe
methods of nonparametric reconstruction of probability densities
using observations affected by particle detector resolution and inefficiency
(see~\cite{ref:unfoldingrev} for a contemporary review).
In other natural sciences, the term {\it inverse problem} is commonly
used~\cite{ref:numrecipes}, while in the statistical literature
a more specific name, {\it deconvolution density estimate}, is
becoming the norm~\cite{ref:deconvolutionbook}.
\subsection{Unfolding Problem}
For the purpose of stating the unfolding problem,
it will be assumed that the detector can be described by an
operator $K$. This operator (also called {\it kernel},
{\it transfer function}, {\it observation function},
or {\it response function}, depending on the author and context)
converts probability
densities $p({\bf x})$
in the physical process space ${\bf x}$ into the densities $q({\bf y})$ in the
observation space ${\bf y}$:
$q = K p \equiv \int K({\bf y}, {\bf x}) p({\bf x}) {\bf dx}$.
The response function does not have
to be fully efficient: $q$ does not have to integrate to 1 when
$p$ is normalized. In the subsequent discussion, operator $K$ will be assumed
linear and exactly known but not necessarily invertible.
In many situations of interest, observations are described by the empirical
density function ({\it i.e.}, there is no error term associated with
each individual observation):
\begin{equation}
\rho_e({\bf y}) = \frac{1}{N} \sum_{i=1}^{N} \delta({\bf y} - {\bf y}_i).
\end{equation}
In this case, the probability to observe a point at ${\bf y}_i$ is given by
the normalized version of $q$ called $r$: $r = q/\epsilon$.
In case $p$ is normalized,
\begin{equation}
\epsilon = \int q({\bf y}) \,{\bf dy} = \int K p \,{\bf dy}
\label{eq:efficiency}
\end{equation}
is the overall detector
acceptance for the physical process under study.
The purpose of unfolding is to learn as much as possible about $p({\bf x})$
given $\rho_e({\bf y})$ when a~parametric model for $p({\bf x})$ is lacking.
The difficulty of this problem can be easily appreciated from the following
argument. $K$ typically acts as a low-pass filter. For measurements of
a~single scalar quantity, it can often be assumed that the detector
has resolution $\sigma$ and that $K(y, x) = {\cal N}(y - x, \sigma^2)$, so that
the detector simply convolves $p(x)$ with the normal distribution.
If $q$ was exactly known, the Fourier transform of $p$ could simply
be obtained from $p(\omega) = q(\omega)/K(\omega)$. As $q(\omega)$ is
not known, the closest available approximation is the characteristic
function of $\rho_e(y)$:
$\rho_e(\omega) = \int \rho_e(y) e^{i \omega y} dy = \frac{1}{N} \sum_{i=1}^{N} e^{i \omega y_i}$. As the characteristic function of the normal
distribution is just $K(\omega) = e^{-\sigma^2 \omega^2/2}$, the ratio
$\rho_e(\omega)/K(\omega)$ becomes arbitrarily large
as $\omega \rightarrow \infty$. The ``naive'' method of
estimating $p(\omega)$ as $\rho_e(\omega)/K(\omega)$ thus fails
miserably: the high frequency components of the noise contained
in the $\rho_e(\omega)$ are multiplied by an arbitrarily large factor
so that $\rho_e(\omega)/K(\omega)$ isn't even square-integrable.
A number of effective
approaches to solving the pure deconvolution problem just described
are discussed in~\cite{ref:deconvolutionbook}. These approaches invariably involve
introduction of additional smoothness assumptions about either
$p(x)$ or $q(y)$ or both. Such
assumptions essentially declare that the high frequency components of $p(x)$
are of little interest and, therefore, can be suppressed in the
$\rho_e(\omega)/K(\omega)$ ratio (so that the inverse Fourier transform
can exist). Introduction of new information by applying additional assumptions
which make an~originally ill-posed problem treatable is called
{\it regularization}.
In the problems of interest for particle physics, the action
of $K({\bf y}, {\bf x})$ on $p({\bf x})$ is usually more complicated than
simple convolution. At the time of this writing,
reconstruction of $p({\bf x})$ in particle physics applications is most often
performed by either the SVD unfolding~\cite{ref:svdunfold}
or the expectation-maximization
(a.k.a. D'Agostini, or Bayesian)
unfolding~\cite{ref:dagounfold}\footnote{In other disciplines,
utility of these methods has been discovered earlier~\cite{ref:unfoldingrev}.}.
Both of these methods start with the
assumption that ${\bf x}$ and ${\bf y}$ spaces are binned, and that
partitioning of these spaces is beyond the control of the method.
In the SVD unfolding, one-dimensional ${\bf x}$ is assumed, and the
regularization is performed by penalizing
discretized second derivative of the $p(x)$
density\footnote{This regularization technique has been reinvented
many times. Depending on the problem, it is also called
the {\it constrained linear inversion method},
the {\it Phillips-Twomey method}, the {\it Tikhonov regularization},
or the {\it ridge-parameter approach}~\cite{ref:numrecipes, ref:deconvolutionbook}.}. In the expectation-maximization
unfolding, regularization usually consists in imposing
a~subjective limit on the number of iteration cycles. This
early stopping criterion penalizes
deviations from the prior distribution used to start the iterations.
However, due to the method nonlinearity, it is difficult to
augment this statement with analytical derivations
of the penalty applicable to arbitrary response functions.
It should be appreciated that, for these methods,
the sample binning itself
serves as a part of problem regularization. Just imagine
making very wide bins --- this leads to a~response
matrix which is almost diagonal and easily invertible.
However, information about small structures within each bin
is now lost.
\subsection{EMS Unfolding}
The unfolding method implemented in NPStat combines smoothing in the
${\bf x}$ space with the expectation-maximization iterations performed
until convergence.
This combination (abbreviated
as EMS --- expectation-maximization with smoothing)
has important advantages over SVD unfolding and
expectation-maximization with early stopping:
\begin{thinlist}
\item{{\it Ad hoc} binning is no longer needed. While the solution is
still implemented on a grid, the cell width
can be chosen sufficiently fine
so that discretization does not affect the results.}
\item{Precise response functions can be employed instead of their
coarsely binned versions affected by prior distribution
assumptions.}
\item{Problem regularization can be unambiguously described in terms of the
parameters of the smoothing filter used (bandwidth, {\it etc}).}
\end{thinlist}
While the empirical success of the EMS unfolding method
has already been reported in the statistical
literature~\cite{ref:nychka, ref:smoothedem, ref:dagounfold},
the procedures implemented in NPStat also address the issues
of uncertainty estimation for the reconstructed distribution and of
choosing the filter parameters according to an objective
criterion.
The method proceeds as follows. The standard expectation-maximization
iterations update the reconstructed values of $p({\bf x})$ according to the formula~\cite{ref:nychka, ref:smoothedem}
\begin{equation}
\lambda_j^{(k+1)} = \frac{\lambda_j^{(k)}}{\epsilon_j} \sum_{i=1}^{n} \frac{K_{ij} y_i}{\sum_{\rho=1}^{m} K_{i\rho} \lambda_{\rho}^{(k)}}.
\end{equation}
Here, $\lambda_j^{(k)}$ are the unnormalized $p({\bf x})$ values
(event counts) discretized on a sufficiently fine grid
in the physical process space ${\bf x}$
(whose cells are small in comparison with the typical size of response function
features), obtained on a~$k$\supers{th} iteration. The index $j = 1, ..., m$ refers to
the~{\it linearized} cell number in this (possibly multidimensional) grid.
All $\lambda_j^{(0)}$ values (the starting point for the iterations) can normally
be set to the same constant $c = N/(\epsilon \, m)$, where $N$ is the
number of observed events and $\epsilon$ is the overall detector efficiency
for a~constant $p({\bf x})$. $y_i$, $i = 1, ..., n$, denotes the number
of observed events inside the cell with linearized index $i$ in the space
of observations ${\bf y}$. Dimensionalities of the ${\bf x}$ and ${\bf y}$
spaces can be arbitrary and distinct. $K_{ij}$ is the discretized response
matrix. It is the probability that
an event from the physical cell $j$ causes an observation in the cell
$i$ of the ${\bf y}$ space. $\epsilon_j = \sum_{i=1}^{n} K_{ij}$ is the
detector efficiency for the physical cell $j$.
These iterations are modified by introducing a smoothing step. The updating
scheme becomes
\begin{eqnarray}
\lambda_j^{*(k+1)} & = & \frac{\lambda_j^{(k)}}{\epsilon_j} \sum_{i=1}^{n} \frac{K_{ij} y_i}{\sum_{\rho=1}^{m} K_{i\rho} \lambda_{\rho}^{(k)}}, \\
\lambda_r^{(k+1)} & = & \alpha^{(k+1)} \sum_{j=1}^m S_{r j} \lambda_j^{*(k+1)},
\label{eq:semscheme}
\end{eqnarray}
where $S_{r j}$ is the {\it smoothing matrix}, and
the smoothing step normalization constant, $\alpha^{(k+1)}$, preserves the
overall event count obtained during the preceding expectation-maximization
step (so that $\sum_{r=1}^m \lambda_r^{(k+1)} = \sum_{j=1}^m \lambda_j^{*(k+1)}$). The values $\lambda_j^{(\infty)}$
obtained upon iteration convergence are therefore solutions of the equation
\begin{equation}
\lambda_r^{(\infty)} = \alpha^{(\infty)} \sum_{j=1}^m S_{r j} \frac{\lambda_j^{(\infty)}}{\epsilon_j} \sum_{i=1}^{n} \frac{K_{ij} y_i}{\sum_{\rho=1}^{m} K_{i\rho} \lambda_{\rho}^{(\infty)}},
\label{eq:semeq}
\end{equation}
where $\alpha^{(\infty)} = \sum_{r=1}^m \lambda_r^{*(\infty)} / \sum_{r=1}^m \sum_{j=1}^m S_{r j} \lambda_j^{*(\infty)}$.
The equation for the error propagation matrix,
$J_{rs} \equiv \frac{\partial \lambda_r^{(\infty)}}{\partial y_s}$, can be obtained
by differentiating Eq.~\ref{eq:semeq} w.r.t. $y_s$. In the matrix notation, this
equation is
\begin{equation}
{\bf J} = (\alpha^{(\infty)} {\bf S} + {\bf A}) \left({\bf M} + {\bf B J}\right),
\label{eq:errprop}
\end{equation}
where
\begin{eqnarray}
A_{j q} & = & \frac{\left(1 - \alpha^{(\infty)} \sum_{r=1}^m S_{rq} \right) \lambda_j^{(\infty)}}{\sum_{i=1}^{m} \lambda_i^{(\infty)}}, \label{eq:errpropmatricesa} \\
B_{j q} & = & \frac{\delta_{j q}}{\epsilon_j} \sum_{i=1}^{n} \frac{K_{ij } y_i}{\sum_{\rho=1}^{m} K_{i\rho} \lambda_{\rho}^{(\infty)}} - \frac{\lambda_j^{(\infty)}}{\epsilon_j} \sum_{i=1}^{n} \frac{K_{iq} K_{ij} y_i}{\left(\sum_{\rho=1}^{m} K_{i\rho} \lambda_{\rho}^{(\infty)}\right)^2},\\
M_{j q} & = & \frac{\lambda_j^{(\infty)}}{\epsilon_j} \frac{K_{q j }}{\sum_{\rho=1}^{m} K_{q\rho} \lambda_{\rho}^{(\infty)}}.
\end{eqnarray}
The NPStat code solves the equivalent equation, $({\bf I} - (\alpha^{(\infty)} {\bf S} + {\bf A}) {\bf B}) \, {\bf J} = (\alpha^{(\infty)} {\bf S} + {\bf A}) {\bf M}$, using the LU
factorization algorithm as implemented in LAPACK~\cite{ref:lapack}, and then
runs iterative refinement cycles defined by Eq.~\ref{eq:errprop}
until convergence.
For unweighted samples, the covariance matrix of observations, ${\bf V}$,
can be derived automatically by NPStat according to either Poisson or
multinomial distribution assumptions
using $\hat{y}_i = \sum_{\rho=1}^{m} K_{i\rho} \lambda_{\rho}^{(\infty)}$ as mean values.
In more complicated situations, the user is expected to provide the
covariance matrix of observations\footnote{This may require running the
unfolding code twice, first to obtain $\lambda_{\rho}^{(\infty)}$, and then,
when the covariance matrix is constructed externally, to propagate the uncertainties.
Note that unfolding with the expectation-maximization algorithm intrinsically
assumes Poisson distribution of the observed counts. If the covariance matrix of observations
is highly inconsistent with this assumption, it will be impossible to interpret
the result as a maximum likelihood estimate.}.
The covariance matrix of unfolded values is then estimated according to
${\bf J} {\bf V} {\bf J}\supers{T}$.
While the original expectation-maximization algorithm is agnostic about the dimensionalities
of ${\bf x}$ and ${\bf y}$ spaces, the smoothing step is, of course, dimensionality-specific.
The unfolding code is using smoothing filters constructed with the help of facilities described
in Section~\ref{sec:densityestimation}. The following classes implement unfolding with
smoothing:
\cname{SmoothedEMUnfold1D} (header file ``npstat/stat/SmoothedEMUnfold1D.hh'') --- unfolds
one-dimensional distributions using objects of \cname{LocalPolyFilter1D} class as
smoothing filters.
\cname{SmoothedEMUnfoldND} (header file ``npstat/stat/SmoothedEMUnfoldND.hh'') --- unfolds
multivariate distributions. Dimensionalities of the ${\bf x}$ and ${\bf y}$ spaces
can be arbitrary
and distinct. Dimensionality of the smoothing filter is, of course, expected to be
consistent with the structure of ${\bf x}$. Typically, the filters will be objects of
either \cname{LocalPolyFilterND} or \cname{SequentialPolyFilterND} class, adapted
for unfolding use by the \cname{UnfoldingFilterND} template.
\cname{SmoothedEMUnfoldND} code is employing a more efficient
implementation of the response matrix than the \cname{Matrix} class
used by the rest of NPStat code, but Eq.~\ref{eq:errprop}
is still solved using normal dense matrices. Therefore, practically
usable number of cells in the discretization
of the ${\bf x}$ space will be at most
a~few thousand, as the computational complexity of the
algorithm based on dense matrices is proportional to this number cubed.
\subsection{Choosing the Smoothing Parameters}
For likelihood-based inference, a useful model selection principle
is provided by the Akaike information criterion (AIC)~\cite{ref:aic}.
The AIC criterion adjusted for the finite sample size is~\cite{ref:modelsel}
\begin{equation}
AIC_c = -2 \ln L + 2 k + \frac{2 k (k + 1)}{N - k - 1},
\label{eq:aicc}
\end{equation}
where $L$ is the maximized value of the model likelihood,
$N$ is the sample size, and $k$ the number of parameters
in the model. Selecting a model
by minimizing $AIC_c$ avoids overfitting by reaching a compromise
between complexity of the model and goodness-of-fit.
Application of the $AIC_c$ criterion to the EMS
unfolding procedure is, however, not completely straightforward.
While calculation of the likelihood
can be accomplished assuming Poisson distribution in the space
of observations,
it is not immediately obvious how to count the number of parameters
in the model. To overcome this difficulty,
NPStat assumes that the number of model parameters
can be estimated as the {\it effective rank} of the
${\bf K} {\bf J} {\bf J}\supers{T} {\bf K}\supers{T}$ matrix.
This assumption is based on the following reasoning\footnote{One can
also argue that the ${\bf K} {\bf J}$ matrix
plays a similar role to the hat matrix in linear regression problems.
This leads to the same conclusion about the number of model parameters.}.
The covariance matrix of the fitted folded values ({\it i.e.}, $\hat{y}_i$) is
${\bf V}_{\hat{y}}({\bf V}) = {\bf K} {\bf J} {\bf V} {\bf J}\supers{T} {\bf K}\supers{T}$. If, using polynomial series, one fits multiple independent
samples of random points taken from the uniform distribution, with
the number of points per sample varying according to the Poisson distribution,
the rank of the covariance matrix of the
fitted unnormalized density values calculated over these samples
will be equal to the degree of the fitted polynomial plus one.
This is precisely the number of parameters of the fitted model.
It doesn't matter how many abscissae are used to construct the covariance
matrix of the fitted values as long as the number
of abscissae exceeds the degree of the polynomial and the
average number of points in a sample is ``sufficiently large''.
While the model fitted to the observed values by the EMS unfolding method
isn't polynomial, we can still identify some measure of the rank of
${\bf V}_{\hat{y}}({\bf I}) = {\bf K} {\bf J} {\bf J}\supers{T} {\bf K}\supers{T}$ with the number of model parameters.
Two different definitions of the effective
rank of a symmetric positive-semidefinite matrix
(say, {\bf Q}) are implemented.
The first one is the exponent
of the von Neumann entropy of ${\bf Q}/\mbox{tr} ({\bf Q})$. In terms
of the ${\bf Q}$ eigenvalues, $e_i$, it is expressed
as\footnote{Naturally, $\mbox{erank}_1({\bf Q})$ is also the exponent of the Shannon entropy of the normalized eigenspectrum.}
\begin{equation}
\mbox{erank}_1({\bf Q}) = \exp \left\{ -\sum_{i=1}^n \frac{e_i}{\|e\|} \ln \left( \frac{e_i}{\|e\|} \right) \right\},
\ \ \ \|e\| = \sum_{i=1}^n e_i.
\end{equation}
The second is the ratio of the matrix trace to the largest eigenvalue:
\begin{equation}
\mbox{erank}_2({\bf Q}) = \frac{\mbox{tr} ({\bf Q})}{\max_{1 \le i \le n} e_i} = \frac{\|e\|}{\max_{1 \le i \le n} e_i}.
\end{equation}
These effective ranks are calculated by the \cname{symPSDefEffectiveRank} method
of the NPStat \cname{Matrix} class.
From some initial experimentation with simple models, it appears that
setting $k$ in Eq.~\ref{eq:aicc} to either $\mbox{erank}_1({\bf K} {\bf J} {\bf J}\supers{T} {\bf K}\supers{T})$ or $\mbox{erank}_2({\bf K} {\bf J} {\bf J}\supers{T} {\bf K}\supers{T})$ works well, as both of these ranks have similar
derivatives w.r.t. filter bandwidth. The corresponding $AIC_c$ criteria
will be called $EAIC_c$ ($E$ in this abbreviation stands for ``entropic'')
and $TAIC_c$ ($T$ stands for ``trace'').
For any smoothing filter, the most important parameter regulating its
functionality is the bandwidth. NPStat provides the following classes
which facilitate the studies of $AIC_c$ criteria and unfolding performance
vs. filter bandwidth:
\cname{UnfoldingBandwidthScanner1D} (header ``npstat/stat/UnfoldingBandwidthScanner1D.hh'') --- simplifies and speeds up studies of 1-d EMS unfolding
results obtained with multiple
bandwidth values. Filters constructed for different
bandwidth settings are stored internally
and do not have to be recomputed when
multiple observed samples are processed. Effective ranks of the
${\bf K} {\bf J} {\bf J}\supers{T} {\bf K}\supers{T}$ matrices
are memoized as well.
The bandwidth which optimizes either
$EAIC_c$ or $TAIC_c$ criterion can be automatically searched for by
calling the \cname{processAICcBandwidth} method of this class.
Typical class usage is
illustrated by the ``ems\_unfold\_1d.cc'' example program located
in the ``examples/C++'' subdirectory of the NPStat package.
\cname{UnfoldingBandwidthScannerND} (header ``npstat/stat/UnfoldingBandwidthScannerND.hh'') --- assists in studies of multivariate EMS unfolding
results obtained with multiple bandwidth settings. \cname{SequentialPolyFilterND}
class is used internally to generate multivariate filters
according to the user-provided
bandwidth values in each dimension of the ${\bf x}$ space. The filters
and the effective ranks are memoized, but there is no automatic procedure
to determine the optimal bandwidth set\footnote{The
\cname{UnfoldingBandwidthScannerND} class
is designed to speed up repetitive multidimensional grid scans.
For more sophisticated algorithms searching for a~function minimum
in multiple dimensions,
the strategy of memoizing a lot of intermediate
results for each point considered
will not be optimal.}.
Use of an effective rank to determine the number of model parameters
leads to the requirement that the number of discretization cells in
the observation space ${\bf y}$ should be substantially larger than
this rank. This condition should be verified once the
EMS unfolding is performed with the optimal filter.
If the unfolded values of $p({\bf x})$ are expected to become close
to 0 somewhere in the ${\bf x}$ region considered, it is important
to choose a type of filter that guarantees the non-negativity of
the smoothed result\footnote{The code will automatically truncate negative
filtered values but this truncation will lead to distortions in
the covariance matrix of unfolded results not taken into account by linear
error propagation formulae. A~highly inappropriate choice
of filter can even break the expectation-maximization iterations
by producing $\hat{y}_i = 0$ corresponding to a positive ${y}_i$ value.}.
All LOrPE filters of degree zero have this property.
\subsection{Large Problems with Sparse Matrices}
The standard matrix multiplication methods and the LU factorization algorithm
used to solve Eq.~\ref{eq:errprop} have $O(m^3)$ computational complexity,
where $m$ is the number of cells used to discretize the physical process space.
For programs running
on a single processor, this complexity effectively limits $m$
to a~few thousand. However, in many practically important applications
the response function is sufficiently short-ranged so that most elements
of its discretized representation $K_{ij}$ are zeros. In such situations
it may be advantageous to utilize techniques and algorithms developed for
sparse matrices in order to increase $m$.
NPStat provides a header-only EMS unfolding implementation based on sparse
matrix arithmetic in the namespace ``emsunfold''. In order to take advantage
of this code,
two additional software packages have to be installed by the user.
NPStat relies on Eigen~\cite{ref:eigen} for basic operations
with sparse matrices and for solving sparse linear systems. The package
TRLAN~\cite{ref:trlan} is used for calculating leading eigenvalues and
eigenvectors of large covariance matrices\footnote{I am not aware of any
method for calculating {\it all} eigenvalues and eigenvectors of
a symmetric positive-semidefinite matrix, sparse or not,
with computational complexity better than $O(m^3)$.}.
The following classes implement EMS unfolding with sparse matrices:
\cname{SmoothedEMSparseUnfoldND}
(header ``npstat/emsunfold/SmoothedEMSparseUnfoldND.hh'')
--- parallels the functionality of the \cname{SmoothedEMUnfoldND} class, with a few
peculiarities related to the sparseness of various matrices. In particular, only Poisson uncertainties
on the observed data can be calculated automatically, as multinomial covariances
would not be sparse. User-provided uncertainties are restricted to diagonal
matrices. The smoothing matrix should be doubly
stochastic\footnote{That is, each row and column
of the smoothing matrix should sum to 1, within the tolerance parameter
of the unfolding algorithm.
Construction of doubly stochastic filters with NPStat is discussed
in Section~\ref{sec:bernstein}. Double stochasticity is not
enforced by the code itself, as it is often useful to benchmark the results obtained with sparse
matrices against an implementation utilizing dense matrices.},
otherwise Eq.~\ref{eq:errpropmatricesa} will inject a dense matrix
into the system.
\cname{SparseUnfoldingBandwidthScannerND}
--- parallels the functionality of \cname{UnfoldingBandwidthScannerND},
with additional input parameters and diagnostic outputs needed to drive determination of
eigenvalues and eigenvectors of large covariance matrices. This class
is declared in the header file
``npstat/emsunfold/SparseUnfoldingBandwidthScannerND.hh''.
\section{Pseudo- and Quasi-Random Numbers}
\label{sec:randomgen}
The C++11 Standard~\cite{ref:cpp11} defines an API for generating
pseudo-random numbers. Unfortunately, this API
suffers from a disconnect between modeling of statistical
distributions (including densities, cumulative distributions, {\it etc})
and generation of random numbers. Moreover,
quasi-random numbers~\cite{ref:Niederreiter} useful for
a large variety of simulation and data analysis purposes
are not represented by the Standard,
and there is no meaningful support for generating genuinely
multivariate random sequences.
NPStat adopts a different approach towards generation of pseudo-random,
quasi-random,
and non-random sequences ---
the one that is more appropriate in the context
of a~statistical
analysis package. A small number of high-quality generators is
implemented for
producing such sequences on a unit $d$-dimensional cube.
All such generators inherit from the same abstract base class
\cname{AbsRandomGenerator} (header file ``npstat/rng/AbsRandomGenerator.hh'').
Generators developed or ported by users can be seamlessly
added as well. Conversion of uniformly distributed sequences into other
types of distributions and to different support regions is performed
by the classes that represent statistical distributions --- in particular,
by those classes which inherit from \cname{AbsDistribution1D} and
\cname{AbsDistributionND} bases. Both of these bases have a virtual
method \cname{random} which takes a sequence generator instance as an
input and produces correspondingly distributed numbers (random or not)
on output.
By default, transformation of sequences is performed by the \cname{quantile}
method for one-dimensional distributions and by \cname{unitMap}
method for multivariate ones. However, it is expected that the
\cname{random} method itself will be overridden by the derived classes
when it is easier, for example, to make
a random sequence with the desired properties by the acceptance-rejection
technique. The following functions and classes are implemented:
\cname{MersenneTwister} (header file ``npstat/rng/MersenneTwister.hh'')
--- generates pseudo-random numbers using the
Mersenne Twister algorithm~\cite{ref:mercenne}.
\cname{SobolGenerator} (header file ``npstat/rng/SobolGenerator.hh'')
--- generates Sobol quasi-random
sequences~\cite{ref:sobol}.
\cname{HOSobolGenerator} (header file ``npstat/rng/HOSobolGenerator.hh'')
--- generates higher order scrambled Sobol
sequences~\cite{ref:hosobol}.
\cname{RandomSequenceRepeater} (header file ``npstat/rng/RandomSequenceRepeater.hh'')
--- this class can be used to produce
multiple repetitions of sequences created by other generators
whenever an instance of \cname{AbsRandomGenerator} is needed.
The whole
sequence is simply remembered (which can take a significant
amount of memory
for large sequences) and extended as necessary
by calling the original generator.
\cname{WrappedRandomGen} (header file ``npstat/rng/AbsRandomGenerator.hh'')
--- a simple adaptor class for ``old style''
random generator functions like ``drand48()''. Implements
\cname{AbsRandomGenerator} interface.
\cname{CPP11RandomGen} (header file ``npstat/rng/CPP11RandomGen.hh'')
--- a simple adaptor class for pseudo-random
generator engines defined in the C++11 standard. Implements
\cname{AbsRandomGenerator} interface.
\cname{EquidistantSampler1D} (header file ``npstat/rng/EquidistantSampler1D.hh'')
--- generates a sequence of equidistant
points, similar to bin centers of a~histogram with axis limits at 0 and 1.
\cname{RegularSampler1D} (header file ``npstat/rng/RegularSampler1D.hh'')
--- generates a sequence of points by
splitting the $[0, 1]$ interval by 2, then splitting all subintervals
by 2, {\it etc}. The points returned are the split locations.
Useful for generating $2^{k} - 1$ points when integer
$k$ is not known in advance.
\cname{convertToSphericalRandom} (header file ``npstat/rng/convertToSphericalRandom.hh'')
--- converts a multivariate random
number from a unit $d$-dimensional cube into a random direction
in $d$ dimensions and one additional random number between 0 and 1.
Useful for generating random numbers according to spherically
symmetrical distributions.
\section{Algorithms Related to Combinatorics}
The NPStat package implements several functions and algorithms related to
permutations of integers and combinatorics:
\cname{factorial} (header file ``npstat/rng/permutation.hh'')
--- this function returns an exact factorial if the
result does not exceed the largest unsigned long (up to $12!$ on 32-bit
systems and up to $20!$ on 64-bit ones).
\cname{ldfactorial} (header file ``npstat/rng/permutation.hh'')
--- this function returns an approximate
factorial up to $1754!$ as a long double.
\cname{logfactorial} (header file ``npstat/rng/permutation.hh'')
--- natural logarithm of a factorial, up to $\ln((2^{32} - 1)!)$,
as a long double.
\cname{binomialCoefficient} (header file ``npstat/nm/binomialCoefficient.hh'')
--- calculates the binomial coefficients
$C_N^M = \frac{N!}{M! (N-M)!}$ using an algorithm which avoids
overflows.
\cname{orderedPermutation} (header file ``npstat/rng/permutation.hh'')
--- this function can be used to iterate
over permutations of numbers $\{0, 1, ..., N-1\}$ in a systematic way.
It generates a unique permutation of such numbers given a~non-negative input
integer below $N!$.
\cname{permutationNumber} (header file ``npstat/rng/permutation.hh'')
--- inverse of \cname{orderedPermutation}:
maps a permutation of numbers $\{0, 1, ..., N-1\}$ into a unique
non-negative integer below $N!$.
\cname{randomPermutation} (header file ``npstat/rng/permutation.hh'')
--- generates random permutations of numbers
$\{0, 1, ..., N-1\}$ in which probability of every permutation
is the same.
\cname{NMCombinationSequencer} (header file ``npstat/stat/NMCombinationSequencer.hh'')
--- this class iterates over all
possible choices of $j_1, ..., j_M$ from
$N$ possible values for each $j_k$ in such a way that all $j_1, ..., j_M$
are distinct and appear in the sequence in the increasing order,
last index changing most often. Naturally, the total number of
all such choices equals the number of ways to pick $M$ distinct
items out of $N$: it is the binomial coefficient $C_N^M$.
\section{Numerical Analysis Utilities}
\label{sec:utilities}
The NPStat package includes a menagerie of numerical analysis utilities
designed, primarily, to support the statistical calculations described
in the previous sections. They are placed in the ``nm'' directory
of the package. A number of these utilities can be used as
stand-alone tools. If the corresponding header file
is not mentioned explicitly in the descriptions below,
it is ``npstat/nm/NNNN.hh'', where NNNN stands
for the actual name of the class or function.
\cname{ConvolutionEngine1D} and \cname{ConvolutionEngineND} --- These
classes encapsulate
the NPStat interface to the FFTW package~\cite{ref:fftw}.
They can be used to perform DFFT convolutions of one-dimensional
and multivariate functions, respectively.
\cname{EquidistantInLinearSpace} (header file ``npstat/nm/EquidistantSequence.hh'')
--- A sequence of equidistant points.
For use with algorithms that take a vector of points as one of
their parameters.
\cname{EquidistantInLogSpace} (header file ``npstat/nm/EquidistantSequence.hh'')
--- A sequence of points whose logarithms are equidistant.
\cname{findRootInLogSpace} --- templated numerical equation solving
for 1-$d$ functions (or for 1-$d$ subspaces of multivariate functions)
using interval division. It is assumed that the solution can be
represented as a product of some object ({\it e.g.,} a vector)
by a positive real number,
and that number is then searched for.
\cname{GaussHermiteQuadrature} and \cname{GaussLegendreQuadrature}
--- templated Gauss-Hermite and Gauss-Legendre quadratures for
one-dimensional functions. Internally, calculations
are performed in long double precision. Of course,
lower precision functions can be integrated as well, with
corresponding reduction in the precision of the result.
\cname{rectangleIntegralCenterAndSize} (header file ``npstat/nm/rectangleQuadrature.hh'')
--- Gauss-Legendre cubatures on
rectangular and hyperrectangular domains using tensor product
integration\footnote{``Tensor product integration'' simply means
that the locations at which the function is evaluated and
corresponding weights are determined by sequential application
of Gauss-Legendre quadratures in each dimension.}.
\cname{goldenSectionSearchInLogSpace} (header file ``npstat/nm/goldenSectionSearch.hh'')
--- templated numerical search
for a minimum of 1-$d$ functions (or for 1-$d$ subspaces of multivariate
functions) using the golden section method.
It is assumed that location of the minimum can be
represented as a product of some object ({\it e.g.,} a vector)
by a positive constant, and that constant is then searched for.
\cname{goldenSectionSearchOnAGrid} (header file ``npstat/nm/goldenSectionSearch.hh'')
--- search for a minimum using coordinates restricted to a user-defined
one-dimensional grid. Appropriate for use with functions which are expensive
to evaluate and for which the user has some prior idea about their smoothness.
\cname{parabolicExtremum} (header file ``npstat/nm/MathUtils.hh'')
--- determine the extremum of a~parabola passing through three given points
on a plane. Can be used in combination with \cname{goldenSectionSearchOnAGrid}
to refine location of the minimum.
\cname{GridAxis} --- This class can be used to define an axis of
a rectangular grid with non-uniform spacing of points. The complementary
class \cname{UniformAxis} works more efficiently for representing
equidistant points, while the class \cname{DualAxis} can be
used to represent both uniform and non-uniform grids.
\cname{interpolate\_linear}, \cname{interpolate\_quadratic},
\cname{interpolate\_cubic} (these three functions are declared in the
header file ``npstat/nm/interpolate.hh'')
--- linear, quadratic, and cubic
polynomials with given values at two, three, and four equidistant
points, respectively.
\cname{LinInterpolatedTable1D} --- persistent one-dimensional lookup table
with linear interpolation between the tabulated values. Useful
for representing arbitrary one-dimensional functions in case
the full numerical precision is not required. If the table is
monotonic, the inverse table can be constructed automatically.
\cname{LinInterpolatedTableND} --- persistent multidimensional lookup table
with multilinear interpolation between the tabulated values,
as in Eq~\ref{eq:multilinear}. Extrapolation beyond
the grid boundaries is supported as well. This class is useful
for representing arbitrary functions in case the full numerical precision
is not required. \cname{GridAxis}, \cname{UniformAxis}, or \cname{DualAxis}
class (or user-developed
classes with similar sets of methods) can be used
to define grid point locations.
Note that simple location-based lookup of stored values
(without interpolation)
can be trivially performed with the \cname{closestBin} method of the
\cname{HistoND} class. Lookup of histogram bin values with interpolation
can be performed by the \cname{interpolateHistoND} function
(header file ``npstat/stat/interpolateHistoND.hh'').
\cname{LinearMapper1d}, \cname{LogMapper1d} --- linear and log-linear
transformations in 1-$d$ as functors
(with ``double operator()(const double\& x) const'' method defined).
The \cname{CircularMapper1d} class works similarly to \cname{LinearMapper1d}
in situations with circular data topologies.
\cname{Matrix} --- A templated matrix class. Useful for standard matrix
manipulations, solving linear systems, finding eigenvalues and
eigenvectors of symmetric matrices, singular value decomposition,
{\it etc.} Encapsulates NPStat interface to LAPACK~\cite{ref:lapack}.
\cname{findPeak3by3}, \cname{findPeak5by5} (header file ``npstat/nm/findPeak2D.hh'') ---
utilities which facilitate peak finding for two-dimensional surfaces
which can be contaminated by small amounts of noise ({\it e.g.}, from
round-off errors). The user can fit a 2-$d$ quadratic polynomial
inside a $3 \times 3$
or $5 \times 5$ window by least squares\footnote{A fast method
utilizing discrete orthogonal polynomial expansion is used internally.}
and check whether that polynomial has
an extremum inside the window.
Initially intended for studying 2-$d$ log-likelihoods using sliding
windows.
\cname{solveQuadratic}, \cname{solveCubic} (header file ``npstat/nm/MathUtils.hh'')
--- solutions of quadratic
and cubic equations, respectively, by numerically sound
methods\footnote{The textbook formula
$x_{1,2} = \frac{-b \pm \sqrt{b^2 - 4 a c}}{2 a}$
for the roots of quadratic
equation $a x^2 + b x + c =0$
is also a~prime example of a numerical analysis pitfall.
A minimal modification,
$x_1 = -\frac{b + (I(b \ge 0) - I(b < 0))\sqrt{b^2 - 4 a c}}{2 a}$,
$x_2 = \frac{c}{a x_1}$, avoids the subtractive
cancellation problem in the $x_{1,2}$ numerator.}.
\cname{ndUnitSphereVolume} (header file ``npstat/nm/MathUtils.hh'')
--- volume of the $n$-dimensional unit sphere.
\cname{polyAndDeriv} (header file ``npstat/nm/MathUtils.hh'')
--- monomial series $\sum_{k=0}^M c_k x^k$
and its derivative with respect to $x$, templated on the type
of coefficients $c_k$.
\cname{polySeriesSum}, \cname{legendreSeriesSum},
\cname{gegenbauerSeriesSum}, \cname{chebyshevSeriesSum} (all of these
functions are declared in the header file ``npstat/nm/MathUtils.hh'')
--- templated
series of one-dimensional monomials, Legendre polynomials, Gegenbauer
polynomials, and Chebyshev polynomials, respectively. Numerically
sound recursive formulae are used to generate the polynomials.
\cname{hermiteSeriesSumProb}, \cname{hermiteSeriesSumPhys} (header
file ``npstat/nm/MathUtils.hh'') --- templated series of
``probabilist'' and ``physicist'' Hermite polynomials, respectively.
\cname{chebyshevSeriesCoeffs} (header
file ``npstat/nm/MathUtils.hh'') --- utility for approximating
mathematical functions with Chebyshev polynomials.
\cname{OrthoPoly1D}, \cname{OrthoPolyND} --- uni- and multivariate
orthogonal polynomials on equidistant rectangular grids with
arbitrary weight functions. In addition to producing
the polynomials themselves, these classes can
be used to calculate polynomial series,
polynomial series expansion coefficients for gridded
functions, and polynomial filters defined by Eq.~\ref{eq:effk}
(but normalized so that the sum of filter coefficients is 1).
The NPStat package also includes implementations of various
special functions needed in statistical calculations (incomplete
gamma function and its inverse, incomplete beta function, {\it etc}).
These functions are declared in the header file
``npstat/nm/SpecialFunctions.hh''.
\cleardoublepage
\phantomsection
\addcontentsline{toc}{section}{References}
\begin{thebibliography}{99}
\bibitem{ref:minuit}
Minuit2 Minimization Package,
\href{http://www.cern.ch/minuit/}{http://www.cern.ch/minuit/}
\bibitem{ref:geners}
Geners --- Generic Serialization for C++,
\href{http://geners.hepforge.org/}{http://geners.hepforge.org/}
\bibitem{ref:cpp11} ISO/IEC Standard 14882:2011
{\it Programming Language C++} (2011).
\bibitem{ref:linearbinning}
M.C.~Jones, and H.W.~Lotwick, ``On the Errors Involved in Computing the Empirical Characteristic Function'', {\it Journal of Statistical Computation and Simulation} {\bf 17}, 133 (1983).
\bibitem{ref:sampleskewkurt}
D.N. Joanes and C.A. Gill, ``Comparing measures of sample skewness
and kurtosis'', {\it The Statistician} {\bf 47}, 183 (1998).
\bibitem{ref:ratioofnormals}
D.V. Hinkley, ``On the ratio of two correlated normal random variables'',
{\it Biometrika} {\bf 56}, 635 (1969).
\bibitem{ref:johnson}
N.L. Johnson, ``Systems of Frequency Curves Generated by Methods of Translation'', {\it Biometrika} {\bf 36}, 149 (1949).
\bibitem{ref:johnsonbook}
W.P. Elderton and N.L. Johnson, ``Systems of Frequency Curves'',
Cambridge University Press (1969).
\bibitem{ref:hahnshap}
G.J. Hahn and S.S. Shapiro, ``Statistical Models in Engineering'',
Wiley (1994).
\bibitem{ref:draper}
J. Draper, ``Properties of Distributions Resulting from Certain Simple Transformations of the Normal Distribution'', {\it Biometrika} {\bf 39}, 290 (1952).
\bibitem{ref:hill}
I.D. Hill, R. Hill, and R. L. Holder,
``Algorithm AS 99: Fitting Johnson Curves by Moments'',
{\it Applied Statistics} {\bf 25}, 180 (1976).
\bibitem{ref:handcock}
M.S. Handcock and M. Morris, ``Relative Distribution Methods'',
{\it Sociological Methodology} {\bf 28}, 53 (1998).
\bibitem{ref:kdetransform}
L. Yang and J.S. Marron, ``Iterated Transformation-Kernel Density Estimation'',
{\it Journal of the American Statistical Association} {\bf 94}, 580 (1999).
\bibitem{ref:thas}
O. Thas, ``Comparing Distributions'', Springer Series in Statistics (2009).
\bibitem{ref:smoothtest}
J. Neyman, ``'Smooth Test' for Goodness of Fit'', {\it Skandinavisk Aktuarietidskrift} {\bf 20}, 150~(1937).
\bibitem{ref:copulabook}
R.B.~Nelsen, ``An Introduction to Copulas'', 2\supers{nd} Ed.,
Springer Series in Statistics (2006).
\bibitem{ref:eightqueens}
\href{http://en.wikipedia.org/wiki/Eight_queens_puzzle}{``Eight queens puzzle'', Wikipedia.}
\bibitem{ref:eightrooks}
\href{http://en.wikipedia.org/wiki/Rook_polynomial}{``Rook polynomial'', Wikipedia.}
\bibitem{ref:copulaentropy}
J.~Ma and Z.~Sun, ``Mutual information is copula entropy'',
\href{http://arxiv.org/abs/0808.0845}{arXiv:0808.0845v1} (2008).
\bibitem{ref:read}
A.L.~Read, ``Linear interpolation of histograms'',
{\it Nucl. Instr. Meth.} {\bf A 425}, 357 (1999).
\bibitem{ref:silverman} B.W.~Silverman, ``Density Estimation for Statistics
and Data Analysis'', Chapman \& Hall (1986).
\bibitem{ref:izenmann} A.J.~Izenman, ``Recent Developments in Nonparametric Density Estimation'', {\it Journal of the American Statistical Association} {\bf 86}, 205 (1991).
\bibitem{ref:kde} D.W.~Scott,
``Multivariate Density Estimation: Theory, Practice, and Visualization'',
Wiley (1992).
\bibitem{ref:bwopt} M.C.~Jones, J.S.~Marron and S.J.~Sheather,
``A Brief Survey of Bandwidth Selection for Density Estimation'',
{\it Journal of the American Statistical Association} {\bf 91}, 401 (1996).
\bibitem{ref:loaderbw} C.R.~Loader,
``Bandwidth Selection: Classical or Plug-in?'',
{\it The Annals of Statistics} {\bf 27}, 415 (1999).
\bibitem{ref:kernsmooth} M.P.~Wand and M.C.~Jones,
``Kernel Smoothing'', Chapman \& Hall (1995).
\bibitem{ref:chiu2} S.-T. Chiu,
``An Automatic Bandwidth Selector for Kernel Density Estimation'',
{\it Biometrika} {\bf 79}, 771 (1992).
\bibitem{ref:chiumultivar} T.-J. Wu and M.-H. Tsai,
``Root n bandwidths selectors in multivariate kernel density estimation'',
{\it Probab. Theory Relat. Fields} {\bf 129}, 537 (2004).
\bibitem{ref:discretizedkde} M.C.~Jones,
``Discretized and Interpolated Kernel Density Estimates'',
{\it Journal of the American Statistical Association} {\bf 84}, 733 (1989).
\bibitem{ref:fastkde1} C.~Yang, R.~Duraiswami, N.A.~Gumerov and L.~Davis,
``Improved Fast Gauss Transform and Efficient Kernel Density Estimation'',
in {\it Proceedings of the Ninth IEEE International
Conference on Computer Vision} (2003).
\bibitem{ref:fastkde2} D.~Lee, A.G.~Gray, and A.W. Moore,
``Dual-Tree Fast Gauss Transforms'',
\href{http://arxiv.org/abs/1102.2878}{arXiv:1102.2878v1} (2011).
\bibitem{ref:bwgaussmix} J.S. Marron and M.P. Wand,
``Exact Mean Integrated Squared Error'', {\it The Annals Of Statistics}
{\bf 20}, 712 (1992).
\bibitem{ref:abramson} I.S.~Abramson, ``On Bandwidth Variation in Kernel
Estimates --- A Square Root Law'', {\it The Annals Of Statistics}
{\bf 10}, 1217 (1982).
\bibitem{ref:placida} P.D.A. Dassanayake,
``Local Orthogonal Polynomial Expansion for Density Estimation'',
M.S. Thesis, Texas Tech University (2012).
\bibitem{ref:betakern} S.X. Chen, ``Beta kernel estimators for
density functions'', {\it Computational Statistics \& Data Analysis}
{\bf 31}, 131 (1999).
\bibitem{ref:babu} G.J. Babu, A.J. Canty, and Y.P. Chaubey,
``Application of Bernstein Polynomials for smooth estimation
of a distribution and density function'', {\it Journal of Statistical
Planning and Inference} {\bf 105}, 377 (2002).
\bibitem{ref:sinkhorn} R. Sinkhorn and P. Knopp,
``Concerning Nonnegative Matrices and Doubly Stochastic Matrices'',
{\it Pacific Journal of Mathematics} {\bf 21}, 343 (1967).
\bibitem{ref:khoury} R. Khoury, ``Closest Matrices in the Space of
Generalized Doubly Stochastic Matrices'', {\it Journal of
Mathematical Analysis and Applications} {\bf 222}, 562 (1998).
\bibitem{ref:lapack}
LAPACK --- Linear Algebra PACKage,
\href{http://www.netlib.org/lapack/}{http://www.netlib.org/lapack/}
\bibitem{ref:localreg} T.J.~Hastie, ``Non-parametric Logistic Regression'',
SLAC PUB-3160, June 1983.
\bibitem{ref:locfit}
CRAN - Package locfit,
\href{http://cran.r-project.org/web/packages/locfit/}
{http://cran.r-project.org/web/packages/locfit/}
\bibitem{ref:lvm} K.~Levenberg, ``A Method for the Solution
of Certain Non-Linear Problems in Least Squares'',
{\it Quarterly of Applied Mathematics} {\bf 2}, 164 (1944).
D.~Marquardt, ``An Algorithm for Least-Squares Estimation of Nonlinear Parameters'', {\it SIAM Journal on Applied Mathematics} {\bf 11}, 431 (1963).
\bibitem{ref:kdtree} J.L.~Bentley, ``Multidimensional
Binary Search Trees Used for Associative Searching'',
{\it Communications of the ACM} {\bf 18}, 509 (1975).
\bibitem{ref:localquantreg} H.J. Wang and L. Wang,
``Locally Weighted Censored Quantile Regression'',
{\it Journal of the American Statistical Association} {\bf 104}, 487 (2009).
\bibitem{ref:unfoldingrev}
F.~Span\`o, ``Unfolding in particle physics:
a window on solving inverse problems'',
{\it EPJ Web of Conferences} {\bf 55}, 03002 (2013).
\bibitem{ref:numrecipes}
W.H.~Press, S.A.~Teukolsky, W.T.~Vetterling, and B.P.~Flannery,
``Numerical Recipes: the Art of Scientific Computing'', 3rd ed.,
Cambridge University Press (2007).
\bibitem{ref:deconvolutionbook}
A.~Meister, ``Deconvolution Problems in Nonparametric Statistics'',
Springer Lecture Notes in Statistics, Vol. 193 (2009).
\bibitem{ref:svdunfold}
A. H\"ocker and V. Kartvelishvili, ``SVD approach to data unfolding'',
{\it Nuclear Instruments and Methods in Physics Research A}
{\bf 372}, 469 (1996).
\bibitem{ref:dagounfold}
G. D'Agostini, ``A multidimensional unfolding method based on Bayes' theorem'',
{\it Nuclear Instruments and Methods in Physics Research A}
{\bf 362}, 487 (1995).
\bibitem{ref:nychka}
D. Nychka, ``Some Properties of Adding a Smoothing Step to the EM Algorithm'',
{\it Statistics \& Probability Letters} {\bf 9}, 187 (1990).
\bibitem{ref:smoothedem}
B.W. Silverman \etal, ``A Smoothed EM Approach to Indirect Estimation Problems,
with Particular Reference to Stereology and Emission Tomography'',
{\it Journal of the Royal Statistical Society B} {\bf 52}, 271 (1990).
\bibitem{ref:aic}
H.~Akaike, ``A new look at the statistical model identification'',
{\it IEEE Transactions on Automatic Control} {\bf 19}, 716 (1974).
\bibitem{ref:modelsel}
K.P. Burnham and D.R. Anderson, ``Model Selection and Multi-Model Inference:
A~Practical Information-Theoretic Approach'', 2nd ed., Springer (2004).
\bibitem{ref:eigen}
Eigen --- a C++ template library for linear algebra:
matrices, vectors, numerical solvers, and related algorithms,
\href{http://eigen.tuxfamily.org/}{http://eigen.tuxfamily.org/}
\bibitem{ref:trlan}
K.~Wu and H.~Simon, ``Thick-Restart Lanczos Method
for Large Symmetric Eigenvalue Problems'',
{\it SIAM Journal on Matrix Analysis and Applications} {\bf 22}, 602 (2000).
\bibitem{ref:Niederreiter}
H. Niederreiter, ``Random Number Generation and Quasi-Monte
Carlo Methods'', Society for Industrial and Applied Mathematics (SIAM),
Philadelphia (1992).
\bibitem{ref:mercenne} M. Matsumoto and T. Nishimura, ``Mersenne Twister:
A 623-Dimensionally Equidistributed Uniform Pseudo-Random Number Generator'',
{\it ACM Transactions on Modeling and Computer Simulation} {\bf 8}, 3 (1998).
\bibitem{ref:sobol} I. Sobol and Y.L. Levitan,
``The Production of Points Uniformly Distributed in a Multidimensional Cube'',
Tech. Rep. 40, Institute of Applied Mathematics,
USSR Academy of Sciences, 1976 (in Russian).
\bibitem{ref:hosobol} J. Dick, ``Higher order scrambled digital nets achieve the optimal rate of the root mean square error for smooth integrands'',
\href{http://arxiv.org/abs/1007.0842}{arXiv:1007.0842} (2010).
\bibitem{ref:fftw}
FFTW (Fastest Fourier Transform in the West),
\href{http://www.fftw.org/}{http://www.fftw.org/}
\end{thebibliography}
\cleardoublepage
\phantomsection
\addcontentsline{toc}{section}{Functions and Classes}
\printindex
\end{document}
Index: trunk/doc/stat_methods.pdf
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/pdf
Index: trunk/npstat/interfaces/Makefile
===================================================================
--- trunk/npstat/interfaces/Makefile (revision 579)
+++ trunk/npstat/interfaces/Makefile (revision 580)
@@ -1,593 +1,593 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# npstat/interfaces/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
subdir = npstat/interfaces
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(include_HEADERS) \
$(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
SOURCES =
DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(includedir)"
HEADERS = $(include_HEADERS)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__DIST_COMMON = $(srcdir)/Makefile.in
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk/npstat/interfaces
abs_srcdir = /home/igv/Hepforge/npstat/trunk/npstat/interfaces
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include/npstat/interfaces
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix = ../../
top_builddir = ../..
top_srcdir = ../..
AM_CPPFLAGS = -I../../
include_HEADERS = fitCompositeJohnson.hh \
fitCompositeJohnson.icc \
MinuitDensityFitFcn1D.hh \
minuitFitJohnsonCurves.hh \
minuitFitJohnsonCurves.icc \
MinuitLocalLogisticFcn.hh \
minuitLocalQuantileRegression1D.hh \
minuitLocalQuantileRegression1D.icc \
minuitLocalRegression.hh \
minuitLocalRegression.icc \
MinuitLOrPEBgCVFcn1D.hh \
MinuitSemiparametricFitFcn1D.hh \
MinuitSemiparametricFitUtils.hh \
MinuitQuantileRegression1DFcn.hh \
minuitQuantileRegression.hh \
minuitQuantileRegression.icc \
MinuitQuantileRegressionNDFcn.hh \
MinuitUnbinnedFitFcn1D.hh \
ScalableDensityConstructor1D.hh \
weightedLocalQuantileRegression1D.hh \
weightedLocalQuantileRegression1D.icc
EXTRA_DIST = 00README.txt npsi_doxy.hh
all: all-am
.SUFFIXES:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign npstat/interfaces/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign npstat/interfaces/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
install-includeHEADERS: $(include_HEADERS)
@$(NORMAL_INSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
$(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
done
uninstall-includeHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(HEADERS)
installdirs:
for dir in "$(DESTDIR)$(includedir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool mostlyclean-am
distclean: distclean-am
-rm -f Makefile
distclean-am: clean-am distclean-generic distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-includeHEADERS
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-includeHEADERS
.MAKE: install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \
clean-libtool cscopelist-am ctags ctags-am distclean \
distclean-generic distclean-libtool distclean-tags distdir dvi \
dvi-am html html-am info info-am install install-am \
install-data install-data-am install-dvi install-dvi-am \
install-exec install-exec-am install-html install-html-am \
install-includeHEADERS install-info install-info-am \
install-man install-pdf install-pdf-am install-ps \
install-ps-am install-strip installcheck installcheck-am \
installdirs maintainer-clean maintainer-clean-generic \
mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \
ps ps-am tags tags-am uninstall uninstall-am \
uninstall-includeHEADERS
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/npstat/rng/Makefile
===================================================================
--- trunk/npstat/rng/Makefile (revision 579)
+++ trunk/npstat/rng/Makefile (revision 580)
@@ -1,678 +1,678 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# npstat/rng/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
subdir = npstat/rng
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(include_HEADERS) \
$(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
LTLIBRARIES = $(noinst_LTLIBRARIES)
librng_la_LIBADD =
am_librng_la_OBJECTS = permutation.lo RandomSequenceRepeater.lo \
convertToSphericalRandom.lo SobolGenerator.lo \
MersenneTwister.lo HOSobolGenerator.lo RegularSampler1D.lo \
EquidistantSampler1D.lo
librng_la_OBJECTS = $(am_librng_la_OBJECTS)
AM_V_lt = $(am__v_lt_$(V))
am__v_lt_ = $(am__v_lt_$(AM_DEFAULT_VERBOSITY))
am__v_lt_0 = --silent
am__v_lt_1 =
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
DEFAULT_INCLUDES = -I.
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CXXFLAGS) $(CXXFLAGS)
AM_V_CXX = $(am__v_CXX_$(V))
am__v_CXX_ = $(am__v_CXX_$(AM_DEFAULT_VERBOSITY))
am__v_CXX_0 = @echo " CXX " $@;
am__v_CXX_1 =
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CXXLD = $(am__v_CXXLD_$(V))
am__v_CXXLD_ = $(am__v_CXXLD_$(AM_DEFAULT_VERBOSITY))
am__v_CXXLD_0 = @echo " CXXLD " $@;
am__v_CXXLD_1 =
SOURCES = $(librng_la_SOURCES)
DIST_SOURCES = $(librng_la_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(includedir)"
HEADERS = $(include_HEADERS)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk/npstat/rng
abs_srcdir = /home/igv/Hepforge/npstat/trunk/npstat/rng
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include/npstat/rng
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix = ../../
top_builddir = ../..
top_srcdir = ../..
AM_CPPFLAGS = -I../../ $(DEPS_CFLAGS)
noinst_LTLIBRARIES = librng.la
librng_la_SOURCES = permutation.cc RandomSequenceRepeater.cc \
convertToSphericalRandom.cc SobolGenerator.cc MersenneTwister.cc \
HOSobolGenerator.cc RegularSampler1D.cc EquidistantSampler1D.cc
include_HEADERS = AbsRandomGenerator.hh \
convertToSphericalRandom.hh \
CPP11RandomGen.hh \
EquidistantSampler1D.hh \
HOSobolGenerator.hh \
MersenneTwister.hh \
permutation.hh \
RandomSequenceRepeater.hh \
RegularSampler1D.hh \
SobolGenerator.hh
EXTRA_DIST = MersenneTwisterImpl.hh 00README.txt
all: all-am
.SUFFIXES:
.SUFFIXES: .cc .lo .o .obj
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign npstat/rng/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign npstat/rng/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
clean-noinstLTLIBRARIES:
-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
@list='$(noinst_LTLIBRARIES)'; \
locs=`for p in $$list; do echo $$p; done | \
sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \
sort -u`; \
test -z "$$locs" || { \
echo rm -f $${locs}; \
rm -f $${locs}; \
}
librng.la: $(librng_la_OBJECTS) $(librng_la_DEPENDENCIES) $(EXTRA_librng_la_DEPENDENCIES)
$(AM_V_CXXLD)$(CXXLINK) $(librng_la_OBJECTS) $(librng_la_LIBADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
include ./$(DEPDIR)/EquidistantSampler1D.Plo
include ./$(DEPDIR)/HOSobolGenerator.Plo
include ./$(DEPDIR)/MersenneTwister.Plo
include ./$(DEPDIR)/RandomSequenceRepeater.Plo
include ./$(DEPDIR)/RegularSampler1D.Plo
include ./$(DEPDIR)/SobolGenerator.Plo
include ./$(DEPDIR)/convertToSphericalRandom.Plo
include ./$(DEPDIR)/permutation.Plo
.cc.o:
$(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
# $(AM_V_CXX)source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(CXXCOMPILE) -c -o $@ $<
.cc.obj:
$(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
# $(AM_V_CXX)source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cc.lo:
$(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
# $(AM_V_CXX)source='$<' object='$@' libtool=yes \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(LTCXXCOMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
install-includeHEADERS: $(include_HEADERS)
@$(NORMAL_INSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
$(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
done
uninstall-includeHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(LTLIBRARIES) $(HEADERS)
installdirs:
for dir in "$(DESTDIR)$(includedir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \
mostlyclean-am
distclean: distclean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-includeHEADERS
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-includeHEADERS
.MAKE: install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \
clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \
ctags-am distclean distclean-compile distclean-generic \
distclean-libtool distclean-tags distdir dvi dvi-am html \
html-am info info-am install install-am install-data \
install-data-am install-dvi install-dvi-am install-exec \
install-exec-am install-html install-html-am \
install-includeHEADERS install-info install-info-am \
install-man install-pdf install-pdf-am install-ps \
install-ps-am install-strip installcheck installcheck-am \
installdirs maintainer-clean maintainer-clean-generic \
mostlyclean mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \
uninstall-am uninstall-includeHEADERS
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/npstat/wrap/Makefile
===================================================================
--- trunk/npstat/wrap/Makefile (revision 579)
+++ trunk/npstat/wrap/Makefile (revision 580)
@@ -1,585 +1,585 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# npstat/wrap/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
subdir = npstat/wrap
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(include_HEADERS) \
$(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
SOURCES =
DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(includedir)"
HEADERS = $(include_HEADERS)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__DIST_COMMON = $(srcdir)/Makefile.in
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk/npstat/wrap
abs_srcdir = /home/igv/Hepforge/npstat/trunk/npstat/wrap
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include/npstat/wrap
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix = ../../
top_builddir = ../..
top_srcdir = ../..
include_HEADERS = arrayNDFromNumpy.hh \
arrayNDToNumpy.hh \
histoBars.hh \
histoOutline1D.hh \
ntupleColumnToNumpy.hh \
NumpyTypecode.hh \
OneShotReadBuf.hh \
pickle_item.hh \
PyClassId.hh \
PythonRecord.hh \
PythonReference.hh \
scanDensity1D.hh \
StorableNone.hh \
stringArchiveToBinary.hh \
templateParameters.hh
all: all-am
.SUFFIXES:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign npstat/wrap/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign npstat/wrap/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
install-includeHEADERS: $(include_HEADERS)
@$(NORMAL_INSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
$(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
done
uninstall-includeHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(HEADERS)
installdirs:
for dir in "$(DESTDIR)$(includedir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool mostlyclean-am
distclean: distclean-am
-rm -f Makefile
distclean-am: clean-am distclean-generic distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-includeHEADERS
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-includeHEADERS
.MAKE: install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \
clean-libtool cscopelist-am ctags ctags-am distclean \
distclean-generic distclean-libtool distclean-tags distdir dvi \
dvi-am html html-am info info-am install install-am \
install-data install-data-am install-dvi install-dvi-am \
install-exec install-exec-am install-html install-html-am \
install-includeHEADERS install-info install-info-am \
install-man install-pdf install-pdf-am install-ps \
install-ps-am install-strip installcheck installcheck-am \
installdirs maintainer-clean maintainer-clean-generic \
mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \
ps ps-am tags tags-am uninstall uninstall-am \
uninstall-includeHEADERS
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/npstat/nm/Makefile
===================================================================
--- trunk/npstat/nm/Makefile (revision 579)
+++ trunk/npstat/nm/Makefile (revision 580)
@@ -1,813 +1,817 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# npstat/nm/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
subdir = npstat/nm
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(include_HEADERS) \
$(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
LTLIBRARIES = $(noinst_LTLIBRARIES)
libnm_la_LIBADD =
am_libnm_la_OBJECTS = ArrayShape.lo ArrayRange.lo SpecialFunctions.lo \
ConvolutionEngine1D.lo EquidistantSequence.lo \
GaussHermiteQuadrature.lo GaussLegendreQuadrature.lo \
MathUtils.lo OrthoPoly1D.lo GridAxis.lo rectangleQuadrature.lo \
LinInterpolatedTable1D.lo findPeak2D.lo bilinearSection.lo \
ConvolutionEngineND.lo FourierImage.lo SvdMethod.lo \
binomialCoefficient.lo UniformAxis.lo ArrayNDScanner.lo \
DualAxis.lo DiscreteBernsteinPoly1D.lo definiteIntegrals.lo \
EigenMethod.lo goldenSectionSearch.lo timestamp.lo \
OrthoPolyMethod.lo ContOrthoPoly1D.lo lapack_interface.lo \
AbsClassicalOrthoPoly1D.lo ClassicalOrthoPolys1D.lo \
- matrixIndexPairs.lo truncatedInverseSqrt.lo FejerQuadrature.lo
+ matrixIndexPairs.lo truncatedInverseSqrt.lo FejerQuadrature.lo \
+ StorablePolySeries1D.lo
libnm_la_OBJECTS = $(am_libnm_la_OBJECTS)
AM_V_lt = $(am__v_lt_$(V))
am__v_lt_ = $(am__v_lt_$(AM_DEFAULT_VERBOSITY))
am__v_lt_0 = --silent
am__v_lt_1 =
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
DEFAULT_INCLUDES = -I.
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CXXFLAGS) $(CXXFLAGS)
AM_V_CXX = $(am__v_CXX_$(V))
am__v_CXX_ = $(am__v_CXX_$(AM_DEFAULT_VERBOSITY))
am__v_CXX_0 = @echo " CXX " $@;
am__v_CXX_1 =
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CXXLD = $(am__v_CXXLD_$(V))
am__v_CXXLD_ = $(am__v_CXXLD_$(AM_DEFAULT_VERBOSITY))
am__v_CXXLD_0 = @echo " CXXLD " $@;
am__v_CXXLD_1 =
SOURCES = $(libnm_la_SOURCES)
DIST_SOURCES = $(libnm_la_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(includedir)"
HEADERS = $(include_HEADERS)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk/npstat/nm
abs_srcdir = /home/igv/Hepforge/npstat/trunk/npstat/nm
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include/npstat/nm
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix = ../../
top_builddir = ../..
top_srcdir = ../..
AM_CPPFLAGS = -I../../ $(DEPS_CFLAGS)
noinst_LTLIBRARIES = libnm.la
libnm_la_SOURCES = ArrayShape.cc ArrayRange.cc SpecialFunctions.cc \
ConvolutionEngine1D.cc EquidistantSequence.cc GaussHermiteQuadrature.cc \
GaussLegendreQuadrature.cc MathUtils.cc OrthoPoly1D.cc GridAxis.cc \
rectangleQuadrature.cc LinInterpolatedTable1D.cc findPeak2D.cc \
bilinearSection.cc ConvolutionEngineND.cc FourierImage.cc SvdMethod.cc \
binomialCoefficient.cc UniformAxis.cc ArrayNDScanner.cc DualAxis.cc \
DiscreteBernsteinPoly1D.cc definiteIntegrals.cc EigenMethod.cc \
goldenSectionSearch.cc timestamp.cc OrthoPolyMethod.cc ContOrthoPoly1D.cc \
lapack_interface.cc AbsClassicalOrthoPoly1D.cc ClassicalOrthoPolys1D.cc \
- matrixIndexPairs.cc truncatedInverseSqrt.cc FejerQuadrature.cc
+ matrixIndexPairs.cc truncatedInverseSqrt.cc FejerQuadrature.cc \
+ StorablePolySeries1D.cc
include_HEADERS = AbsArrayProjector.hh \
AbsClassicalOrthoPoly1D.hh \
AbsClassicalOrthoPoly1D.icc \
absDifference.hh \
AbsMultivariateFunctor.hh \
AbsVisitor.hh \
allocators.hh \
areAllElementsUnique.hh \
ArrayND.hh \
ArrayND.icc \
ArrayNDScanner.hh \
ArrayRange.hh \
ArrayShape.hh \
bilinearSection.hh \
binomialCoefficient.hh \
BoxND.hh \
BoxND.icc \
BoxNDScanner.hh \
BoxNDScanner.icc \
CircularMapper1d.hh \
ClassicalOrthoPolys1D.hh \
closeWithinTolerance.hh \
CompareByIndex.hh \
ComplexComparesAbs.hh \
ComplexComparesFalse.hh \
ConstSubscriptMap.hh \
ContOrthoPoly1D.hh \
ContOrthoPoly1D.icc \
ConvolutionEngine1D.hh \
ConvolutionEngine1D.icc \
ConvolutionEngineND.hh \
ConvolutionEngineND.icc \
CoordinateSelector.hh \
definiteIntegrals.hh \
DiscreteBernsteinPoly1D.hh \
discretizedDistance.hh \
discretizedDistance.icc \
DualAxis.hh \
EigenMethod.hh \
EquidistantSequence.hh \
ExpMapper1d.hh \
FejerQuadrature.hh \
fillArrayCentersPreservingAreas.hh \
findPeak2D.hh \
findRootInLogSpace.hh \
findRootInLogSpace.icc \
findRootNewtonRaphson.hh \
findRootNewtonRaphson.icc \
findRootUsingBisections.hh \
findRootUsingBisections.icc \
FourierImage.hh \
GaussHermiteQuadrature.hh \
GaussHermiteQuadrature.icc \
GaussLegendreQuadrature.hh \
GaussLegendreQuadrature.icc \
GeneralizedComplex.hh \
goldenSectionSearch.hh \
goldenSectionSearch.icc \
GridAxis.hh \
interpolate.hh \
interpolate.icc \
Interval.hh \
Interval.icc \
isMonotonous.hh \
KDTree.hh \
KDTree.icc \
lapack.h \
lapack_interface.hh \
lapack_interface.icc \
lapack_interface_double.icc \
lapack_interface_float.icc \
LinearMapper1d.hh \
LinInterpolatedTable1D.hh \
LinInterpolatedTable1D.icc \
LinInterpolatedTableND.hh \
LinInterpolatedTableND.icc \
LogMapper1d.hh \
MathUtils.hh \
MathUtils.icc \
Matrix.hh \
matrixIndexPairs.hh \
Matrix.icc \
MinSearchStatus1D.hh \
MultivariateFunctorScanner.hh \
OrthoPoly1D.hh \
OrthoPoly1D.icc \
OrthoPolyMethod.hh \
OrthoPolyND.hh \
OrthoPolyND.icc \
PairCompare.hh \
PointDimensionality.hh \
PreciseType.hh \
ProperDblFromCmpl.hh \
PtrBufferHandle.hh \
rectangleQuadrature.hh \
rescanArray.hh \
rescanArray.icc \
SimpleFunctors.hh \
SpecialFunctions.hh \
+ StorablePolySeries1D.hh \
SvdMethod.hh \
timestamp.hh \
Triple.hh \
truncatedInverseSqrt.hh \
UniformAxis.hh
EXTRA_DIST = 00README.txt
all: all-am
.SUFFIXES:
.SUFFIXES: .cc .lo .o .obj
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign npstat/nm/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign npstat/nm/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
clean-noinstLTLIBRARIES:
-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
@list='$(noinst_LTLIBRARIES)'; \
locs=`for p in $$list; do echo $$p; done | \
sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \
sort -u`; \
test -z "$$locs" || { \
echo rm -f $${locs}; \
rm -f $${locs}; \
}
libnm.la: $(libnm_la_OBJECTS) $(libnm_la_DEPENDENCIES) $(EXTRA_libnm_la_DEPENDENCIES)
$(AM_V_CXXLD)$(CXXLINK) $(libnm_la_OBJECTS) $(libnm_la_LIBADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
include ./$(DEPDIR)/AbsClassicalOrthoPoly1D.Plo
include ./$(DEPDIR)/ArrayNDScanner.Plo
include ./$(DEPDIR)/ArrayRange.Plo
include ./$(DEPDIR)/ArrayShape.Plo
include ./$(DEPDIR)/ClassicalOrthoPolys1D.Plo
include ./$(DEPDIR)/ContOrthoPoly1D.Plo
include ./$(DEPDIR)/ConvolutionEngine1D.Plo
include ./$(DEPDIR)/ConvolutionEngineND.Plo
include ./$(DEPDIR)/DiscreteBernsteinPoly1D.Plo
include ./$(DEPDIR)/DualAxis.Plo
include ./$(DEPDIR)/EigenMethod.Plo
include ./$(DEPDIR)/EquidistantSequence.Plo
include ./$(DEPDIR)/FejerQuadrature.Plo
include ./$(DEPDIR)/FourierImage.Plo
include ./$(DEPDIR)/GaussHermiteQuadrature.Plo
include ./$(DEPDIR)/GaussLegendreQuadrature.Plo
include ./$(DEPDIR)/GridAxis.Plo
include ./$(DEPDIR)/LinInterpolatedTable1D.Plo
include ./$(DEPDIR)/MathUtils.Plo
include ./$(DEPDIR)/OrthoPoly1D.Plo
include ./$(DEPDIR)/OrthoPolyMethod.Plo
include ./$(DEPDIR)/SpecialFunctions.Plo
+include ./$(DEPDIR)/StorablePolySeries1D.Plo
include ./$(DEPDIR)/SvdMethod.Plo
include ./$(DEPDIR)/UniformAxis.Plo
include ./$(DEPDIR)/bilinearSection.Plo
include ./$(DEPDIR)/binomialCoefficient.Plo
include ./$(DEPDIR)/definiteIntegrals.Plo
include ./$(DEPDIR)/findPeak2D.Plo
include ./$(DEPDIR)/goldenSectionSearch.Plo
include ./$(DEPDIR)/lapack_interface.Plo
include ./$(DEPDIR)/matrixIndexPairs.Plo
include ./$(DEPDIR)/rectangleQuadrature.Plo
include ./$(DEPDIR)/timestamp.Plo
include ./$(DEPDIR)/truncatedInverseSqrt.Plo
.cc.o:
$(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
# $(AM_V_CXX)source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(CXXCOMPILE) -c -o $@ $<
.cc.obj:
$(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
# $(AM_V_CXX)source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cc.lo:
$(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
# $(AM_V_CXX)source='$<' object='$@' libtool=yes \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(LTCXXCOMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
install-includeHEADERS: $(include_HEADERS)
@$(NORMAL_INSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
$(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
done
uninstall-includeHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(LTLIBRARIES) $(HEADERS)
installdirs:
for dir in "$(DESTDIR)$(includedir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \
mostlyclean-am
distclean: distclean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-includeHEADERS
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-includeHEADERS
.MAKE: install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \
clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \
ctags-am distclean distclean-compile distclean-generic \
distclean-libtool distclean-tags distdir dvi dvi-am html \
html-am info info-am install install-am install-data \
install-data-am install-dvi install-dvi-am install-exec \
install-exec-am install-html install-html-am \
install-includeHEADERS install-info install-info-am \
install-man install-pdf install-pdf-am install-ps \
install-ps-am install-strip installcheck installcheck-am \
installdirs maintainer-clean maintainer-clean-generic \
mostlyclean mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \
uninstall-am uninstall-includeHEADERS
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/npstat/stat/Makefile
===================================================================
--- trunk/npstat/stat/Makefile (revision 579)
+++ trunk/npstat/stat/Makefile (revision 580)
@@ -1,1156 +1,1165 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# npstat/stat/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
subdir = npstat/stat
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(include_HEADERS) \
$(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
LTLIBRARIES = $(noinst_LTLIBRARIES)
libstat_la_LIBADD =
am_libstat_la_OBJECTS = AbsDistribution1D.lo AbsUnfoldND.lo \
AbsDistributionND.lo amiseOptimalBandwidth.lo \
CompositeDistribution1D.lo CompositeDistributionND.lo \
CopulaInterpolationND.lo SymbetaParams1D.lo \
Distribution1DFactory.lo Distribution1DReader.lo \
DistributionNDReader.lo Distributions1D.lo DistributionsND.lo \
CrossCovarianceAccumulator.lo fitSbParameters.lo \
StatAccumulatorArr.lo HistoAxis.lo ResponseMatrix.lo \
InterpolatedDistribution1D.lo JohnsonCurves.lo \
JohnsonKDESmoother.lo LocalPolyFilter1D.lo \
logLikelihoodPeak.lo PolyFilterCollection1D.lo \
SbMomentsBigGamma.lo SbMomentsCalculator.lo \
gaussianResponseMatrix.lo SequentialCopulaSmoother.lo \
SequentialPolyFilterND.lo StatAccumulator.lo \
UnitMapInterpolationND.lo WeightedStatAccumulator.lo \
AbsNtuple.lo QuadraticOrthoPolyND.lo NMCombinationSequencer.lo \
Filter1DBuilders.lo StatAccumulatorPair.lo GridRandomizer.lo \
ConstantBandwidthSmoother1D.lo GaussianMixture1D.lo \
HistoNDCdf.lo scanDensityAsWeight.lo NUHistoAxis.lo \
distributionReadError.lo WeightedStatAccumulatorPair.lo \
AbsUnfold1D.lo ProductSymmetricBetaNDCdf.lo DualHistoAxis.lo \
multinomialCovariance1D.lo StorableMultivariateFunctor.lo \
StorableMultivariateFunctorReader.lo \
TruncatedDistribution1D.lo neymanPearsonWindow1D.lo \
AsinhTransform1D.lo LOrPEMarginalSmoother.lo \
LeftCensoredDistribution.lo QuantileTable1D.lo \
RightCensoredDistribution.lo AbsDiscreteDistribution1D.lo \
convertAxis.lo DiscreteDistribution1DReader.lo \
DiscreteDistributions1D.lo lorpeMise1D.lo \
BernsteinFilter1DBuilder.lo BetaFilter1DBuilder.lo \
AbsFilter1DBuilder.lo continuousDegreeTaper.lo \
RatioOfNormals.lo AbsCVCopulaSmoother.lo DensityScan1D.lo \
BoundaryHandling.lo SmoothedEMUnfold1D.lo Copulas.lo \
PearsonsChiSquared.lo BinnedKSTest1D.lo \
MultiscaleEMUnfold1D.lo AbsBinnedComparison1D.lo \
BinnedADTest1D.lo LocalPolyFilter1DReader.lo \
MemoizingSymbetaFilterProvider.lo UGaussConvolution1D.lo \
BinSummary.lo SmoothedEMUnfoldND.lo UnfoldingFilterNDReader.lo \
AbsUnfoldingFilterND.lo UnfoldingBandwidthScannerND.lo \
DistributionMix1D.lo ScalableGaussND.lo \
InterpolatedDistro1D1P.lo AbsDistributionTransform1D.lo \
LogTransform1D.lo DistributionTransform1DReader.lo \
TransformedDistribution1D.lo WeightTableFilter1DBuilder.lo \
VerticallyInterpolatedDistribution1D.lo LocalMultiFilter1D.lo \
LogRatioTransform1D.lo IdentityTransform1D.lo \
VariableBandwidthSmoother1D.lo AbsMarginalSmootherBase.lo \
OSDE1D.lo buildInterpolatedHelpers.lo \
GridInterpolatedDistribution.lo AbsCopulaSmootherBase.lo \
BernsteinCopulaSmoother.lo distro1DStats.lo \
SequentialGroupedCopulaSmoother.lo \
UnfoldingBandwidthScanner1D.lo InterpolatedDistro1DNP.lo \
volumeDensityFromBinnedRadial.lo statUncertainties.lo \
- LocationScaleFamily1D.lo SinhAsinhTransform1D.lo
+ LocationScaleFamily1D.lo SinhAsinhTransform1D.lo \
+ AbsKDE1DKernel.lo KDE1DHOSymbetaKernel.lo
libstat_la_OBJECTS = $(am_libstat_la_OBJECTS)
AM_V_lt = $(am__v_lt_$(V))
am__v_lt_ = $(am__v_lt_$(AM_DEFAULT_VERBOSITY))
am__v_lt_0 = --silent
am__v_lt_1 =
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
DEFAULT_INCLUDES = -I.
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CXXFLAGS) $(CXXFLAGS)
AM_V_CXX = $(am__v_CXX_$(V))
am__v_CXX_ = $(am__v_CXX_$(AM_DEFAULT_VERBOSITY))
am__v_CXX_0 = @echo " CXX " $@;
am__v_CXX_1 =
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CXXLD = $(am__v_CXXLD_$(V))
am__v_CXXLD_ = $(am__v_CXXLD_$(AM_DEFAULT_VERBOSITY))
am__v_CXXLD_0 = @echo " CXXLD " $@;
am__v_CXXLD_1 =
SOURCES = $(libstat_la_SOURCES)
DIST_SOURCES = $(libstat_la_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(includedir)"
HEADERS = $(include_HEADERS)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk/npstat/stat
abs_srcdir = /home/igv/Hepforge/npstat/trunk/npstat/stat
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include/npstat/stat
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix = ../../
top_builddir = ../..
top_srcdir = ../..
AM_CPPFLAGS = -I../../ $(DEPS_CFLAGS)
noinst_LTLIBRARIES = libstat.la
libstat_la_SOURCES = AbsDistribution1D.cc AbsUnfoldND.cc \
AbsDistributionND.cc amiseOptimalBandwidth.cc CompositeDistribution1D.cc \
CompositeDistributionND.cc CopulaInterpolationND.cc SymbetaParams1D.cc \
Distribution1DFactory.cc Distribution1DReader.cc DistributionNDReader.cc \
Distributions1D.cc DistributionsND.cc CrossCovarianceAccumulator.cc \
fitSbParameters.cc StatAccumulatorArr.cc HistoAxis.cc ResponseMatrix.cc \
InterpolatedDistribution1D.cc JohnsonCurves.cc JohnsonKDESmoother.cc \
LocalPolyFilter1D.cc logLikelihoodPeak.cc PolyFilterCollection1D.cc \
SbMomentsBigGamma.cc SbMomentsCalculator.cc gaussianResponseMatrix.cc \
SequentialCopulaSmoother.cc SequentialPolyFilterND.cc StatAccumulator.cc \
UnitMapInterpolationND.cc WeightedStatAccumulator.cc AbsNtuple.cc \
QuadraticOrthoPolyND.cc NMCombinationSequencer.cc Filter1DBuilders.cc \
StatAccumulatorPair.cc GridRandomizer.cc ConstantBandwidthSmoother1D.cc \
GaussianMixture1D.cc HistoNDCdf.cc scanDensityAsWeight.cc NUHistoAxis.cc \
distributionReadError.cc WeightedStatAccumulatorPair.cc AbsUnfold1D.cc \
ProductSymmetricBetaNDCdf.cc DualHistoAxis.cc multinomialCovariance1D.cc \
StorableMultivariateFunctor.cc StorableMultivariateFunctorReader.cc \
TruncatedDistribution1D.cc neymanPearsonWindow1D.cc AsinhTransform1D.cc \
LOrPEMarginalSmoother.cc LeftCensoredDistribution.cc QuantileTable1D.cc \
RightCensoredDistribution.cc AbsDiscreteDistribution1D.cc convertAxis.cc \
DiscreteDistribution1DReader.cc DiscreteDistributions1D.cc lorpeMise1D.cc \
BernsteinFilter1DBuilder.cc BetaFilter1DBuilder.cc AbsFilter1DBuilder.cc \
continuousDegreeTaper.cc RatioOfNormals.cc AbsCVCopulaSmoother.cc \
DensityScan1D.cc BoundaryHandling.cc SmoothedEMUnfold1D.cc Copulas.cc \
PearsonsChiSquared.cc BinnedKSTest1D.cc MultiscaleEMUnfold1D.cc \
AbsBinnedComparison1D.cc BinnedADTest1D.cc LocalPolyFilter1DReader.cc \
MemoizingSymbetaFilterProvider.cc UGaussConvolution1D.cc BinSummary.cc \
SmoothedEMUnfoldND.cc UnfoldingFilterNDReader.cc AbsUnfoldingFilterND.cc \
UnfoldingBandwidthScannerND.cc DistributionMix1D.cc ScalableGaussND.cc \
InterpolatedDistro1D1P.cc AbsDistributionTransform1D.cc LogTransform1D.cc \
DistributionTransform1DReader.cc TransformedDistribution1D.cc \
WeightTableFilter1DBuilder.cc VerticallyInterpolatedDistribution1D.cc \
LocalMultiFilter1D.cc LogRatioTransform1D.cc IdentityTransform1D.cc \
VariableBandwidthSmoother1D.cc AbsMarginalSmootherBase.cc OSDE1D.cc \
buildInterpolatedHelpers.cc GridInterpolatedDistribution.cc \
AbsCopulaSmootherBase.cc BernsteinCopulaSmoother.cc distro1DStats.cc \
SequentialGroupedCopulaSmoother.cc UnfoldingBandwidthScanner1D.cc \
InterpolatedDistro1DNP.cc volumeDensityFromBinnedRadial.cc \
- statUncertainties.cc LocationScaleFamily1D.cc SinhAsinhTransform1D.cc
+ statUncertainties.cc LocationScaleFamily1D.cc SinhAsinhTransform1D.cc \
+ AbsKDE1DKernel.cc KDE1DHOSymbetaKernel.cc
include_HEADERS = AbsBandwidthCV.hh \
AbsBandwidthGCV.hh \
AbsBinnedComparison1D.hh \
AbsBinnedComparison1D.icc \
AbsCompositeDistroBuilder.hh \
AbsCompositeDistroBuilder.icc \
AbsCopulaSmootherBase.hh \
AbsCopulaSmootherBase.icc \
AbsCVCopulaSmoother.hh \
AbsDiscreteDistribution1D.hh \
AbsDistribution1D.hh \
AbsDistributionND.hh \
AbsDistributionND.icc \
AbsDistributionTransform1D.hh \
AbsDistro1DBuilder.hh \
AbsDistro1DBuilder.icc \
AbsFilter1DBuilder.hh \
AbsGridInterpolatedDistribution.hh \
AbsInterpolatedDistribution1D.hh \
AbsInterpolationAlgoND.hh \
+ AbsKDE1DKernel.hh \
+ AbsKDE1DKernel.icc \
AbsLossCalculator.hh \
AbsMarginalSmootherBase.hh \
AbsMarginalSmootherBase.icc \
AbsNtuple.hh \
AbsNtuple.icc \
AbsPolyFilter1D.hh \
AbsPolyFilterND.hh \
AbsResponseBoxBuilder.hh \
AbsResponseIntervalBuilder.hh \
AbsSymbetaFilterProvider.hh \
AbsUnfold1D.hh \
AbsUnfoldingFilterND.hh \
AbsUnfoldND.hh \
amiseOptimalBandwidth.hh \
amiseOptimalBandwidth.icc \
ArchivedNtuple.hh \
ArchivedNtuple.icc \
ArrayProjectors.hh \
ArrayProjectors.icc \
arrayStats.hh \
arrayStats.icc \
AsinhTransform1D.hh \
BandwidthCVLeastSquares1D.hh \
BandwidthCVLeastSquares1D.icc \
BandwidthCVLeastSquaresND.hh \
BandwidthCVLeastSquaresND.icc \
BandwidthCVPseudoLogli1D.hh \
BandwidthCVPseudoLogli1D.icc \
BandwidthCVPseudoLogliND.hh \
BandwidthCVPseudoLogliND.icc \
BandwidthGCVLeastSquares1D.hh \
BandwidthGCVLeastSquares1D.icc \
BandwidthGCVLeastSquaresND.hh \
BandwidthGCVLeastSquaresND.icc \
BandwidthGCVPseudoLogli1D.hh \
BandwidthGCVPseudoLogli1D.icc \
BandwidthGCVPseudoLogliND.hh \
BandwidthGCVPseudoLogliND.icc \
BernsteinCopulaSmoother.hh \
BernsteinFilter1DBuilder.hh \
betaKernelsBandwidth.hh \
betaKernelsBandwidth.icc \
BetaFilter1DBuilder.hh \
BinnedADTest1D.hh \
BinnedKSTest1D.hh \
BinSummary.hh \
BinSummary.icc \
BoundaryHandling.hh \
BoundaryMethod.hh \
buildInterpolatedCompositeDistroND.hh \
buildInterpolatedCompositeDistroND.icc \
buildInterpolatedDistro1DNP.hh \
buildInterpolatedDistro1DNP.icc \
buildInterpolatedHelpers.hh \
CensoredQuantileRegression.hh \
CensoredQuantileRegression.icc \
CircularBuffer.hh \
CircularBuffer.icc \
Column.hh \
Column.icc \
CompositeDistribution1D.hh \
CompositeDistributionND.hh \
CompositeDistributionND.icc \
CompositeDistros1D.hh \
ConstantBandwidthSmoother1D.hh \
ConstantBandwidthSmootherND.hh \
ConstantBandwidthSmootherND.icc \
continuousDegreeTaper.hh \
convertAxis.hh \
CopulaInterpolationND.hh \
Copulas.hh \
CrossCovarianceAccumulator.hh \
CrossCovarianceAccumulator.icc \
CVCopulaSmoother.hh \
CVCopulaSmoother.icc \
DensityAveScanND.hh \
DensityScan1D.hh \
DensityScanND.hh \
DiscreteDistribution1DReader.hh \
DiscreteDistributions1D.hh \
discretizationErrorND.hh \
Distribution1DFactory.hh \
Distribution1DReader.hh \
DistributionTransform1DReader.hh \
DistributionMix1D.hh \
DistributionNDReader.hh \
Distributions1D.hh \
Distributions1D.icc \
DistributionsND.hh \
DistributionsND.icc \
distributionReadError.hh \
distro1DStats.hh \
DualHistoAxis.hh \
DummyCompositeDistroBuilder.hh \
DummyDistro1DBuilder.hh \
DummyResponseBoxBuilder.hh \
DummyResponseIntervalBuilder.hh \
empiricalCopula.hh \
empiricalCopulaHisto.hh \
empiricalCopulaHisto.icc \
empiricalCopula.icc \
fillHistoFromText.hh \
fillHistoFromText.icc \
Filter1DBuilders.hh \
FitUtils.hh \
FitUtils.icc \
GaussianMixture1D.hh \
gaussianResponseMatrix.hh \
GCVCopulaSmoother.hh \
GCVCopulaSmoother.icc \
griddedRobustRegression.hh \
griddedRobustRegression.icc \
GriddedRobustRegressionStop.hh \
GridInterpolatedDistribution.hh \
GridRandomizer.hh \
HistoAxis.hh \
HistoND.hh \
HistoND.icc \
HistoNDCdf.hh \
HistoNDFunctorInstances.hh \
histoStats.hh \
histoStats.icc \
histoUtils.hh \
histoUtils.icc \
IdentityTransform1D.hh \
InMemoryNtuple.hh \
InMemoryNtuple.icc \
InterpolatedDistribution1D.hh \
InterpolatedDistro1D1P.hh \
InterpolatedDistro1DNP.hh \
interpolateHistoND.hh \
interpolateHistoND.icc \
InterpolationFunctorInstances.hh \
JohnsonCurves.hh \
JohnsonKDESmoother.hh \
+ KDE1D.hh \
+ KDE1DCV.hh \
+ KDE1DHOSymbetaKernel.hh \
KDECopulaSmoother.hh \
KDECopulaSmoother.icc \
KDEGroupedCopulaSmoother.hh \
KDEGroupedCopulaSmoother.icc \
KDEFilterND.hh \
KDEFilterND.icc \
kendallsTau.hh \
kendallsTau.icc \
LeftCensoredDistribution.hh \
LocalLogisticRegression.hh \
LocalLogisticRegression.icc \
LocalMultiFilter1D.hh \
LocalMultiFilter1D.icc \
LocalPolyFilter1D.hh \
LocalPolyFilter1D.icc \
LocalPolyFilter1DReader.hh \
LocalPolyFilterND.hh \
LocalPolyFilterND.icc \
LocalQuadraticLeastSquaresND.hh \
LocalQuadraticLeastSquaresND.icc \
LocalQuantileRegression.hh \
LocalQuantileRegression.icc \
LocationScaleFamily1D.hh \
LocationScaleTransform1.hh \
LocationScaleTransform1.icc \
logLikelihoodPeak.hh \
LogRatioTransform1D.hh \
LogTransform1D.hh \
LOrPECopulaSmoother.hh \
LOrPECopulaSmoother.icc \
LOrPEGroupedCopulaSmoother.hh \
LOrPEGroupedCopulaSmoother.icc \
LOrPEMarginalSmoother.hh \
lorpeBackgroundCVDensity1D.hh \
lorpeBackgroundCVDensity1D.icc \
lorpeBackground1D.hh \
lorpeBackground1D.icc \
lorpeMise1D.hh \
lorpeSmooth1D.hh \
lorpeSmooth1D.icc \
MemoizingSymbetaFilterProvider.hh \
MemoizingSymbetaFilterProvider.icc \
mergeTwoHistos.hh \
mergeTwoHistos.icc \
mirrorWeight.hh \
MultiscaleEMUnfold1D.hh \
multinomialCovariance1D.hh \
MultivariateSumAccumulator.hh \
MultivariateSumsqAccumulator.hh \
MultivariateSumsqAccumulator.icc \
MultivariateWeightedSumAccumulator.hh \
MultivariateWeightedSumsqAccumulator.hh \
MultivariateWeightedSumsqAccumulator.icc \
neymanPearsonWindow1D.hh \
NMCombinationSequencer.hh \
NonparametricCompositeBuilder.hh \
NonparametricCompositeBuilder.icc \
NonparametricDistro1DBuilder.hh \
NonparametricDistro1DBuilder.icc \
NtHistoFill.hh \
NtNtupleFill.hh \
NtRectangularCut.hh \
NtRectangularCut.icc \
NtupleBuffer.hh \
NtupleBuffer.icc \
NtupleRecordTypes.hh \
NtupleRecordTypesFwd.hh \
NtupleReference.hh \
NUHistoAxis.hh \
OrderedPointND.hh \
OrderedPointND.icc \
orthoPoly1DVProducts.hh \
orthoPoly1DVProducts.icc \
OSDE1D.hh \
OSDE1D.icc \
PearsonsChiSquared.hh \
PolyFilterCollection1D.hh \
productResponseMatrix.hh \
productResponseMatrix.icc \
ProductSymmetricBetaNDCdf.hh \
QuadraticOrthoPolyND.hh \
QuadraticOrthoPolyND.icc \
QuantileRegression1D.hh \
QuantileRegression1D.icc \
QuantileTable1D.hh \
RatioOfNormals.hh \
RatioResponseBoxBuilder.hh \
RatioResponseBoxBuilder.icc \
RatioResponseIntervalBuilder.hh \
RatioResponseIntervalBuilder.icc \
ResponseMatrix.hh \
RightCensoredDistribution.hh \
SampleAccumulator.hh \
SampleAccumulator.icc \
SbMomentsCalculator.hh \
ScalableGaussND.hh \
scanDensityAsWeight.hh \
SequentialCopulaSmoother.hh \
SequentialGroupedCopulaSmoother.hh \
SequentialPolyFilterND.hh \
SequentialPolyFilterND.icc \
SinhAsinhTransform1D.hh \
SmoothedEMUnfold1D.hh \
SmoothedEMUnfoldND.hh \
spearmansRho.hh \
spearmansRho.icc \
StatAccumulator.hh \
StatAccumulatorArr.hh \
StatAccumulatorPair.hh \
statUncertainties.hh \
StatUtils.hh \
StatUtils.icc \
StorableHistoNDFunctor.hh \
StorableHistoNDFunctor.icc \
StorableInterpolationFunctor.hh \
StorableInterpolationFunctor.icc \
StorableMultivariateFunctor.hh \
StorableMultivariateFunctorReader.hh \
SymbetaParams1D.hh \
TransformedDistribution1D.hh \
TruncatedDistribution1D.hh \
TwoPointsLTSLoss.hh \
TwoPointsLTSLoss.icc \
UGaussConvolution1D.hh \
UnfoldingBandwidthScanner1D.hh \
UnfoldingBandwidthScannerND.hh \
UnfoldingFilterNDReader.hh \
UnitMapInterpolationND.hh \
variableBandwidthSmooth1D.hh \
variableBandwidthSmooth1D.icc \
VariableBandwidthSmoother1D.hh \
VerticallyInterpolatedDistribution1D.hh \
volumeDensityFromBinnedRadial.hh \
weightedCopulaHisto.hh \
weightedCopulaHisto.icc \
WeightedDistro1DPtr.hh \
WeightedLTSLoss.hh \
WeightedLTSLoss.icc \
WeightedSampleAccumulator.hh \
WeightedSampleAccumulator.icc \
WeightedStatAccumulator.hh \
WeightedStatAccumulatorPair.hh \
WeightTableFilter1DBuilder.hh
EXTRA_DIST = 00README.txt npstat_doxy.hh
all: all-am
.SUFFIXES:
.SUFFIXES: .cc .lo .o .obj
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign npstat/stat/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign npstat/stat/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
clean-noinstLTLIBRARIES:
-test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES)
@list='$(noinst_LTLIBRARIES)'; \
locs=`for p in $$list; do echo $$p; done | \
sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \
sort -u`; \
test -z "$$locs" || { \
echo rm -f $${locs}; \
rm -f $${locs}; \
}
libstat.la: $(libstat_la_OBJECTS) $(libstat_la_DEPENDENCIES) $(EXTRA_libstat_la_DEPENDENCIES)
$(AM_V_CXXLD)$(CXXLINK) $(libstat_la_OBJECTS) $(libstat_la_LIBADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
include ./$(DEPDIR)/AbsBinnedComparison1D.Plo
include ./$(DEPDIR)/AbsCVCopulaSmoother.Plo
include ./$(DEPDIR)/AbsCopulaSmootherBase.Plo
include ./$(DEPDIR)/AbsDiscreteDistribution1D.Plo
include ./$(DEPDIR)/AbsDistribution1D.Plo
include ./$(DEPDIR)/AbsDistributionND.Plo
include ./$(DEPDIR)/AbsDistributionTransform1D.Plo
include ./$(DEPDIR)/AbsFilter1DBuilder.Plo
+include ./$(DEPDIR)/AbsKDE1DKernel.Plo
include ./$(DEPDIR)/AbsMarginalSmootherBase.Plo
include ./$(DEPDIR)/AbsNtuple.Plo
include ./$(DEPDIR)/AbsUnfold1D.Plo
include ./$(DEPDIR)/AbsUnfoldND.Plo
include ./$(DEPDIR)/AbsUnfoldingFilterND.Plo
include ./$(DEPDIR)/AsinhTransform1D.Plo
include ./$(DEPDIR)/BernsteinCopulaSmoother.Plo
include ./$(DEPDIR)/BernsteinFilter1DBuilder.Plo
include ./$(DEPDIR)/BetaFilter1DBuilder.Plo
include ./$(DEPDIR)/BinSummary.Plo
include ./$(DEPDIR)/BinnedADTest1D.Plo
include ./$(DEPDIR)/BinnedKSTest1D.Plo
include ./$(DEPDIR)/BoundaryHandling.Plo
include ./$(DEPDIR)/CompositeDistribution1D.Plo
include ./$(DEPDIR)/CompositeDistributionND.Plo
include ./$(DEPDIR)/ConstantBandwidthSmoother1D.Plo
include ./$(DEPDIR)/CopulaInterpolationND.Plo
include ./$(DEPDIR)/Copulas.Plo
include ./$(DEPDIR)/CrossCovarianceAccumulator.Plo
include ./$(DEPDIR)/DensityScan1D.Plo
include ./$(DEPDIR)/DiscreteDistribution1DReader.Plo
include ./$(DEPDIR)/DiscreteDistributions1D.Plo
include ./$(DEPDIR)/Distribution1DFactory.Plo
include ./$(DEPDIR)/Distribution1DReader.Plo
include ./$(DEPDIR)/DistributionMix1D.Plo
include ./$(DEPDIR)/DistributionNDReader.Plo
include ./$(DEPDIR)/DistributionTransform1DReader.Plo
include ./$(DEPDIR)/Distributions1D.Plo
include ./$(DEPDIR)/DistributionsND.Plo
include ./$(DEPDIR)/DualHistoAxis.Plo
include ./$(DEPDIR)/Filter1DBuilders.Plo
include ./$(DEPDIR)/GaussianMixture1D.Plo
include ./$(DEPDIR)/GridInterpolatedDistribution.Plo
include ./$(DEPDIR)/GridRandomizer.Plo
include ./$(DEPDIR)/HistoAxis.Plo
include ./$(DEPDIR)/HistoNDCdf.Plo
include ./$(DEPDIR)/IdentityTransform1D.Plo
include ./$(DEPDIR)/InterpolatedDistribution1D.Plo
include ./$(DEPDIR)/InterpolatedDistro1D1P.Plo
include ./$(DEPDIR)/InterpolatedDistro1DNP.Plo
include ./$(DEPDIR)/JohnsonCurves.Plo
include ./$(DEPDIR)/JohnsonKDESmoother.Plo
+include ./$(DEPDIR)/KDE1DHOSymbetaKernel.Plo
include ./$(DEPDIR)/LOrPEMarginalSmoother.Plo
include ./$(DEPDIR)/LeftCensoredDistribution.Plo
include ./$(DEPDIR)/LocalMultiFilter1D.Plo
include ./$(DEPDIR)/LocalPolyFilter1D.Plo
include ./$(DEPDIR)/LocalPolyFilter1DReader.Plo
include ./$(DEPDIR)/LocationScaleFamily1D.Plo
include ./$(DEPDIR)/LogRatioTransform1D.Plo
include ./$(DEPDIR)/LogTransform1D.Plo
include ./$(DEPDIR)/MemoizingSymbetaFilterProvider.Plo
include ./$(DEPDIR)/MultiscaleEMUnfold1D.Plo
include ./$(DEPDIR)/NMCombinationSequencer.Plo
include ./$(DEPDIR)/NUHistoAxis.Plo
include ./$(DEPDIR)/OSDE1D.Plo
include ./$(DEPDIR)/PearsonsChiSquared.Plo
include ./$(DEPDIR)/PolyFilterCollection1D.Plo
include ./$(DEPDIR)/ProductSymmetricBetaNDCdf.Plo
include ./$(DEPDIR)/QuadraticOrthoPolyND.Plo
include ./$(DEPDIR)/QuantileTable1D.Plo
include ./$(DEPDIR)/RatioOfNormals.Plo
include ./$(DEPDIR)/ResponseMatrix.Plo
include ./$(DEPDIR)/RightCensoredDistribution.Plo
include ./$(DEPDIR)/SbMomentsBigGamma.Plo
include ./$(DEPDIR)/SbMomentsCalculator.Plo
include ./$(DEPDIR)/ScalableGaussND.Plo
include ./$(DEPDIR)/SequentialCopulaSmoother.Plo
include ./$(DEPDIR)/SequentialGroupedCopulaSmoother.Plo
include ./$(DEPDIR)/SequentialPolyFilterND.Plo
include ./$(DEPDIR)/SinhAsinhTransform1D.Plo
include ./$(DEPDIR)/SmoothedEMUnfold1D.Plo
include ./$(DEPDIR)/SmoothedEMUnfoldND.Plo
include ./$(DEPDIR)/StatAccumulator.Plo
include ./$(DEPDIR)/StatAccumulatorArr.Plo
include ./$(DEPDIR)/StatAccumulatorPair.Plo
include ./$(DEPDIR)/StorableMultivariateFunctor.Plo
include ./$(DEPDIR)/StorableMultivariateFunctorReader.Plo
include ./$(DEPDIR)/SymbetaParams1D.Plo
include ./$(DEPDIR)/TransformedDistribution1D.Plo
include ./$(DEPDIR)/TruncatedDistribution1D.Plo
include ./$(DEPDIR)/UGaussConvolution1D.Plo
include ./$(DEPDIR)/UnfoldingBandwidthScanner1D.Plo
include ./$(DEPDIR)/UnfoldingBandwidthScannerND.Plo
include ./$(DEPDIR)/UnfoldingFilterNDReader.Plo
include ./$(DEPDIR)/UnitMapInterpolationND.Plo
include ./$(DEPDIR)/VariableBandwidthSmoother1D.Plo
include ./$(DEPDIR)/VerticallyInterpolatedDistribution1D.Plo
include ./$(DEPDIR)/WeightTableFilter1DBuilder.Plo
include ./$(DEPDIR)/WeightedStatAccumulator.Plo
include ./$(DEPDIR)/WeightedStatAccumulatorPair.Plo
include ./$(DEPDIR)/amiseOptimalBandwidth.Plo
include ./$(DEPDIR)/buildInterpolatedHelpers.Plo
include ./$(DEPDIR)/continuousDegreeTaper.Plo
include ./$(DEPDIR)/convertAxis.Plo
include ./$(DEPDIR)/distributionReadError.Plo
include ./$(DEPDIR)/distro1DStats.Plo
include ./$(DEPDIR)/fitSbParameters.Plo
include ./$(DEPDIR)/gaussianResponseMatrix.Plo
include ./$(DEPDIR)/logLikelihoodPeak.Plo
include ./$(DEPDIR)/lorpeMise1D.Plo
include ./$(DEPDIR)/multinomialCovariance1D.Plo
include ./$(DEPDIR)/neymanPearsonWindow1D.Plo
include ./$(DEPDIR)/scanDensityAsWeight.Plo
include ./$(DEPDIR)/statUncertainties.Plo
include ./$(DEPDIR)/volumeDensityFromBinnedRadial.Plo
.cc.o:
$(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
# $(AM_V_CXX)source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(CXXCOMPILE) -c -o $@ $<
.cc.obj:
$(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
# $(AM_V_CXX)source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cc.lo:
$(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
# $(AM_V_CXX)source='$<' object='$@' libtool=yes \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(LTCXXCOMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
install-includeHEADERS: $(include_HEADERS)
@$(NORMAL_INSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
$(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
done
uninstall-includeHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(LTLIBRARIES) $(HEADERS)
installdirs:
for dir in "$(DESTDIR)$(includedir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool clean-noinstLTLIBRARIES \
mostlyclean-am
distclean: distclean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-includeHEADERS
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-includeHEADERS
.MAKE: install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \
clean-libtool clean-noinstLTLIBRARIES cscopelist-am ctags \
ctags-am distclean distclean-compile distclean-generic \
distclean-libtool distclean-tags distdir dvi dvi-am html \
html-am info info-am install install-am install-data \
install-data-am install-dvi install-dvi-am install-exec \
install-exec-am install-html install-html-am \
install-includeHEADERS install-info install-info-am \
install-man install-pdf install-pdf-am install-ps \
install-ps-am install-strip installcheck installcheck-am \
installdirs maintainer-clean maintainer-clean-generic \
mostlyclean mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \
uninstall-am uninstall-includeHEADERS
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/npstat/emsunfold/Makefile
===================================================================
--- trunk/npstat/emsunfold/Makefile (revision 579)
+++ trunk/npstat/emsunfold/Makefile (revision 580)
@@ -1,586 +1,586 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# npstat/emsunfold/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
subdir = npstat/emsunfold
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(include_HEADERS) \
$(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
SOURCES =
DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(includedir)"
HEADERS = $(include_HEADERS)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__DIST_COMMON = $(srcdir)/Makefile.in
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk/npstat/emsunfold
abs_srcdir = /home/igv/Hepforge/npstat/trunk/npstat/emsunfold
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include/npstat/emsunfold
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix = ../../
top_builddir = ../..
top_srcdir = ../..
AM_CPPFLAGS = -I../../
include_HEADERS = AbsSparseUnfoldND.hh \
AbsSparseUnfoldND.icc \
AbsSparseUnfoldingFilterND.hh \
EigenParameters.hh \
EigenParameters.icc \
pruneCovariance.hh \
pruneCovariance.icc \
SmoothedEMSparseUnfoldND.hh \
SmoothedEMSparseUnfoldND.icc \
SparseUnfoldingBandwidthScannerND.hh \
SparseUnfoldingBandwidthScannerND.icc \
trlan77.h \
trlanEigensystem.hh \
trlanEigensystem.icc
EXTRA_DIST = 00README.txt emsunfold_doxy.hh
all: all-am
.SUFFIXES:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign npstat/emsunfold/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign npstat/emsunfold/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
install-includeHEADERS: $(include_HEADERS)
@$(NORMAL_INSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \
$(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_HEADER) $$files '$(DESTDIR)$(includedir)'"; \
$(INSTALL_HEADER) $$files "$(DESTDIR)$(includedir)" || exit $$?; \
done
uninstall-includeHEADERS:
@$(NORMAL_UNINSTALL)
@list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir)
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(HEADERS)
installdirs:
for dir in "$(DESTDIR)$(includedir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool mostlyclean-am
distclean: distclean-am
-rm -f Makefile
distclean-am: clean-am distclean-generic distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-includeHEADERS
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-includeHEADERS
.MAKE: install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \
clean-libtool cscopelist-am ctags ctags-am distclean \
distclean-generic distclean-libtool distclean-tags distdir dvi \
dvi-am html html-am info info-am install install-am \
install-data install-data-am install-dvi install-dvi-am \
install-exec install-exec-am install-html install-html-am \
install-includeHEADERS install-info install-info-am \
install-man install-pdf install-pdf-am install-ps \
install-ps-am install-strip installcheck installcheck-am \
installdirs maintainer-clean maintainer-clean-generic \
mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \
ps ps-am tags tags-am uninstall uninstall-am \
uninstall-includeHEADERS
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/npstat/Makefile
===================================================================
--- trunk/npstat/Makefile (revision 579)
+++ trunk/npstat/Makefile (revision 580)
@@ -1,559 +1,559 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# npstat/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
subdir = npstat
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(libdir)"
LTLIBRARIES = $(lib_LTLIBRARIES)
libnpstat_la_DEPENDENCIES = stat/libstat.la rng/librng.la nm/libnm.la
am_libnpstat_la_OBJECTS =
libnpstat_la_OBJECTS = $(am_libnpstat_la_OBJECTS)
AM_V_lt = $(am__v_lt_$(V))
am__v_lt_ = $(am__v_lt_$(AM_DEFAULT_VERBOSITY))
am__v_lt_0 = --silent
am__v_lt_1 =
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
DEFAULT_INCLUDES = -I.
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CFLAGS) $(CFLAGS)
AM_V_CC = $(am__v_CC_$(V))
am__v_CC_ = $(am__v_CC_$(AM_DEFAULT_VERBOSITY))
am__v_CC_0 = @echo " CC " $@;
am__v_CC_1 =
CCLD = $(CC)
LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \
$(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CCLD = $(am__v_CCLD_$(V))
am__v_CCLD_ = $(am__v_CCLD_$(AM_DEFAULT_VERBOSITY))
am__v_CCLD_0 = @echo " CCLD " $@;
am__v_CCLD_1 =
SOURCES = $(libnpstat_la_SOURCES)
DIST_SOURCES = $(libnpstat_la_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
am__DIST_COMMON = $(srcdir)/Makefile.in
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk/npstat
abs_srcdir = /home/igv/Hepforge/npstat/trunk/npstat
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix = ../
top_builddir = ..
top_srcdir = ..
lib_LTLIBRARIES = libnpstat.la
libnpstat_la_SOURCES =
libnpstat_la_LIBADD = stat/libstat.la rng/librng.la nm/libnm.la
EXTRA_DIST = 00README.txt doxygen.conf
all: all-am
.SUFFIXES:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign npstat/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign npstat/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
install-libLTLIBRARIES: $(lib_LTLIBRARIES)
@$(NORMAL_INSTALL)
@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
list2=; for p in $$list; do \
if test -f $$p; then \
list2="$$list2 $$p"; \
else :; fi; \
done; \
test -z "$$list2" || { \
echo " $(MKDIR_P) '$(DESTDIR)$(libdir)'"; \
$(MKDIR_P) "$(DESTDIR)$(libdir)" || exit 1; \
echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \
$(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \
}
uninstall-libLTLIBRARIES:
@$(NORMAL_UNINSTALL)
@list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \
for p in $$list; do \
$(am__strip_dir) \
echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f '$(DESTDIR)$(libdir)/$$f'"; \
$(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=uninstall rm -f "$(DESTDIR)$(libdir)/$$f"; \
done
clean-libLTLIBRARIES:
-test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES)
@list='$(lib_LTLIBRARIES)'; \
locs=`for p in $$list; do echo $$p; done | \
sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \
sort -u`; \
test -z "$$locs" || { \
echo rm -f $${locs}; \
rm -f $${locs}; \
}
libnpstat.la: $(libnpstat_la_OBJECTS) $(libnpstat_la_DEPENDENCIES) $(EXTRA_libnpstat_la_DEPENDENCIES)
$(AM_V_CCLD)$(LINK) -rpath $(libdir) $(libnpstat_la_OBJECTS) $(libnpstat_la_LIBADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
tags TAGS:
ctags CTAGS:
cscope cscopelist:
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(LTLIBRARIES)
installdirs:
for dir in "$(DESTDIR)$(libdir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libLTLIBRARIES clean-libtool \
mostlyclean-am
distclean: distclean-am
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am: install-libLTLIBRARIES
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-libLTLIBRARIES
.MAKE: install-am install-strip
.PHONY: all all-am check check-am clean clean-generic \
clean-libLTLIBRARIES clean-libtool cscopelist-am ctags-am \
distclean distclean-compile distclean-generic \
distclean-libtool distdir dvi dvi-am html html-am info info-am \
install install-am install-data install-data-am install-dvi \
install-dvi-am install-exec install-exec-am install-html \
install-html-am install-info install-info-am \
install-libLTLIBRARIES install-man install-pdf install-pdf-am \
install-ps install-ps-am install-strip installcheck \
installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags-am uninstall uninstall-am uninstall-libLTLIBRARIES
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/NEWS
===================================================================
--- trunk/NEWS (revision 579)
+++ trunk/NEWS (revision 580)
@@ -1,702 +1,702 @@
-Version 4.9.0 - development
+Version 4.9.0 - Dec 18 2018, by I. Volobouev
* Added "integratePoly" and "jointIntegral" methods to the
AbsClassicalOrthoPoly1D class.
* Added utility functions "truncatedInverseSqrt" and "matrixIndexPairs".
* Added a number of functions to "orthoPoly1DVProducts.hh".
* Added classes ChebyshevOrthoPoly1st and ChebyshevOrthoPoly2nd inheriting
from AbsClassicalOrthoPoly1D.
* Added class HermiteProbOrthoPoly1D.
* Added FejerQuadrature class.
* Added classes IsoscelesTriangle1D and Logistic1D.
* Added classes AbsKDE1DKernel and KDE1DHOSymbetaKernel.
* Added static function "optimalDegreeHart" to OSDE1D class.
Version 4.8.0 - Jul 9 2018, by I. Volobouev
* Added ShiftedLegendreOrthoPoly1D class.
* Added Lanczos method to generate recurrence coefficients for the
ContOrthoPoly1D class.
* Added npstat/stat/orthoPoly1DVProducts.hh file with various utilities
for statistical analysis of chaos polynomials.
Version 4.7.0 - Jan 13 2018, by I. Volobouev
* Added "UniPareto1D" distribution (uniform with Pareto tail to the right).
* More coordinate/weight cases for the GaussLegendreQuadrature class.
* Added ContOrthoPoly1D class -- continuous orthogonal polynomials with
discrete measure.
* Added functions "linearLeastSquares" and "tdSymEigen" to the Matrix class.
* Added OSDE1D class.
* Added classes LocationScaleFamily1D and SinhAsinhTransform1D.
* Added new functors (CdfFunctor1D, etc) as AbsDistribution1D helpers.
* Small fix in StatAccumulatorArr.cc.
Version 4.6.0 - Jan 23 2017, by I. Volobouev
* Updated 1-d LOrPE cross validation code (classes AbsBandwidthCV,
BandwidthCVLeastSquares1D, BandwidthCVPseudoLogli1D) for use with
weighted samples in the case the sample itself is available at the
point cross validation is run.
Version 4.5.0 - Aug 01 2016, by I. Volobouev
* A small fix in OrthoPolyND.icc (switched from cycling over unsigned to
unsigned long in the scalar product function).
* Implemented Gauss-Hermite quadrature with Gaussian density weight.
* Changed the meaning of Quadratic1D and LogQuadratic1D parameters to be
consistent with Legendre polynomial coefficients on [-1, 1] (new
parameters are 1/2 of old).
* Added class MinuitUnbinnedFitFcn1D (to interfaces).
* Added function "findRootNewtonRaphson".
* Added "statUncertainties" header with various functions.
Version 4.4.0 - May 9 2016, by I. Volobouev
* Added "timestamp" function.
* Improved implementation of BinnedDensity1D::unscaledQuantile function.
Certain problems caused by round-off errors are now fixed.
* Added the capability to use the single closest parameter cells (thus
disabling actual interpolation between parameter values, for speed)
to "GridInterpolatedDistribution".
Version 4.3.0 - March 19 2016, by I. Volobouev
* Put additional assert statements inside OrthoPolyND.icc to prevent
phony "array subscript is above array bounds" messages in g++ 4.9.2.
* Improved CmdLine.hh.
* Additional methods in CopulaInterpolationND and GridInterpolatedDistribution.
* Added function "volumeDensityFromBinnedRadial".
* Added convenience method "globalFilter" to the OrthoPoly1D class.
* Initiated the transition of the Python API from Python 2 to Python 3.
Version 4.2.0 - October 29 2015, by I. Volobouev
* Added interpolation methods for the marginals to classes
"CopulaInterpolationND" and "GridInterpolatedDistribution".
* Removed assert on underflow in the "igamc" function. Now
in such cases this function simply returns 0.0.
Version 4.1.0 - July 27 2015, by I. Volobouev
* Made a few additional methods virtual in AbsNtuple.
* Declared methods "columnIndices" of AbsNtuple const (as they should be).
* Added function "weightedCopulaHisto" to build copulas for samples of
weighted points.
* Added function "weightedVariableBandwidthSmooth1D" to use variable
bandwidth smoothing with weighted histograms.
* Added "AbsWeightedMarginalSmoother" interface class for smoothing samples
of weighted points. Modified classes ConstantBandwidthSmoother1D,
JohnsonKDESmoother, and LOrPEMarginalSmoother to support this interface.
* Added class "VariableBandwidthSmoother1D" which implements both
AbsMarginalSmoother and AbsWeightedMarginalSmoother interfaces.
* Implemented cross-validation for weighted samples.
* Implemented "buildInterpolatedCompositeDistroND" for generic construction
of multivariate transfer functions.
* Implemented "buildInterpolatedDistro1DNP" for generic construction
of univariate transfer functions.
Version 4.0.1 - June 17 2015, by I. Volobouev
* Added "dump_qmc" example executable.
Version 4.0.0 - June 10 2015, by I. Volobouev
* Complete overhaul of 1-d filter-building code. Addition of new boundary
methods is a lot easier now. The user API for choosing a LOrPE boundary
method is encapsulated in the new "BoundaryHandling" class.
* Implemented a number of new filter builders with different boundary
treatments.
* Updated the "LocalPolyFilter1D" class so that it holds the local
bandwidth factors derived by the filter builders.
Version 3.8.0 - June 1 2015, by I. Volobouev
* Implemented class ConstSqFilter1DBuilder (declared in the header file
npstat/stat/Filter1DBuilders.hh). The "BoundaryMethod" enum has been
extended accordingly. Other files using this enum have been updated.
* Implemented class FoldingSqFilter1DBuilder. Similar to ConstSqFilter1DBuilder
but it also folds the kernel in addition to stretching it.
* Added virtual destructors to a number of classes.
* Added "externalMemArrayND" with various signatures to allow the use of
ArrayND with memory not managed by ArrayND.
* Added move constructors and move assignment operators to ArrayND and
Matrix classes.
Version 3.7.0 - May 10 2015, by I. Volobouev
* Better numerical derivative calculation in InterpolatedDistribution1D.cc.
* Added class "LocalMultiFilter1D" for fast generation of filters which
correspond to each orthogonal polynomial separately.
* Added a function calculating the area of n-dimensional sphere.
* Added I/O capabilities to the RadialProfileND class.
* Added class "LogRatioTransform1D".
* Added utility function "multiFill1DHistoWithCDFWeights" (header file
histoUtils.hh).
* Avoiding underflow of the incomplete gamma in "convertToSphericalRandom".
Version 3.6.0 - April 6 2015, by I. Volobouev
* Fixed Marsaglia's code calculating the Anderson-Darling statistics
(it was breaking down for large values of z).
* Added high-level driver function "simpleVariableBandwidthSmooth1D" to
automatically build the pilot estimate for "variableBandwidthSmooth1D".
* Switched to log of sigma as Minuit parameter in "minuitFitJohnsonCurves"
instead of sigma (linear sigma would sometimes break the fit when Minuit
would come up with 0 or negative trial value for it).
* Extended "MinuitDensityFitFcn1D" class so that it could be used to fit
non-uniformly binned histograms.
* Adapted "minuitFitJohnsonCurves" function so that it could be used with
histograms templated upon DualHistoAxis.
* Added functions "fillArrayCentersPreservingAreas" and
"canFillArrayCentersPreservingAreas".
* Implemented an interface to monotonous coordinate transformations with
the intent of using them in constructing densities. Implemented a number
of transforms.
* Implemented class "TransformedDistribution1D".
* Added class "VerticallyInterpolatedDistro1D1P".
* Added utility function "fill1DHistoWithCDFWeights".
Version 3.5.0 - February 21 2015, by I. Volobouev
* Added "symPDEigenInv" method to the Matrix class.
* Added "variableCount" method to unfolding bandwidth scanner classes.
* Increased the tolerance parameters in JohnsonSu::initialize and in
JohnsonFit constructor.
* Bug fix in function "fillHistoFromText".
Version 3.4.4 - January 13 2015, by I. Volobouev
* Corrected handling of some "assert" statements so that the code compiles
correctly with the -DNDEBUG option.
Version 3.4.3 - January 5 2015, by I. Volobouev
* Implemented class MirroredGauss1D.
* Added method "getOracleData" to class UnfoldingBandwidthScanner1D.
* Bug fix in FoldingFilter1DBuilder::makeOrthoPoly.
Version 3.4.2 - December 15 2014, by I. Volobouev
* Implemented InterpolatedDistro1D1P class.
Version 3.4.1 - November 07 2014, by I. Volobouev
* Implemented "divideTransforms" function for deconvolutions.
* Implemented the Moyal distribution.
* Added "fillHistoFromText" utility function.
* Added "apply_lorpe_1d" example.
Version 3.4.0 - October 01 2014, by I. Volobouev
* Implemented Hadamard product and Hadamard ratio for matrices.
* Bug fix in the "solve_linear_system" lapack interface function.
Version 3.3.1 - August 08 2014, by I. Volobouev
* Terminate iterative refinement of the unfolding error propagation
matrix early in case the solution does not seem to improve.
Version 3.3.0 - August 05 2014, by I. Volobouev
* Added correction factors to the determination of the number of fitted
parameters in various unfolding procedures.
Version 3.2.0 - July 25 2014, by I. Volobouev
* Added "gaussianResponseMatrix" function for non-uniform binning.
* Added Pareto distribution.
* Implemented EMS unfolding with sparse matrices.
* Added methods "getObservedShape" and "getUnfoldedShape" to the AbsUnfoldND
class.
* Bug fix in the assignment operator of ProductDistributionND class. Made
class ProductDistributionND persistent.
* Bug fix in the error propagation for unfolding, in the code which takes
into account the extra normalization constant.
* Added "productResponseMatrix" function to assist in making sparse response
matrices.
* Bug fix in the factory constructor of the Cauchy1D class.
* Completed implementation of the "RatioOfNormals" class.
Version 3.1.0 - June 29 2014, by I. Volobouev
* Improved (again) random number generator for the 1-d Gaussian distribution.
Something about expectation values of normalized Hermite polynomials over
random numbers made by this generator is still not fully understood. The
standard deviation of these expectations decreases with the polynomial
order (while it should stay constant). It is possible that the numbers
of points used are simply insufficient to sample the tails correctly.
* Implemented smoothed expectation-maximization (a.k.a. D'Agostini) unfolding
for 1-d distributions in classes SmoothedEMUnfold1D and MultiscaleEMUnfold1D.
In certain usage scenarios, MultiscaleEMUnfold1D can be more efficient than
SmoothedEMUnfold1D.
* Implemented smoothed expectation-maximization unfolding for multivariate
distributions in a class SmoothedEMUnfoldND.
* Added class "UnfoldingBandwidthScanner1D" to study 1-d unfolding behavior
as a function of filter bandwidth.
* Added class "UnfoldingBandwidthScannerND" to study multivariate unfolding
behavior as a function of provided bandwidth values.
* Added DummyLocalPolyFilter1D class useful when a filter is needed which
does not smooth anything.
* Added function "poissonLogLikelihood" (header file npstat/stat/arrayStats.hh).
* Added function "pooledDiscreteTabulated1D" (header file
npstat/stat/DiscreteDistributions1D.hh).
* Implemented class UGaussConvolution1D (convolution of uniform distribution
with a Gaussian).
* Implemented gamma distribution (class Gamma1D).
* Defined interface for comparing binned distributions, AbsBinnedComparison1D.
* Implemented several comparison classes for comparing binned distributions:
PearsonsChiSquared, BinnedKSTest1D, BinnedADTest1D. Class BinnedKSTest1D
pulled in dependence on the "kstest" package.
* Made classes LocalPolyFilter1D, LocalPolyFilterND, and SequentialPolyFilterND
persistent.
* Added code generating dense filter matrices to LocalPolyFilterND and
SequentialPolyFilterND (as needed for unfolding).
* Made class MemoizingSymbetaFilterProvider persistent.
* Implemented function goldenSectionSearchOnAGrid (header file
npstat/nm/goldenSectionSearch.hh).
* Implemented function parabolicExtremum (header npstat/nm/MathUtils.hh).
* Added class DistributionMix1D (header npstat/stat/DistributionMix1D.hh).
* Added interface to solving A*X = B, with matrices X and B, to the Matrix
class (method "solveLinearSystems").
* Added "reshape" methods to the ArrayND class.
* Added "gaussianResponseMatrix" function.
* Added a section on unfolding to the documentation.
* Added "ems_unfold_1d" example program.
Version 3.0.0 - March 14 2014, by I. Volobouev
* Added interface to the LAPACK SVD routines.
* Added function "lorpeMise1D" to calculate MISE for arbitrary distributions.
* Added FoldingFilter1DBuilder class.
* Changed interfaces for several high-level functions to use
FoldingFilter1DBuilder. The major version number got bumped up.
* Split DensityScan1D.hh away from AbsDistribution1D.hh.
Version 2.7.0 - March 10 2014, by I. Volobouev
* Added code to optimize operations with diagonal matrices.
* Added discretizedDistance.hh file for simple L1 and L2 distance calculations
with numeric arrays.
* Added base class for future unfolding code.
* The "reset" method of the Matrix class was renamed into "uninitialize"
in order to be consistent with ArrayND.
* Added function "multinomialCovariance1D".
* Added "polyTimesWeight" method to the OrthoPoly1D class.
* Added methods "TtimesThis" and "timesT" to the Matrix class. These
methods are more efficient than transpose followed by multiplication.
Version 2.6.0 - January 30 2014, by I. Volobouev
* Added function "lorpeBackgroundCVDensity1D" which linearizes calculation
of the cross validation likelihood in semiparametric fits. Argument
"linearizeCrossValidation" was added to MinuitSemiparametricFitFcn1D
constructor, "lorpeBackground1D" function, etc.
* Added the ability to build filters with center point removed to classes
WeightTableFilter1DBuilder and StretchingFilter1DBuilder. The function
"symbetaLOrPEFilter1D" now has an appropriate switch.
* Added "removeRowAndColumn" method to the Matrix class.
* Added CircularBuffer class.
* Added various plugin bandwidth functions which work with non-integer
polynomial degrees.
* Switched to the Legendre polynomial basis for calculating all 1-d
orthogonal polynomials (instead of monomial basis).
* Added MemoizingSymbetaFilterProvider class.
* Added "operator+=" method to the MultivariateSumAccumulator class.
* Simplified implementation of the PolyFilterCollection1D class.
File PolyFilterCollection1D.icc is removed.
* Added "RatioOfNormals" 1-d distribution function. Only the density is
currently implemented but not the CDF.
* Added ExpMapper1d class.
Version 2.5.0 - October 15 2013, by I. Volobouev
* Added "getFilterMatrix" method to the LocalPolyFilter1D class.
* Added "genEigen" method to the Matrix class (for determination of
eigenvalues and eigenvectors of general real matrices).
* Refactored the LAPACK interface so that interface functions to floats
are automatically generated from interface functions to doubles. See
the comment at the end of the "lapack_interface.icc" file for the shell
commands to do this.
Version 2.4.0 - October 6 2013, by I. Volobouev
* Added functions "lorpeBackground1D", "lorpeBgCVPseudoLogli1D", and
"lorpeBgLogli1D".
* Added minuit interface classes "MinuitLOrPEBgCVFcn1D" and
"MinuitSemiparametricFitFcn1D".
* Added "ScalableDensityConstructor1D" class for use with Minuit interface
functions.
* Added classes AbsSymbetaFilterProvider and SymbetaPolyCollection1D.
Version 2.3.0 - October 1 2013, by I. Volobouev
* Allowed point dimensionality to be larger than the histogram dimensionality
in the "empiricalCopulaHisto" function.
* Added "keepAllFilters" method to AbsFilter1DBuilder and all derived classes.
* Implemented exclusion regions for WeightTableFilter1DBuilder and
StretchingFilter1DBuilder.
* "symbetaLOrPEFilter1D" function (in the header LocalPolyFilter1D.hh)
is updated to take the exclusion mask argument.
* Added "continuousDegreeTaper" function which can do something meaningful
with the continuous LOrPE degree parameter.
Version 2.2.0 - June 30 2013, by I. Volobouev
* Added classes DiscreteBernsteinPoly1D and BernsteinFilter1DBuilder.
* Added classes DiscreteBeta1D and BetaFilter1DBuilder.
* Added BifurcatedGauss1D class to model Gaussian-like distributions with
different sigmas on the left and right sides.
* Added virtual destructors to the classes declared in the Filter1DBuilders.hh
header.
* Added a method to the Matrix template to calculate Frobenius norm.
* Added methods to the Matrix template to calculate row and column sums.
* Added "directSum" method to the Matrix template.
* Added constructor from a subrange of another matrix to the Matrix template.
* Added code to the LocalPolyFilter1D class that generates a doubly
stochastic filter out of an arbitrary filter.
* Added "npstat/nm/definiteIntegrals.hh" header and corresponding .cc file
for various infrequently used integrals.
* Added "betaKernelsBandwidth" function.
Version 2.1.0 - June 20 2013, by I. Volobouev
* Fixed a couple of problems which showed up in the robust regression code
  due to a compiler update.
* Fixed CensoredQuantileRegressionOnKDTree::process method (needed this->
dereference for some member).
Version 2.0.0 - June 15 2013, by I. Volobouev
* Updated to use "Geners" version 1.3.0. A few interfaces were changed
(API for the string archives was removed because Geners own string archive
facilities are now adequate) so the major version number was bumped up.
Version 1.6.0 - June 12 2013, by I. Volobouev
* Updated some documentation.
* Updated fitCompositeJohnson.icc to use simplified histogram constructors.
* Bug fix in the "minuitLocalQuantileRegression1D" function.
* Changed the "quantileBinFromCdf" function to use unsigned long type for
array indices.
* Added "weightedLocalQuantileRegression1D" function (namespace npsi) for
local regression with single predictor on weighted points.
Version 1.5.0 - May 23 2013, by I. Volobouev
* Added interfaces to LAPACK routines DSYEVD, DSYEVR, and corresponding
single precision versions.
* Added the "symPSDefEffectiveRank" method to the Matrix class for
calculating effective ranks of symmetric positive semidefinite matrices.
* Added converting constructor and assignment operator to the Matrix class.
* Run the Gram-Schmidt procedure twice when orthogonal polynomials are
derived in order to improve orthogonality.
Version 1.4.0 - May 20 2013, by I. Volobouev
* Added the "append" method to the AbsNtuple class.
Version 1.3.0 - May 10 2013, by I. Volobouev
* Added the code for Hermite polynomial series.
* Improved random number generator for the 1-d Gaussian distribution.
* Added a framework for discrete 1-d distributions as well as two
concrete distribution classes (Poisson1D, DiscreteTabulated1D).
* Added functions "readCompressedStringArchiveExt" and
"writeCompressedStringArchiveExt" which can read/write either compressed
or uncompressed string archives, distinguished by file extension.
Version 1.2.1 - March 22 2013, by I. Volobouev
* Improved CmdLine.hh in the "examples/C++" directory.
* Added class QuantileTable1D.
* Added classes LeftCensoredDistribution and RightCensoredDistribution.
Version 1.2.0 - March 13 2013, by I. Volobouev
* Added convenience "fill" methods to work with the ntuples which have
  a small number of columns (up to 10).
* Fixed a bug in AbsRandomGenerator for univariate generators making
multivariate points.
* Added LOrPEMarginalSmoother class.
Version 1.1.1 - March 11 2013, by I. Volobouev
* Added utility function "symbetaLOrPEFilter1D" which creates 1-d LOrPE
filters using kernels from the symmetric beta family (and the Gaussian).
* Added high level driver function "lorpeSmooth1D".
* Allowed variables with zero variances for calculation of correlation
coefficients in "MultivariateSumsqAccumulator". Such variables will
have zero correlation coefficients with all other variables.
* Added rebinning constructor to the HistoND class.
Version 1.1.0 - March 8 2013, by I. Volobouev
* Changed NUHistoAxis::fltBinNumber method to produce correct results
with interpolation degree 0. It is not yet obvious which method would
work best for higher interpolation degrees.
* Added functions for converting between StringArchive and python bytearray.
They have been placed in a new header: wrap/stringArchiveToBinary.hh.
* Added methods "exportMemSlice" and "importMemSlice" to ArrayND. These
methods allow for filling array slices from unstructured memory buffers
and for exporting array slices to such memory buffers.
* Added "simpleColumnNames" function (header file AbsNtuple.hh) to generate
trivial column names when ntuple column names are not important.
* Added functions "neymanPearsonWindow1D" and "signalToBgMaximum1D".
They are declared in a new header npstat/neymanPearsonWindow1D.hh.
Version 1.0.5 - December 17 2012, by I. Volobouev
* Flush string archives before writing them out in stringArchiveIO.cc.
* Added class TruncatedDistribution1D.
Version 1.0.4 - November 14 2012, by I. Volobouev
* Added utilities for reading/writing Geners string archives to files.
* Added BinSummary class.
* Doxygen documentation improved. Every header file in stat, nm, rng,
and interfaces now has a brief description.
Version 1.0.3 - September 27 2012, by I. Volobouev
* Fixed some bugs related to moving StorableMultivariateFunctor code
from "nm" to "stat".
Version 1.0.2 - August 6 2012, by I. Volobouev
* Added converting copy constructor to the "LinInterpolatedTableND" class.
* Added StorableMultivariateFunctor class (together with the corresponding
reader class).
* Added StorableInterpolationFunctor class which inherits from the above
and can be used with interpolation tables.
* Added StorableHistoNDFunctor class which inherits from
StorableMultivariateFunctor and can be used to interpolate histogram bins.
* Added "transpose" method to HistoND class.
* Created DualAxis class.
* Created DualHistoAxis class.
* Added conversion functions between histogram and grid axes.
* Added "mergeTwoHistos" function for smooth merging of two histograms.
* Added "ProductSymmetricBetaNDCdf" functor to be used as weight in
merging histograms.
* Added CoordinateSelector class.
Version 1.0.1 - June 29 2012, by I. Volobouev
* Implemented class LinInterpolatedTableND with related supporting code.
Index: trunk/Makefile
===================================================================
--- trunk/Makefile (revision 579)
+++ trunk/Makefile (revision 580)
@@ -1,866 +1,866 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
subdir = .
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(top_srcdir)/configure \
$(am__configure_deps) $(am__DIST_COMMON)
am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
configure.lineno config.status.lineno
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES = npstat.pc
CONFIG_CLEAN_VPATH_FILES =
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
SOURCES =
DIST_SOURCES =
RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \
ctags-recursive dvi-recursive html-recursive info-recursive \
install-data-recursive install-dvi-recursive \
install-exec-recursive install-html-recursive \
install-info-recursive install-pdf-recursive \
install-ps-recursive install-recursive installcheck-recursive \
installdirs-recursive pdf-recursive ps-recursive \
tags-recursive uninstall-recursive
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(pkgconfigdir)"
DATA = $(pkgconfig_DATA)
RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \
distclean-recursive maintainer-clean-recursive
am__recursive_targets = \
$(RECURSIVE_TARGETS) \
$(RECURSIVE_CLEAN_TARGETS) \
$(am__extra_recursive_targets)
AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \
cscope distdir dist dist-all distcheck
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
CSCOPE = cscope
DIST_SUBDIRS = $(SUBDIRS)
am__DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/npstat.pc.in AUTHORS \
INSTALL NEWS compile config.guess config.sub install-sh \
ltmain.sh missing
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
distdir = $(PACKAGE)-$(VERSION)
top_distdir = $(distdir)
am__remove_distdir = \
if test -d "$(distdir)"; then \
find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \
&& rm -rf "$(distdir)" \
|| { sleep 5 && rm -rf "$(distdir)"; }; \
else :; fi
am__post_remove_distdir = $(am__remove_distdir)
am__relativize = \
dir0=`pwd`; \
sed_first='s,^\([^/]*\)/.*$$,\1,'; \
sed_rest='s,^[^/]*/*,,'; \
sed_last='s,^.*/\([^/]*\)$$,\1,'; \
sed_butlast='s,/*[^/]*$$,,'; \
while test -n "$$dir1"; do \
first=`echo "$$dir1" | sed -e "$$sed_first"`; \
if test "$$first" != "."; then \
if test "$$first" = ".."; then \
dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \
dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \
else \
first2=`echo "$$dir2" | sed -e "$$sed_first"`; \
if test "$$first2" = "$$first"; then \
dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \
else \
dir2="../$$dir2"; \
fi; \
dir0="$$dir0"/"$$first"; \
fi; \
fi; \
dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \
done; \
reldir="$$dir2"
DIST_ARCHIVES = $(distdir).tar.gz
GZIP_ENV = --best
DIST_TARGETS = dist-gzip
distuninstallcheck_listfiles = find . -type f -print
am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \
| sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$'
distcleancheck_listfiles = find . -type f -print
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk
abs_srcdir = /home/igv/Hepforge/npstat/trunk
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix =
top_builddir = .
top_srcdir = .
SUBDIRS = npstat/nm npstat/rng npstat/stat npstat/wrap npstat/interfaces npstat/emsunfold npstat examples/C++
pkgconfigdir = $(libdir)/pkgconfig
pkgconfig_DATA = npstat.pc
EXTRA_DIST = doc npstat.pc.in AUTHORS LICENSE INSTALL NEWS
ACLOCAL_AMFLAGS = -I m4
all: all-recursive
.SUFFIXES:
am--refresh: Makefile
@:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \
$(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \
&& exit 0; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
echo ' $(SHELL) ./config.status'; \
$(SHELL) ./config.status;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
$(SHELL) ./config.status --recheck
$(top_srcdir)/configure: $(am__configure_deps)
$(am__cd) $(srcdir) && $(AUTOCONF)
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
$(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
$(am__aclocal_m4_deps):
npstat.pc: $(top_builddir)/config.status $(srcdir)/npstat.pc.in
cd $(top_builddir) && $(SHELL) ./config.status $@
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
distclean-libtool:
-rm -f libtool config.lt
install-pkgconfigDATA: $(pkgconfig_DATA)
@$(NORMAL_INSTALL)
@list='$(pkgconfig_DATA)'; test -n "$(pkgconfigdir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(pkgconfigdir)'"; \
$(MKDIR_P) "$(DESTDIR)$(pkgconfigdir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgconfigdir)'"; \
$(INSTALL_DATA) $$files "$(DESTDIR)$(pkgconfigdir)" || exit $$?; \
done
uninstall-pkgconfigDATA:
@$(NORMAL_UNINSTALL)
@list='$(pkgconfig_DATA)'; test -n "$(pkgconfigdir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(pkgconfigdir)'; $(am__uninstall_files_from_dir)
# This directory's subdirectories are mostly independent; you can cd
# into them and run 'make' without going through this Makefile.
# To change the values of 'make' variables: instead of editing Makefiles,
# (1) if the variable is set in 'config.status', edit 'config.status'
# (which will cause the Makefiles to be regenerated when you run 'make');
# (2) otherwise, pass the desired values on the 'make' command line.
$(am__recursive_targets):
@fail=; \
if $(am__make_keepgoing); then \
failcom='fail=yes'; \
else \
failcom='exit 1'; \
fi; \
dot_seen=no; \
target=`echo $@ | sed s/-recursive//`; \
case "$@" in \
distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
*) list='$(SUBDIRS)' ;; \
esac; \
for subdir in $$list; do \
echo "Making $$target in $$subdir"; \
if test "$$subdir" = "."; then \
dot_seen=yes; \
local_target="$$target-am"; \
else \
local_target="$$target"; \
fi; \
($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
|| eval $$failcom; \
done; \
if test "$$dot_seen" = "no"; then \
$(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
fi; test -z "$$fail"
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-recursive
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
include_option=--etags-include; \
empty_fix=.; \
else \
include_option=--include; \
empty_fix=; \
fi; \
list='$(SUBDIRS)'; for subdir in $$list; do \
if test "$$subdir" = .; then :; else \
test ! -f $$subdir/TAGS || \
set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \
fi; \
done; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-recursive
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscope: cscope.files
test ! -s cscope.files \
|| $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS)
clean-cscope:
-rm -f cscope.files
cscope.files: clean-cscope cscopelist
cscopelist: cscopelist-recursive
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
-rm -f cscope.out cscope.in.out cscope.po.out cscope.files
distdir: $(DISTFILES)
$(am__remove_distdir)
test -d "$(distdir)" || mkdir "$(distdir)"
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
@list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
if test "$$subdir" = .; then :; else \
$(am__make_dryrun) \
|| test -d "$(distdir)/$$subdir" \
|| $(MKDIR_P) "$(distdir)/$$subdir" \
|| exit 1; \
dir1=$$subdir; dir2="$(distdir)/$$subdir"; \
$(am__relativize); \
new_distdir=$$reldir; \
dir1=$$subdir; dir2="$(top_distdir)"; \
$(am__relativize); \
new_top_distdir=$$reldir; \
echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \
echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \
($(am__cd) $$subdir && \
$(MAKE) $(AM_MAKEFLAGS) \
top_distdir="$$new_top_distdir" \
distdir="$$new_distdir" \
am__remove_distdir=: \
am__skip_length_check=: \
am__skip_mode_fix=: \
distdir) \
|| exit 1; \
fi; \
done
-test -n "$(am__skip_mode_fix)" \
|| find "$(distdir)" -type d ! -perm -755 \
-exec chmod u+rwx,go+rx {} \; -o \
! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
! -type d ! -perm -400 -exec chmod a+r {} \; -o \
! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \
|| chmod -R a+r "$(distdir)"
dist-gzip: distdir
tardir=$(distdir) && $(am__tar) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).tar.gz
$(am__post_remove_distdir)
dist-bzip2: distdir
tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2
$(am__post_remove_distdir)
dist-lzip: distdir
tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz
$(am__post_remove_distdir)
dist-xz: distdir
tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz
$(am__post_remove_distdir)
dist-tarZ: distdir
@echo WARNING: "Support for distribution archives compressed with" \
"legacy program 'compress' is deprecated." >&2
@echo WARNING: "It will be removed altogether in Automake 2.0" >&2
tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z
$(am__post_remove_distdir)
dist-shar: distdir
@echo WARNING: "Support for shar distribution archives is" \
"deprecated." >&2
@echo WARNING: "It will be removed altogether in Automake 2.0" >&2
shar $(distdir) | eval GZIP= gzip $(GZIP_ENV) -c >$(distdir).shar.gz
$(am__post_remove_distdir)
dist-zip: distdir
-rm -f $(distdir).zip
zip -rq $(distdir).zip $(distdir)
$(am__post_remove_distdir)
dist dist-all:
$(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:'
$(am__post_remove_distdir)
# This target untars the dist file and tries a VPATH configuration. Then
# it guarantees that the distribution is self-contained by making another
# tarfile.
distcheck: dist
case '$(DIST_ARCHIVES)' in \
*.tar.gz*) \
eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).tar.gz | $(am__untar) ;;\
*.tar.bz2*) \
bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\
*.tar.lz*) \
lzip -dc $(distdir).tar.lz | $(am__untar) ;;\
*.tar.xz*) \
xz -dc $(distdir).tar.xz | $(am__untar) ;;\
*.tar.Z*) \
uncompress -c $(distdir).tar.Z | $(am__untar) ;;\
*.shar.gz*) \
eval GZIP= gzip $(GZIP_ENV) -dc $(distdir).shar.gz | unshar ;;\
*.zip*) \
unzip $(distdir).zip ;;\
esac
chmod -R a-w $(distdir)
chmod u+w $(distdir)
mkdir $(distdir)/_build $(distdir)/_build/sub $(distdir)/_inst
chmod a-w $(distdir)
test -d $(distdir)/_build || exit 0; \
dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \
&& dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
&& am__cwd=`pwd` \
&& $(am__cd) $(distdir)/_build/sub \
&& ../../configure \
$(AM_DISTCHECK_CONFIGURE_FLAGS) \
$(DISTCHECK_CONFIGURE_FLAGS) \
--srcdir=../.. --prefix="$$dc_install_base" \
&& $(MAKE) $(AM_MAKEFLAGS) \
&& $(MAKE) $(AM_MAKEFLAGS) dvi \
&& $(MAKE) $(AM_MAKEFLAGS) check \
&& $(MAKE) $(AM_MAKEFLAGS) install \
&& $(MAKE) $(AM_MAKEFLAGS) installcheck \
&& $(MAKE) $(AM_MAKEFLAGS) uninstall \
&& $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
distuninstallcheck \
&& chmod -R a-w "$$dc_install_base" \
&& ({ \
(cd ../.. && umask 077 && mkdir "$$dc_destdir") \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
} || { rm -rf "$$dc_destdir"; exit 1; }) \
&& rm -rf "$$dc_destdir" \
&& $(MAKE) $(AM_MAKEFLAGS) dist \
&& rm -rf $(DIST_ARCHIVES) \
&& $(MAKE) $(AM_MAKEFLAGS) distcleancheck \
&& cd "$$am__cwd" \
|| exit 1
$(am__post_remove_distdir)
@(echo "$(distdir) archives ready for distribution: "; \
list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \
sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x'
distuninstallcheck:
@test -n '$(distuninstallcheck_dir)' || { \
echo 'ERROR: trying to run $@ with an empty' \
'$$(distuninstallcheck_dir)' >&2; \
exit 1; \
}; \
$(am__cd) '$(distuninstallcheck_dir)' || { \
echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \
exit 1; \
}; \
test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \
|| { echo "ERROR: files left after uninstall:" ; \
if test -n "$(DESTDIR)"; then \
echo " (check DESTDIR support)"; \
fi ; \
$(distuninstallcheck_listfiles) ; \
exit 1; } >&2
distcleancheck: distclean
@if test '$(srcdir)' = . ; then \
echo "ERROR: distcleancheck can only run from a VPATH build" ; \
exit 1 ; \
fi
@test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
|| { echo "ERROR: files left in build directory after distclean:" ; \
$(distcleancheck_listfiles) ; \
exit 1; } >&2
check-am: all-am
check: check-recursive
all-am: Makefile $(DATA)
installdirs: installdirs-recursive
installdirs-am:
for dir in "$(DESTDIR)$(pkgconfigdir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-recursive
install-exec: install-exec-recursive
install-data: install-data-recursive
uninstall: uninstall-recursive
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-recursive
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-recursive
clean-am: clean-generic clean-libtool mostlyclean-am
distclean: distclean-recursive
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -f Makefile
distclean-am: clean-am distclean-generic distclean-libtool \
distclean-tags
dvi: dvi-recursive
dvi-am:
html: html-recursive
html-am:
info: info-recursive
info-am:
install-data-am: install-pkgconfigDATA
install-dvi: install-dvi-recursive
install-dvi-am:
install-exec-am:
install-html: install-html-recursive
install-html-am:
install-info: install-info-recursive
install-info-am:
install-man:
install-pdf: install-pdf-recursive
install-pdf-am:
install-ps: install-ps-recursive
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-recursive
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -rf $(top_srcdir)/autom4te.cache
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-recursive
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
pdf: pdf-recursive
pdf-am:
ps: ps-recursive
ps-am:
uninstall-am: uninstall-pkgconfigDATA
.MAKE: $(am__recursive_targets) install-am install-strip
.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \
am--refresh check check-am clean clean-cscope clean-generic \
clean-libtool cscope cscopelist-am ctags ctags-am dist \
dist-all dist-bzip2 dist-gzip dist-lzip dist-shar dist-tarZ \
dist-xz dist-zip distcheck distclean distclean-generic \
distclean-libtool distclean-tags distcleancheck distdir \
distuninstallcheck dvi dvi-am html html-am info info-am \
install install-am install-data install-data-am install-dvi \
install-dvi-am install-exec install-exec-am install-html \
install-html-am install-info install-info-am install-man \
install-pdf install-pdf-am install-pkgconfigDATA install-ps \
install-ps-am install-strip installcheck installcheck-am \
installdirs installdirs-am maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-generic \
mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \
uninstall-am uninstall-pkgconfigDATA
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/examples/C++/Makefile
===================================================================
--- trunk/examples/C++/Makefile (revision 579)
+++ trunk/examples/C++/Makefile (revision 580)
@@ -1,1068 +1,1068 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# examples/C++/Makefile. Generated from Makefile.in by configure.
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/npstat
pkgincludedir = $(includedir)/npstat
pkglibdir = $(libdir)/npstat
pkglibexecdir = $(libexecdir)/npstat
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = x86_64-pc-linux-gnu
host_triplet = x86_64-pc-linux-gnu
noinst_PROGRAMS = lorpe_1d$(EXEEXT) apply_lorpe_1d$(EXEEXT) \
lorpe_multivariate$(EXEEXT) n_poly_coeffs$(EXEEXT) \
ems_unfold_1d$(EXEEXT) dump_qmc$(EXEEXT)
subdir = examples/C++
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
PROGRAMS = $(noinst_PROGRAMS)
am_apply_lorpe_1d_OBJECTS = apply_lorpe_1d.$(OBJEXT)
apply_lorpe_1d_OBJECTS = $(am_apply_lorpe_1d_OBJECTS)
apply_lorpe_1d_LDADD = $(LDADD)
am__DEPENDENCIES_1 =
apply_lorpe_1d_DEPENDENCIES = $(am__DEPENDENCIES_1) \
$(am__DEPENDENCIES_1)
AM_V_lt = $(am__v_lt_$(V))
am__v_lt_ = $(am__v_lt_$(AM_DEFAULT_VERBOSITY))
am__v_lt_0 = --silent
am__v_lt_1 =
am_dump_qmc_OBJECTS = dump_qmc.$(OBJEXT)
dump_qmc_OBJECTS = $(am_dump_qmc_OBJECTS)
dump_qmc_LDADD = $(LDADD)
dump_qmc_DEPENDENCIES = $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1)
am_ems_unfold_1d_OBJECTS = ems_unfold_1d.$(OBJEXT)
ems_unfold_1d_OBJECTS = $(am_ems_unfold_1d_OBJECTS)
ems_unfold_1d_LDADD = $(LDADD)
ems_unfold_1d_DEPENDENCIES = $(am__DEPENDENCIES_1) \
$(am__DEPENDENCIES_1)
am_lorpe_1d_OBJECTS = lorpe_1d.$(OBJEXT)
lorpe_1d_OBJECTS = $(am_lorpe_1d_OBJECTS)
lorpe_1d_LDADD = $(LDADD)
lorpe_1d_DEPENDENCIES = $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1)
am_lorpe_multivariate_OBJECTS = lorpe_multivariate.$(OBJEXT)
lorpe_multivariate_OBJECTS = $(am_lorpe_multivariate_OBJECTS)
lorpe_multivariate_LDADD = $(LDADD)
lorpe_multivariate_DEPENDENCIES = $(am__DEPENDENCIES_1) \
$(am__DEPENDENCIES_1)
am_n_poly_coeffs_OBJECTS = n_poly_coeffs.$(OBJEXT)
n_poly_coeffs_OBJECTS = $(am_n_poly_coeffs_OBJECTS)
n_poly_coeffs_LDADD = $(LDADD)
n_poly_coeffs_DEPENDENCIES = $(am__DEPENDENCIES_1) \
$(am__DEPENDENCIES_1)
AM_V_P = $(am__v_P_$(V))
am__v_P_ = $(am__v_P_$(AM_DEFAULT_VERBOSITY))
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_$(V))
am__v_GEN_ = $(am__v_GEN_$(AM_DEFAULT_VERBOSITY))
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_$(V))
am__v_at_ = $(am__v_at_$(AM_DEFAULT_VERBOSITY))
am__v_at_0 = @
am__v_at_1 =
DEFAULT_INCLUDES = -I.
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__depfiles_maybe = depfiles
am__mv = mv -f
CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
$(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \
$(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
$(AM_CXXFLAGS) $(CXXFLAGS)
AM_V_CXX = $(am__v_CXX_$(V))
am__v_CXX_ = $(am__v_CXX_$(AM_DEFAULT_VERBOSITY))
am__v_CXX_0 = @echo " CXX " $@;
am__v_CXX_1 =
CXXLD = $(CXX)
CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
$(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
$(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
AM_V_CXXLD = $(am__v_CXXLD_$(V))
am__v_CXXLD_ = $(am__v_CXXLD_$(AM_DEFAULT_VERBOSITY))
am__v_CXXLD_0 = @echo " CXXLD " $@;
am__v_CXXLD_1 =
SOURCES = $(apply_lorpe_1d_SOURCES) $(dump_qmc_SOURCES) \
$(ems_unfold_1d_SOURCES) $(lorpe_1d_SOURCES) \
$(lorpe_multivariate_SOURCES) $(n_poly_coeffs_SOURCES)
DIST_SOURCES = $(apply_lorpe_1d_SOURCES) $(dump_qmc_SOURCES) \
$(ems_unfold_1d_SOURCES) $(lorpe_1d_SOURCES) \
$(lorpe_multivariate_SOURCES) $(n_poly_coeffs_SOURCES)
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
# Read a list of newline-separated strings from the standard input,
# and print each of them once, without duplicates. Input order is
# *not* preserved.
am__uniquify_input = $(AWK) '\
BEGIN { nonempty = 0; } \
{ items[$$0] = 1; nonempty = 1; } \
END { if (nonempty) { for (i in items) print i; }; } \
'
# Make sure the list of sources is unique. This is necessary because,
# e.g., the same source file might be shared among _SOURCES variables
# for different programs/libraries.
am__define_uniq_tagged_files = \
list='$(am__tagged_files)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | $(am__uniquify_input)`
ETAGS = etags
CTAGS = ctags
am__tty_colors_dummy = \
mgn= red= grn= lgn= blu= brg= std=; \
am__color_tests=no
am__tty_colors = { \
$(am__tty_colors_dummy); \
if test "X$(AM_COLOR_TESTS)" = Xno; then \
am__color_tests=no; \
elif test "X$(AM_COLOR_TESTS)" = Xalways; then \
am__color_tests=yes; \
elif test "X$$TERM" != Xdumb && { test -t 1; } 2>/dev/null; then \
am__color_tests=yes; \
fi; \
if test $$am__color_tests = yes; then \
red='[0;31m'; \
grn='[0;32m'; \
lgn='[1;32m'; \
blu='[1;34m'; \
mgn='[0;35m'; \
brg='[1m'; \
std='[m'; \
fi; \
}
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__recheck_rx = ^[ ]*:recheck:[ ]*
am__global_test_result_rx = ^[ ]*:global-test-result:[ ]*
am__copy_in_global_log_rx = ^[ ]*:copy-in-global-log:[ ]*
# A command that, given a newline-separated list of test names on the
# standard input, print the name of the tests that are to be re-run
# upon "make recheck".
am__list_recheck_tests = $(AWK) '{ \
recheck = 1; \
while ((rc = (getline line < ($$0 ".trs"))) != 0) \
{ \
if (rc < 0) \
{ \
if ((getline line2 < ($$0 ".log")) < 0) \
recheck = 0; \
break; \
} \
else if (line ~ /$(am__recheck_rx)[nN][Oo]/) \
{ \
recheck = 0; \
break; \
} \
else if (line ~ /$(am__recheck_rx)[yY][eE][sS]/) \
{ \
break; \
} \
}; \
if (recheck) \
print $$0; \
close ($$0 ".trs"); \
close ($$0 ".log"); \
}'
# A command that, given a newline-separated list of test names on the
# standard input, create the global log from their .trs and .log files.
am__create_global_log = $(AWK) ' \
function fatal(msg) \
{ \
print "fatal: making $@: " msg | "cat >&2"; \
exit 1; \
} \
function rst_section(header) \
{ \
print header; \
len = length(header); \
for (i = 1; i <= len; i = i + 1) \
printf "="; \
printf "\n\n"; \
} \
{ \
copy_in_global_log = 1; \
global_test_result = "RUN"; \
while ((rc = (getline line < ($$0 ".trs"))) != 0) \
{ \
if (rc < 0) \
fatal("failed to read from " $$0 ".trs"); \
if (line ~ /$(am__global_test_result_rx)/) \
{ \
sub("$(am__global_test_result_rx)", "", line); \
sub("[ ]*$$", "", line); \
global_test_result = line; \
} \
else if (line ~ /$(am__copy_in_global_log_rx)[nN][oO]/) \
copy_in_global_log = 0; \
}; \
if (copy_in_global_log) \
{ \
rst_section(global_test_result ": " $$0); \
while ((rc = (getline line < ($$0 ".log"))) != 0) \
{ \
if (rc < 0) \
fatal("failed to read from " $$0 ".log"); \
print line; \
}; \
printf "\n"; \
}; \
close ($$0 ".trs"); \
close ($$0 ".log"); \
}'
# Restructured Text title.
am__rst_title = { sed 's/.*/ & /;h;s/./=/g;p;x;s/ *$$//;p;g' && echo; }
# Solaris 10 'make', and several other traditional 'make' implementations,
# pass "-e" to $(SHELL), and POSIX 2008 even requires this. Work around it
# by disabling -e (using the XSI extension "set +e") if it's set.
am__sh_e_setup = case $$- in *e*) set +e;; esac
# Default flags passed to test drivers.
am__common_driver_flags = \
--color-tests "$$am__color_tests" \
--enable-hard-errors "$$am__enable_hard_errors" \
--expect-failure "$$am__expect_failure"
# To be inserted before the command running the test. Creates the
# directory for the log if needed. Stores in $dir the directory
# containing $f, in $tst the test, in $log the log. Executes the
# developer- defined test setup AM_TESTS_ENVIRONMENT (if any), and
# passes TESTS_ENVIRONMENT. Set up options for the wrapper that
# will run the test scripts (or their associated LOG_COMPILER, if
# thy have one).
am__check_pre = \
$(am__sh_e_setup); \
$(am__vpath_adj_setup) $(am__vpath_adj) \
$(am__tty_colors); \
srcdir=$(srcdir); export srcdir; \
case "$@" in \
*/*) am__odir=`echo "./$@" | sed 's|/[^/]*$$||'`;; \
*) am__odir=.;; \
esac; \
test "x$$am__odir" = x"." || test -d "$$am__odir" \
|| $(MKDIR_P) "$$am__odir" || exit $$?; \
if test -f "./$$f"; then dir=./; \
elif test -f "$$f"; then dir=; \
else dir="$(srcdir)/"; fi; \
tst=$$dir$$f; log='$@'; \
if test -n '$(DISABLE_HARD_ERRORS)'; then \
am__enable_hard_errors=no; \
else \
am__enable_hard_errors=yes; \
fi; \
case " $(XFAIL_TESTS) " in \
*[\ \ ]$$f[\ \ ]* | *[\ \ ]$$dir$$f[\ \ ]*) \
am__expect_failure=yes;; \
*) \
am__expect_failure=no;; \
esac; \
$(AM_TESTS_ENVIRONMENT) $(TESTS_ENVIRONMENT)
# A shell command to get the names of the tests scripts with any registered
# extension removed (i.e., equivalently, the names of the test logs, with
# the '.log' extension removed). The result is saved in the shell variable
# '$bases'. This honors runtime overriding of TESTS and TEST_LOGS. Sadly,
# we cannot use something simpler, involving e.g., "$(TEST_LOGS:.log=)",
# since that might cause problem with VPATH rewrites for suffix-less tests.
# See also 'test-harness-vpath-rewrite.sh' and 'test-trs-basic.sh'.
am__set_TESTS_bases = \
bases='$(TEST_LOGS)'; \
bases=`for i in $$bases; do echo $$i; done | sed 's/\.log$$//'`; \
bases=`echo $$bases`
RECHECK_LOGS = $(TEST_LOGS)
AM_RECURSIVE_TARGETS = check recheck
TEST_SUITE_LOG = test-suite.log
TEST_EXTENSIONS = .test
LOG_DRIVER = $(SHELL) $(top_srcdir)/test-driver
LOG_COMPILE = $(LOG_COMPILER) $(AM_LOG_FLAGS) $(LOG_FLAGS)
am__set_b = \
case '$@' in \
*/*) \
case '$*' in \
*/*) b='$*';; \
*) b=`echo '$@' | sed 's/\.log$$//'`; \
esac;; \
*) \
b='$*';; \
esac
am__test_logs1 = $(TESTS:=.log)
am__test_logs2 = $(am__test_logs1:.log=.log)
TEST_LOGS = $(am__test_logs2:.test.log=.log)
TEST_LOG_DRIVER = $(SHELL) $(top_srcdir)/test-driver
TEST_LOG_COMPILE = $(TEST_LOG_COMPILER) $(AM_TEST_LOG_FLAGS) \
$(TEST_LOG_FLAGS)
am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp \
$(top_srcdir)/test-driver
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15
AMTAR = $${TAR-tar}
AM_DEFAULT_VERBOSITY = 1
AR = ar
AUTOCONF = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf
AUTOHEADER = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader
AUTOMAKE = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -std=c++11 -O3 -Wall -W -Werror
CYGPATH_W = echo
DEFS = -DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"
DEPDIR = .deps
DEPS_CFLAGS = -I/usr/local/include
DEPS_LIBS = -L/usr/local/lib -lfftw3 -lgeners -lkstest
DLLTOOL = false
DSYMUTIL =
DUMPBIN =
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = /bin/grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
FGREP = /bin/grep -F
FLIBS = -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
GREP = /bin/grep
INSTALL = /usr/bin/install -c
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = $(install_sh) -c -s
-LD = /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
+LD = /usr/bin/ld -m elf_x86_64
LDFLAGS =
LIBOBJS =
LIBS =
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LIPO =
LN_S = ln -s
LTLIBOBJS =
LT_SYS_LIBRARY_PATH =
MAKEINFO = ${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo
MANIFEST_TOOL = :
MKDIR_P = /bin/mkdir -p
NM = /usr/bin/nm -B
NMEDIT =
OBJDUMP = objdump
OBJEXT = o
OTOOL =
OTOOL64 =
PACKAGE = npstat
PACKAGE_BUGREPORT =
PACKAGE_NAME = npstat
PACKAGE_STRING = npstat 4.9.0
PACKAGE_TARNAME = npstat
PACKAGE_URL =
PACKAGE_VERSION = 4.9.0
PATH_SEPARATOR = :
PKG_CONFIG = /usr/bin/pkg-config
PKG_CONFIG_LIBDIR =
PKG_CONFIG_PATH = /usr/local/lib/pkgconfig
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/bash
STRIP = strip
VERSION = 4.9.0
abs_builddir = /home/igv/Hepforge/npstat/trunk/examples/C++
abs_srcdir = /home/igv/Hepforge/npstat/trunk/examples/C++
abs_top_builddir = /home/igv/Hepforge/npstat/trunk
abs_top_srcdir = /home/igv/Hepforge/npstat/trunk
ac_ct_AR = ar
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DUMPBIN =
ac_ct_F77 = g77
am__include = include
am__leading_dot = .
am__quote =
am__tar = $${TAR-tar} chof - "$$tardir"
am__untar = $${TAR-tar} xf -
bindir = ${exec_prefix}/bin
build = x86_64-pc-linux-gnu
build_alias =
build_cpu = x86_64
build_os = linux-gnu
build_vendor = pc
builddir = .
datadir = ${datarootdir}
datarootdir = ${prefix}/share
docdir = ${datarootdir}/doc/${PACKAGE_TARNAME}
dvidir = ${docdir}
exec_prefix = ${prefix}
host = x86_64-pc-linux-gnu
host_alias =
host_cpu = x86_64
host_os = linux-gnu
host_vendor = pc
htmldir = ${docdir}
includedir = ${prefix}/include
infodir = ${datarootdir}/info
install_sh = ${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localedir = ${datarootdir}/locale
localstatedir = ${prefix}/var
mandir = ${datarootdir}/man
mkdir_p = $(MKDIR_P)
oldincludedir = /usr/include
pdfdir = ${docdir}
prefix = /usr/local
program_transform_name = s,x,x,
psdir = ${docdir}
runstatedir = ${localstatedir}/run
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
srcdir = .
sysconfdir = ${prefix}/etc
target_alias =
top_build_prefix = ../../
top_builddir = ../..
top_srcdir = ../..
lorpe_1d_SOURCES = lorpe_1d.cc
apply_lorpe_1d_SOURCES = apply_lorpe_1d.cc
lorpe_multivariate_SOURCES = lorpe_multivariate.cc
n_poly_coeffs_SOURCES = n_poly_coeffs.cc
ems_unfold_1d_SOURCES = ems_unfold_1d.cc
dump_qmc_SOURCES = dump_qmc.cc
LDADD = -L../../npstat -lnpstat $(DEPS_LIBS) -llapack -lblas -lbz2 -lz $(FLIBS)
AM_CPPFLAGS = -I../../ -I. $(DEPS_CFLAGS)
TESTS = $(noinst_PROGRAMS)
EXTRA_DIST = CmdLine.hh 00README.txt
all: all-am
.SUFFIXES:
.SUFFIXES: .cc .lo .log .o .obj .test .test$(EXEEXT) .trs
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign examples/C++/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign examples/C++/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
clean-noinstPROGRAMS:
@list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \
echo " rm -f" $$list; \
rm -f $$list || exit $$?; \
test -n "$(EXEEXT)" || exit 0; \
list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
echo " rm -f" $$list; \
rm -f $$list
apply_lorpe_1d$(EXEEXT): $(apply_lorpe_1d_OBJECTS) $(apply_lorpe_1d_DEPENDENCIES) $(EXTRA_apply_lorpe_1d_DEPENDENCIES)
@rm -f apply_lorpe_1d$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(apply_lorpe_1d_OBJECTS) $(apply_lorpe_1d_LDADD) $(LIBS)
dump_qmc$(EXEEXT): $(dump_qmc_OBJECTS) $(dump_qmc_DEPENDENCIES) $(EXTRA_dump_qmc_DEPENDENCIES)
@rm -f dump_qmc$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(dump_qmc_OBJECTS) $(dump_qmc_LDADD) $(LIBS)
ems_unfold_1d$(EXEEXT): $(ems_unfold_1d_OBJECTS) $(ems_unfold_1d_DEPENDENCIES) $(EXTRA_ems_unfold_1d_DEPENDENCIES)
@rm -f ems_unfold_1d$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(ems_unfold_1d_OBJECTS) $(ems_unfold_1d_LDADD) $(LIBS)
lorpe_1d$(EXEEXT): $(lorpe_1d_OBJECTS) $(lorpe_1d_DEPENDENCIES) $(EXTRA_lorpe_1d_DEPENDENCIES)
@rm -f lorpe_1d$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(lorpe_1d_OBJECTS) $(lorpe_1d_LDADD) $(LIBS)
lorpe_multivariate$(EXEEXT): $(lorpe_multivariate_OBJECTS) $(lorpe_multivariate_DEPENDENCIES) $(EXTRA_lorpe_multivariate_DEPENDENCIES)
@rm -f lorpe_multivariate$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(lorpe_multivariate_OBJECTS) $(lorpe_multivariate_LDADD) $(LIBS)
n_poly_coeffs$(EXEEXT): $(n_poly_coeffs_OBJECTS) $(n_poly_coeffs_DEPENDENCIES) $(EXTRA_n_poly_coeffs_DEPENDENCIES)
@rm -f n_poly_coeffs$(EXEEXT)
$(AM_V_CXXLD)$(CXXLINK) $(n_poly_coeffs_OBJECTS) $(n_poly_coeffs_LDADD) $(LIBS)
mostlyclean-compile:
-rm -f *.$(OBJEXT)
distclean-compile:
-rm -f *.tab.c
include ./$(DEPDIR)/apply_lorpe_1d.Po
include ./$(DEPDIR)/dump_qmc.Po
include ./$(DEPDIR)/ems_unfold_1d.Po
include ./$(DEPDIR)/lorpe_1d.Po
include ./$(DEPDIR)/lorpe_multivariate.Po
include ./$(DEPDIR)/n_poly_coeffs.Po
.cc.o:
$(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
# $(AM_V_CXX)source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(CXXCOMPILE) -c -o $@ $<
.cc.obj:
$(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
# $(AM_V_CXX)source='$<' object='$@' libtool=no \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
.cc.lo:
$(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
$(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
# $(AM_V_CXX)source='$<' object='$@' libtool=yes \
# DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) \
# $(AM_V_CXX_no)$(LTCXXCOMPILE) -c -o $@ $<
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
ID: $(am__tagged_files)
$(am__define_uniq_tagged_files); mkid -fID $$unique
tags: tags-am
TAGS: tags
tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
set x; \
here=`pwd`; \
$(am__define_uniq_tagged_files); \
shift; \
if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
test -n "$$unique" || unique=$$empty_fix; \
if test $$# -gt 0; then \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
"$$@" $$unique; \
else \
$(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$unique; \
fi; \
fi
ctags: ctags-am
CTAGS: ctags
ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
$(am__define_uniq_tagged_files); \
test -z "$(CTAGS_ARGS)$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& $(am__cd) $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) "$$here"
cscopelist: cscopelist-am
cscopelist-am: $(am__tagged_files)
list='$(am__tagged_files)'; \
case "$(srcdir)" in \
[\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
*) sdir=$(subdir)/$(srcdir) ;; \
esac; \
for i in $$list; do \
if test -f "$$i"; then \
echo "$(subdir)/$$i"; \
else \
echo "$$sdir/$$i"; \
fi; \
done >> $(top_builddir)/cscope.files
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
# Recover from deleted '.trs' file; this should ensure that
# "rm -f foo.log; make foo.trs" re-run 'foo.test', and re-create
# both 'foo.log' and 'foo.trs'. Break the recipe in two subshells
# to avoid problems with "make -n".
.log.trs:
rm -f $< $@
$(MAKE) $(AM_MAKEFLAGS) $<
# Leading 'am--fnord' is there to ensure the list of targets does not
# expand to empty, as could happen e.g. with make check TESTS=''.
am--fnord $(TEST_LOGS) $(TEST_LOGS:.log=.trs): $(am__force_recheck)
am--force-recheck:
@:
$(TEST_SUITE_LOG): $(TEST_LOGS)
@$(am__set_TESTS_bases); \
am__f_ok () { test -f "$$1" && test -r "$$1"; }; \
redo_bases=`for i in $$bases; do \
am__f_ok $$i.trs && am__f_ok $$i.log || echo $$i; \
done`; \
if test -n "$$redo_bases"; then \
redo_logs=`for i in $$redo_bases; do echo $$i.log; done`; \
redo_results=`for i in $$redo_bases; do echo $$i.trs; done`; \
if $(am__make_dryrun); then :; else \
rm -f $$redo_logs && rm -f $$redo_results || exit 1; \
fi; \
fi; \
if test -n "$$am__remaking_logs"; then \
echo "fatal: making $(TEST_SUITE_LOG): possible infinite" \
"recursion detected" >&2; \
elif test -n "$$redo_logs"; then \
am__remaking_logs=yes $(MAKE) $(AM_MAKEFLAGS) $$redo_logs; \
fi; \
if $(am__make_dryrun); then :; else \
st=0; \
errmsg="fatal: making $(TEST_SUITE_LOG): failed to create"; \
for i in $$redo_bases; do \
test -f $$i.trs && test -r $$i.trs \
|| { echo "$$errmsg $$i.trs" >&2; st=1; }; \
test -f $$i.log && test -r $$i.log \
|| { echo "$$errmsg $$i.log" >&2; st=1; }; \
done; \
test $$st -eq 0 || exit 1; \
fi
@$(am__sh_e_setup); $(am__tty_colors); $(am__set_TESTS_bases); \
ws='[ ]'; \
results=`for b in $$bases; do echo $$b.trs; done`; \
test -n "$$results" || results=/dev/null; \
all=` grep "^$$ws*:test-result:" $$results | wc -l`; \
pass=` grep "^$$ws*:test-result:$$ws*PASS" $$results | wc -l`; \
fail=` grep "^$$ws*:test-result:$$ws*FAIL" $$results | wc -l`; \
skip=` grep "^$$ws*:test-result:$$ws*SKIP" $$results | wc -l`; \
xfail=`grep "^$$ws*:test-result:$$ws*XFAIL" $$results | wc -l`; \
xpass=`grep "^$$ws*:test-result:$$ws*XPASS" $$results | wc -l`; \
error=`grep "^$$ws*:test-result:$$ws*ERROR" $$results | wc -l`; \
if test `expr $$fail + $$xpass + $$error` -eq 0; then \
success=true; \
else \
success=false; \
fi; \
br='==================='; br=$$br$$br$$br$$br; \
result_count () \
{ \
if test x"$$1" = x"--maybe-color"; then \
maybe_colorize=yes; \
elif test x"$$1" = x"--no-color"; then \
maybe_colorize=no; \
else \
echo "$@: invalid 'result_count' usage" >&2; exit 4; \
fi; \
shift; \
desc=$$1 count=$$2; \
if test $$maybe_colorize = yes && test $$count -gt 0; then \
color_start=$$3 color_end=$$std; \
else \
color_start= color_end=; \
fi; \
echo "$${color_start}# $$desc $$count$${color_end}"; \
}; \
create_testsuite_report () \
{ \
result_count $$1 "TOTAL:" $$all "$$brg"; \
result_count $$1 "PASS: " $$pass "$$grn"; \
result_count $$1 "SKIP: " $$skip "$$blu"; \
result_count $$1 "XFAIL:" $$xfail "$$lgn"; \
result_count $$1 "FAIL: " $$fail "$$red"; \
result_count $$1 "XPASS:" $$xpass "$$red"; \
result_count $$1 "ERROR:" $$error "$$mgn"; \
}; \
{ \
echo "$(PACKAGE_STRING): $(subdir)/$(TEST_SUITE_LOG)" | \
$(am__rst_title); \
create_testsuite_report --no-color; \
echo; \
echo ".. contents:: :depth: 2"; \
echo; \
for b in $$bases; do echo $$b; done \
| $(am__create_global_log); \
} >$(TEST_SUITE_LOG).tmp || exit 1; \
mv $(TEST_SUITE_LOG).tmp $(TEST_SUITE_LOG); \
if $$success; then \
col="$$grn"; \
else \
col="$$red"; \
test x"$$VERBOSE" = x || cat $(TEST_SUITE_LOG); \
fi; \
echo "$${col}$$br$${std}"; \
echo "$${col}Testsuite summary for $(PACKAGE_STRING)$${std}"; \
echo "$${col}$$br$${std}"; \
create_testsuite_report --maybe-color; \
echo "$$col$$br$$std"; \
if $$success; then :; else \
echo "$${col}See $(subdir)/$(TEST_SUITE_LOG)$${std}"; \
if test -n "$(PACKAGE_BUGREPORT)"; then \
echo "$${col}Please report to $(PACKAGE_BUGREPORT)$${std}"; \
fi; \
echo "$$col$$br$$std"; \
fi; \
$$success || exit 1
# check-TESTS: driver for the Automake parallel test harness.  Removes any
# stale .log/.trs files named in $(RECHECK_LOGS) plus the old suite log,
# derives the test base names via $(am__set_TESTS_bases), then re-invokes
# $(MAKE) on $(TEST_SUITE_LOG) with TEST_LOGS set to one .log per base so
# the per-test rules below fire in parallel.  "set +e" lets the sub-make's
# status be propagated by the explicit "exit $$?" instead of aborting early.
# NOTE(review): recipe lines must begin with a hard TAB; leading whitespace
# appears stripped in this copy of the file -- confirm against the original.
check-TESTS:
@list='$(RECHECK_LOGS)'; test -z "$$list" || rm -f $$list
@list='$(RECHECK_LOGS:.log=.trs)'; test -z "$$list" || rm -f $$list
@test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
@set +e; $(am__set_TESTS_bases); \
log_list=`for i in $$bases; do echo $$i.log; done`; \
trs_list=`for i in $$bases; do echo $$i.trs; done`; \
log_list=`echo $$log_list`; trs_list=`echo $$trs_list`; \
$(MAKE) $(AM_MAKEFLAGS) $(TEST_SUITE_LOG) TEST_LOGS="$$log_list"; \
exit $$?;
# recheck: re-run only the tests that previously failed.  The base-name list
# is filtered through $(am__list_recheck_tests) (which presumably inspects
# the existing .trs result files -- its definition is not in view), and
# am__force_recheck is passed down so the selected logs are rebuilt even if
# they look up to date.
recheck: all
@test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
@set +e; $(am__set_TESTS_bases); \
bases=`for i in $$bases; do echo $$i; done \
| $(am__list_recheck_tests)` || exit 1; \
log_list=`for i in $$bases; do echo $$i.log; done`; \
log_list=`echo $$log_list`; \
$(MAKE) $(AM_MAKEFLAGS) $(TEST_SUITE_LOG) \
am__force_recheck=am--force-recheck \
TEST_LOGS="$$log_list"; \
exit $$?
# Produce lorpe_1d.log by running the test binary through $(LOG_DRIVER).
# $$f (display name) and $$tst (command to run) are presumably set from
# $$p/$$b by $(am__check_pre) -- confirm; its definition is not in view.
lorpe_1d.log: lorpe_1d$(EXEEXT)
@p='lorpe_1d$(EXEEXT)'; \
b='lorpe_1d'; \
$(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
--log-file $$b.log --trs-file $$b.trs \
$(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
"$$tst" $(AM_TESTS_FD_REDIRECT)
# Produce apply_lorpe_1d.log via $(LOG_DRIVER); $$f/$$tst are presumably
# derived from $$p/$$b by $(am__check_pre) (definition not in view).
apply_lorpe_1d.log: apply_lorpe_1d$(EXEEXT)
@p='apply_lorpe_1d$(EXEEXT)'; \
b='apply_lorpe_1d'; \
$(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
--log-file $$b.log --trs-file $$b.trs \
$(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
"$$tst" $(AM_TESTS_FD_REDIRECT)
# Produce lorpe_multivariate.log via $(LOG_DRIVER); $$f/$$tst are presumably
# derived from $$p/$$b by $(am__check_pre) (definition not in view).
lorpe_multivariate.log: lorpe_multivariate$(EXEEXT)
@p='lorpe_multivariate$(EXEEXT)'; \
b='lorpe_multivariate'; \
$(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
--log-file $$b.log --trs-file $$b.trs \
$(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
"$$tst" $(AM_TESTS_FD_REDIRECT)
# Produce n_poly_coeffs.log via $(LOG_DRIVER); $$f/$$tst are presumably
# derived from $$p/$$b by $(am__check_pre) (definition not in view).
n_poly_coeffs.log: n_poly_coeffs$(EXEEXT)
@p='n_poly_coeffs$(EXEEXT)'; \
b='n_poly_coeffs'; \
$(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
--log-file $$b.log --trs-file $$b.trs \
$(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
"$$tst" $(AM_TESTS_FD_REDIRECT)
# Produce ems_unfold_1d.log via $(LOG_DRIVER); $$f/$$tst are presumably
# derived from $$p/$$b by $(am__check_pre) (definition not in view).
ems_unfold_1d.log: ems_unfold_1d$(EXEEXT)
@p='ems_unfold_1d$(EXEEXT)'; \
b='ems_unfold_1d'; \
$(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
--log-file $$b.log --trs-file $$b.trs \
$(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
"$$tst" $(AM_TESTS_FD_REDIRECT)
# Produce dump_qmc.log via $(LOG_DRIVER); $$f/$$tst are presumably
# derived from $$p/$$b by $(am__check_pre) (definition not in view).
dump_qmc.log: dump_qmc$(EXEEXT)
@p='dump_qmc$(EXEEXT)'; \
b='dump_qmc'; \
$(am__check_pre) $(LOG_DRIVER) --test-name "$$f" \
--log-file $$b.log --trs-file $$b.trs \
$(am__common_driver_flags) $(AM_LOG_DRIVER_FLAGS) $(LOG_DRIVER_FLAGS) -- $(LOG_COMPILE) \
"$$tst" $(AM_TESTS_FD_REDIRECT)
# Legacy suffix rule: build X.log from an X.test script, using the
# TEST_LOG_* driver/compiler variables instead of the LOG_* ones used by
# the explicit binary rules above.  $(am__set_b) presumably derives $$b
# from $< -- confirm; its definition is not in view.
.test.log:
@p='$<'; \
$(am__set_b); \
$(am__check_pre) $(TEST_LOG_DRIVER) --test-name "$$f" \
--log-file $$b.log --trs-file $$b.trs \
$(am__common_driver_flags) $(AM_TEST_LOG_DRIVER_FLAGS) $(TEST_LOG_DRIVER_FLAGS) -- $(TEST_LOG_COMPILE) \
"$$tst" $(AM_TESTS_FD_REDIRECT)
#.test$(EXEEXT).log:
# @p='$<'; \
# $(am__set_b); \
# $(am__check_pre) $(TEST_LOG_DRIVER) --test-name "$$f" \
# --log-file $$b.log --trs-file $$b.trs \
# $(am__common_driver_flags) $(AM_TEST_LOG_DRIVER_FLAGS) $(TEST_LOG_DRIVER_FLAGS) -- $(TEST_LOG_COMPILE) \
# "$$tst" $(AM_TESTS_FD_REDIRECT)
# distdir: populate $(distdir) with $(DISTFILES) for "make dist".
#  1. Build sed-escaped forms of $(srcdir)/$(top_srcdir) so they can be
#     stripped from (or rewritten in) each distributed file name.
#  2. Pre-create every needed subdirectory of $(distdir) in one mkdir call.
#  3. Copy each entry: directories are copied recursively with cp -fpR
#     (fixing up u+rwx on copied dirs so later writes don't fail); plain
#     files are copied with cp -p only if not already present.  Any failed
#     copy aborts the recipe via "|| exit 1".
# The "d=.; else d=$(srcdir)" dance makes VPATH builds pick up generated
# files from the build tree and pristine ones from the source tree.
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
# Standard GNU/Automake entry points.  "check" builds everything (all-am)
# and then runs the test suite via check-TESTS; "install" performs the
# two-phase exec+data install through a recursive $(MAKE).  The *-am
# indirections let per-directory hooks be added without changing callers.
check-am: all-am
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
check: check-am
all-am: Makefile $(PROGRAMS)
installdirs:
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
# install-strip: re-run "make install" with INSTALL_PROGRAM pointed at the
# stripping install wrapper $(INSTALL_STRIP_PROGRAM).  When the user has
# configured a $(STRIP) tool, it is forwarded via INSTALL_PROGRAM_ENV so
# the wrapper strips with that specific program.
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
# Per-level cleanup hooks.  mostlyclean-generic drops the per-test .log/.trs
# files and the suite log; distclean-generic removes configure-generated
# files; maintainer-clean-generic only warns (the actual deletion happens in
# maintainer-clean below).  The leading "-" ignores rm failures on files
# that are already gone.
mostlyclean-generic:
-test -z "$(TEST_LOGS)" || rm -f $(TEST_LOGS)
-test -z "$(TEST_LOGS:.log=.trs)" || rm -f $(TEST_LOGS:.log=.trs)
-test -z "$(TEST_SUITE_LOG)" || rm -f $(TEST_SUITE_LOG)
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
# Aggregate clean targets and the standard GNU no-op stubs.  Each clean
# level (mostlyclean < clean < distclean < maintainer-clean) chains to the
# levels below it through the *-am aggregators; distclean/maintainer-clean
# additionally remove $(DEPDIR) and this Makefile.  The empty dvi/html/
# info/install-*/pdf/ps stubs exist so "make <standard-target>" succeeds
# even though this directory has nothing to do for them.
clean: clean-am
clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \
mostlyclean-am
distclean: distclean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
distclean-am: clean-am distclean-compile distclean-generic \
distclean-tags
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am:
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -rf ./$(DEPDIR)
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-compile mostlyclean-generic \
mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am:
# Special-target declarations.  .MAKE marks targets whose recipes invoke
# $(MAKE) recursively (so they run even under "make -n" and share the
# jobserver); .PHONY lists every command-style target so a stray file with
# the same name cannot shadow it; .PRECIOUS protects the Makefile itself.
.MAKE: check-am install-am install-strip
.PHONY: CTAGS GTAGS TAGS all all-am check check-TESTS check-am clean \
clean-generic clean-libtool clean-noinstPROGRAMS cscopelist-am \
ctags ctags-am distclean distclean-compile distclean-generic \
distclean-libtool distclean-tags distdir dvi dvi-am html \
html-am info info-am install install-am install-data \
install-data-am install-dvi install-dvi-am install-exec \
install-exec-am install-html install-html-am install-info \
install-info-am install-man install-pdf install-pdf-am \
install-ps install-ps-am install-strip installcheck \
installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-compile \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
recheck tags tags-am uninstall uninstall-am
.PRECIOUS: Makefile
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:
Index: trunk/config.log
===================================================================
--- trunk/config.log (revision 579)
+++ trunk/config.log (revision 580)
@@ -1,942 +1,942 @@
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
It was created by npstat configure 4.9.0, which was
generated by GNU Autoconf 2.69. Invocation command line was
$ ./configure --with-pic
## --------- ##
## Platform. ##
## --------- ##
hostname = dawn
uname -m = x86_64
-uname -r = 4.15.0-29-generic
+uname -r = 4.15.0-42-generic
uname -s = Linux
-uname -v = #31-Ubuntu SMP Tue Jul 17 15:39:52 UTC 2018
+uname -v = #45-Ubuntu SMP Thu Nov 15 19:32:57 UTC 2018
/usr/bin/uname -p = unknown
/bin/uname -X = unknown
/bin/arch = unknown
/usr/bin/arch -k = unknown
/usr/convex/getsysinfo = unknown
/usr/bin/hostinfo = unknown
/bin/machine = unknown
/usr/bin/oslevel = unknown
/bin/universe = unknown
PATH: /home/igv/bin
PATH: /home/igv/local/bin
PATH: /usr/local/anaconda3/bin
PATH: /usr/local/bin
PATH: /usr/local/root/bin
PATH: /usr/local/bin
PATH: /bin
PATH: /usr/bin
PATH: /sbin
PATH: /usr/sbin
PATH: .
## ----------- ##
## Core tests. ##
## ----------- ##
configure:2421: checking for a BSD-compatible install
configure:2489: result: /usr/bin/install -c
configure:2500: checking whether build environment is sane
configure:2555: result: yes
configure:2706: checking for a thread-safe mkdir -p
configure:2745: result: /bin/mkdir -p
configure:2752: checking for gawk
configure:2768: found /usr/bin/gawk
configure:2779: result: gawk
configure:2790: checking whether make sets $(MAKE)
configure:2812: result: yes
configure:2841: checking whether make supports nested variables
configure:2858: result: yes
configure:3041: checking for pkg-config
configure:3059: found /usr/bin/pkg-config
configure:3071: result: /usr/bin/pkg-config
configure:3096: checking pkg-config is at least version 0.9.0
configure:3099: result: yes
configure:3109: checking for DEPS
configure:3116: $PKG_CONFIG --exists --print-errors "fftw3 >= 3.1.2 geners >= 1.3.0 kstest >= 2.0.0"
configure:3119: $? = 0
configure:3133: $PKG_CONFIG --exists --print-errors "fftw3 >= 3.1.2 geners >= 1.3.0 kstest >= 2.0.0"
configure:3136: $? = 0
configure:3194: result: yes
configure:3257: checking for g++
configure:3273: found /usr/bin/g++
configure:3284: result: g++
configure:3311: checking for C++ compiler version
configure:3320: g++ --version >&5
-g++ (Ubuntu 7.3.0-16ubuntu3) 7.3.0
+g++ (Ubuntu 7.3.0-27ubuntu1~18.04) 7.3.0
Copyright (C) 2017 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
configure:3331: $? = 0
configure:3320: g++ -v >&5
Using built-in specs.
COLLECT_GCC=g++
COLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper
OFFLOAD_TARGET_NAMES=nvptx-none
OFFLOAD_TARGET_DEFAULT=1
Target: x86_64-linux-gnu
-Configured with: ../src/configure -v --with-pkgversion='Ubuntu 7.3.0-16ubuntu3' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --with-as=/usr/bin/x86_64-linux-gnu-as --with-ld=/usr/bin/x86_64-linux-gnu-ld --program-suffix=-7 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --with-sysroot=/ --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-libmpx --enable-plugin --enable-default-pie --with-system-zlib --with-target-system-zlib --enable-objc-gc=auto --enable-multiarch --disable-werror --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu
+Configured with: ../src/configure -v --with-pkgversion='Ubuntu 7.3.0-27ubuntu1~18.04' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --program-suffix=-7 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --with-sysroot=/ --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-libmpx --enable-plugin --enable-default-pie --with-system-zlib --with-target-system-zlib --enable-objc-gc=auto --enable-multiarch --disable-werror --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu
Thread model: posix
-gcc version 7.3.0 (Ubuntu 7.3.0-16ubuntu3)
+gcc version 7.3.0 (Ubuntu 7.3.0-27ubuntu1~18.04)
configure:3331: $? = 0
configure:3320: g++ -V >&5
g++: error: unrecognized command line option '-V'
g++: fatal error: no input files
compilation terminated.
configure:3331: $? = 1
configure:3320: g++ -qversion >&5
g++: error: unrecognized command line option '-qversion'; did you mean '--version'?
g++: fatal error: no input files
compilation terminated.
configure:3331: $? = 1
configure:3351: checking whether the C++ compiler works
configure:3373: g++ -std=c++11 -O3 -Wall -W -Werror conftest.cpp >&5
configure:3377: $? = 0
configure:3425: result: yes
configure:3428: checking for C++ compiler default output file name
configure:3430: result: a.out
configure:3436: checking for suffix of executables
configure:3443: g++ -o conftest -std=c++11 -O3 -Wall -W -Werror conftest.cpp >&5
configure:3447: $? = 0
configure:3469: result:
configure:3491: checking whether we are cross compiling
configure:3499: g++ -o conftest -std=c++11 -O3 -Wall -W -Werror conftest.cpp >&5
configure:3503: $? = 0
configure:3510: ./conftest
configure:3514: $? = 0
configure:3529: result: no
configure:3534: checking for suffix of object files
configure:3556: g++ -c -std=c++11 -O3 -Wall -W -Werror conftest.cpp >&5
configure:3560: $? = 0
configure:3581: result: o
configure:3585: checking whether we are using the GNU C++ compiler
configure:3604: g++ -c -std=c++11 -O3 -Wall -W -Werror conftest.cpp >&5
configure:3604: $? = 0
configure:3613: result: yes
configure:3622: checking whether g++ accepts -g
configure:3642: g++ -c -g conftest.cpp >&5
configure:3642: $? = 0
configure:3683: result: yes
configure:3717: checking for style of include used by make
configure:3745: result: GNU
configure:3771: checking dependency style of g++
configure:3882: result: gcc3
configure:3951: checking for g77
configure:3967: found /home/igv/bin/g77
configure:3978: result: g77
configure:4004: checking for Fortran 77 compiler version
configure:4013: g77 --version >&5
-GNU Fortran (Ubuntu 7.3.0-16ubuntu3) 7.3.0
+GNU Fortran (Ubuntu 7.3.0-27ubuntu1~18.04) 7.3.0
Copyright (C) 2017 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
configure:4024: $? = 0
configure:4013: g77 -v >&5
Using built-in specs.
COLLECT_GCC=g77
COLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper
OFFLOAD_TARGET_NAMES=nvptx-none
OFFLOAD_TARGET_DEFAULT=1
Target: x86_64-linux-gnu
-Configured with: ../src/configure -v --with-pkgversion='Ubuntu 7.3.0-16ubuntu3' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --with-as=/usr/bin/x86_64-linux-gnu-as --with-ld=/usr/bin/x86_64-linux-gnu-ld --program-suffix=-7 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --with-sysroot=/ --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-libmpx --enable-plugin --enable-default-pie --with-system-zlib --with-target-system-zlib --enable-objc-gc=auto --enable-multiarch --disable-werror --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu
+Configured with: ../src/configure -v --with-pkgversion='Ubuntu 7.3.0-27ubuntu1~18.04' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --program-suffix=-7 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --with-sysroot=/ --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-libmpx --enable-plugin --enable-default-pie --with-system-zlib --with-target-system-zlib --enable-objc-gc=auto --enable-multiarch --disable-werror --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu
Thread model: posix
-gcc version 7.3.0 (Ubuntu 7.3.0-16ubuntu3)
+gcc version 7.3.0 (Ubuntu 7.3.0-27ubuntu1~18.04)
configure:4024: $? = 0
configure:4013: g77 -V >&5
g77: error: unrecognized command line option '-V'
g77: fatal error: no input files
compilation terminated.
configure:4024: $? = 1
configure:4013: g77 -qversion >&5
g77: error: unrecognized command line option '-qversion'; did you mean '--version'?
g77: fatal error: no input files
compilation terminated.
configure:4024: $? = 1
configure:4033: checking whether we are using the GNU Fortran 77 compiler
configure:4046: g77 -c conftest.F >&5
configure:4046: $? = 0
configure:4055: result: yes
configure:4061: checking whether g77 accepts -g
configure:4072: g77 -c -g conftest.f >&5
configure:4072: $? = 0
configure:4080: result: yes
configure:4113: checking build system type
configure:4127: result: x86_64-pc-linux-gnu
configure:4147: checking host system type
configure:4160: result: x86_64-pc-linux-gnu
configure:4185: checking how to get verbose linking output from g77
configure:4195: g77 -c -g -O2 conftest.f >&5
configure:4195: $? = 0
configure:4213: g77 -o conftest -g -O2 -v conftest.f
Using built-in specs.
Target: x86_64-linux-gnu
Thread model: posix
-gcc version 7.3.0 (Ubuntu 7.3.0-16ubuntu3)
- /usr/lib/gcc/x86_64-linux-gnu/7/f951 conftest.f -ffixed-form -quiet -dumpbase conftest.f -mtune=generic -march=x86-64 -auxbase conftest -g -O2 -version -fintrinsic-modules-path /usr/lib/gcc/x86_64-linux-gnu/7/finclude -o /tmp/ccyRjNKr.s
-GNU Fortran (Ubuntu 7.3.0-16ubuntu3) version 7.3.0 (x86_64-linux-gnu)
+gcc version 7.3.0 (Ubuntu 7.3.0-27ubuntu1~18.04)
+ /usr/lib/gcc/x86_64-linux-gnu/7/f951 conftest.f -ffixed-form -quiet -dumpbase conftest.f -mtune=generic -march=x86-64 -auxbase conftest -g -O2 -version -fintrinsic-modules-path /usr/lib/gcc/x86_64-linux-gnu/7/finclude -o /tmp/ccxSyEdR.s
+GNU Fortran (Ubuntu 7.3.0-27ubuntu1~18.04) version 7.3.0 (x86_64-linux-gnu)
compiled by GNU C version 7.3.0, GMP version 6.1.2, MPFR version 4.0.1, MPC version 1.1.0, isl version isl-0.19-GMP
GGC heuristics: --param ggc-min-expand=100 --param ggc-min-heapsize=131072
-GNU Fortran2008 (Ubuntu 7.3.0-16ubuntu3) version 7.3.0 (x86_64-linux-gnu)
+GNU Fortran2008 (Ubuntu 7.3.0-27ubuntu1~18.04) version 7.3.0 (x86_64-linux-gnu)
compiled by GNU C version 7.3.0, GMP version 6.1.2, MPFR version 4.0.1, MPC version 1.1.0, isl version isl-0.19-GMP
GGC heuristics: --param ggc-min-expand=100 --param ggc-min-heapsize=131072
- /usr/bin/x86_64-linux-gnu-as -v --64 -o /tmp/cc34UAU7.o /tmp/ccyRjNKr.s
+ as -v --64 -o /tmp/ccWKm5Q5.o /tmp/ccxSyEdR.s
GNU assembler version 2.30 (x86_64-linux-gnu) using BFD version (GNU Binutils for Ubuntu) 2.30
Reading specs from /usr/lib/gcc/x86_64-linux-gnu/7/libgfortran.spec
rename spec lib to liborig
- /usr/lib/gcc/x86_64-linux-gnu/7/collect2 -plugin /usr/lib/gcc/x86_64-linux-gnu/7/liblto_plugin.so -plugin-opt=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper -plugin-opt=-fresolution=/tmp/ccR4od5N.res -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lquadmath -plugin-opt=-pass-through=-lm -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc --sysroot=/ --build-id --eh-frame-hdr -m elf_x86_64 --hash-style=gnu --as-needed -dynamic-linker /lib64/ld-linux-x86-64.so.2 -pie -z now -z relro -o conftest /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/Scrt1.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. /tmp/cc34UAU7.o -lgfortran -lm -lgcc_s -lgcc -lquadmath -lm -lgcc_s -lgcc -lc -lgcc_s -lgcc /usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o
+ /usr/lib/gcc/x86_64-linux-gnu/7/collect2 -plugin /usr/lib/gcc/x86_64-linux-gnu/7/liblto_plugin.so -plugin-opt=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper -plugin-opt=-fresolution=/tmp/ccuYHhvk.res -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lquadmath -plugin-opt=-pass-through=-lm -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc --sysroot=/ --build-id --eh-frame-hdr -m elf_x86_64 --hash-style=gnu --as-needed -dynamic-linker /lib64/ld-linux-x86-64.so.2 -pie -z now -z relro -o conftest /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/Scrt1.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. /tmp/ccWKm5Q5.o -lgfortran -lm -lgcc_s -lgcc -lquadmath -lm -lgcc_s -lgcc -lc -lgcc_s -lgcc /usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o
configure:4296: result: -v
configure:4298: checking for Fortran 77 libraries of g77
configure:4321: g77 -o conftest -g -O2 -v conftest.f
Using built-in specs.
Target: x86_64-linux-gnu
Thread model: posix
-gcc version 7.3.0 (Ubuntu 7.3.0-16ubuntu3)
- /usr/lib/gcc/x86_64-linux-gnu/7/f951 conftest.f -ffixed-form -quiet -dumpbase conftest.f -mtune=generic -march=x86-64 -auxbase conftest -g -O2 -version -fintrinsic-modules-path /usr/lib/gcc/x86_64-linux-gnu/7/finclude -o /tmp/ccZ2TJYr.s
-GNU Fortran (Ubuntu 7.3.0-16ubuntu3) version 7.3.0 (x86_64-linux-gnu)
+gcc version 7.3.0 (Ubuntu 7.3.0-27ubuntu1~18.04)
+ /usr/lib/gcc/x86_64-linux-gnu/7/f951 conftest.f -ffixed-form -quiet -dumpbase conftest.f -mtune=generic -march=x86-64 -auxbase conftest -g -O2 -version -fintrinsic-modules-path /usr/lib/gcc/x86_64-linux-gnu/7/finclude -o /tmp/cc7MTn7T.s
+GNU Fortran (Ubuntu 7.3.0-27ubuntu1~18.04) version 7.3.0 (x86_64-linux-gnu)
compiled by GNU C version 7.3.0, GMP version 6.1.2, MPFR version 4.0.1, MPC version 1.1.0, isl version isl-0.19-GMP
GGC heuristics: --param ggc-min-expand=100 --param ggc-min-heapsize=131072
-GNU Fortran2008 (Ubuntu 7.3.0-16ubuntu3) version 7.3.0 (x86_64-linux-gnu)
+GNU Fortran2008 (Ubuntu 7.3.0-27ubuntu1~18.04) version 7.3.0 (x86_64-linux-gnu)
compiled by GNU C version 7.3.0, GMP version 6.1.2, MPFR version 4.0.1, MPC version 1.1.0, isl version isl-0.19-GMP
GGC heuristics: --param ggc-min-expand=100 --param ggc-min-heapsize=131072
- /usr/bin/x86_64-linux-gnu-as -v --64 -o /tmp/ccqRfRh8.o /tmp/ccZ2TJYr.s
+ as -v --64 -o /tmp/ccUx2tT8.o /tmp/cc7MTn7T.s
GNU assembler version 2.30 (x86_64-linux-gnu) using BFD version (GNU Binutils for Ubuntu) 2.30
Reading specs from /usr/lib/gcc/x86_64-linux-gnu/7/libgfortran.spec
rename spec lib to liborig
- /usr/lib/gcc/x86_64-linux-gnu/7/collect2 -plugin /usr/lib/gcc/x86_64-linux-gnu/7/liblto_plugin.so -plugin-opt=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper -plugin-opt=-fresolution=/tmp/cci0NKBO.res -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lquadmath -plugin-opt=-pass-through=-lm -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc --sysroot=/ --build-id --eh-frame-hdr -m elf_x86_64 --hash-style=gnu --as-needed -dynamic-linker /lib64/ld-linux-x86-64.so.2 -pie -z now -z relro -o conftest /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/Scrt1.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. /tmp/ccqRfRh8.o -lgfortran -lm -lgcc_s -lgcc -lquadmath -lm -lgcc_s -lgcc -lc -lgcc_s -lgcc /usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o
+ /usr/lib/gcc/x86_64-linux-gnu/7/collect2 -plugin /usr/lib/gcc/x86_64-linux-gnu/7/liblto_plugin.so -plugin-opt=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper -plugin-opt=-fresolution=/tmp/cckpHiGn.res -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lquadmath -plugin-opt=-pass-through=-lm -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc -plugin-opt=-pass-through=-lc -plugin-opt=-pass-through=-lgcc_s -plugin-opt=-pass-through=-lgcc --sysroot=/ --build-id --eh-frame-hdr -m elf_x86_64 --hash-style=gnu --as-needed -dynamic-linker /lib64/ld-linux-x86-64.so.2 -pie -z now -z relro -o conftest /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/Scrt1.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. /tmp/ccUx2tT8.o -lgfortran -lm -lgcc_s -lgcc -lquadmath -lm -lgcc_s -lgcc -lc -lgcc_s -lgcc /usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o
configure:4517: result: -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath
configure:4579: checking how to print strings
configure:4606: result: printf
configure:4675: checking for gcc
configure:4691: found /usr/bin/gcc
configure:4702: result: gcc
configure:4931: checking for C compiler version
configure:4940: gcc --version >&5
-gcc (Ubuntu 7.3.0-16ubuntu3) 7.3.0
+gcc (Ubuntu 7.3.0-27ubuntu1~18.04) 7.3.0
Copyright (C) 2017 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO
warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
configure:4951: $? = 0
configure:4940: gcc -v >&5
Using built-in specs.
COLLECT_GCC=gcc
COLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/7/lto-wrapper
OFFLOAD_TARGET_NAMES=nvptx-none
OFFLOAD_TARGET_DEFAULT=1
Target: x86_64-linux-gnu
-Configured with: ../src/configure -v --with-pkgversion='Ubuntu 7.3.0-16ubuntu3' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --with-as=/usr/bin/x86_64-linux-gnu-as --with-ld=/usr/bin/x86_64-linux-gnu-ld --program-suffix=-7 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --with-sysroot=/ --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-libmpx --enable-plugin --enable-default-pie --with-system-zlib --with-target-system-zlib --enable-objc-gc=auto --enable-multiarch --disable-werror --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu
+Configured with: ../src/configure -v --with-pkgversion='Ubuntu 7.3.0-27ubuntu1~18.04' --with-bugurl=file:///usr/share/doc/gcc-7/README.Bugs --enable-languages=c,ada,c++,go,brig,d,fortran,objc,obj-c++ --prefix=/usr --with-gcc-major-version-only --program-suffix=-7 --program-prefix=x86_64-linux-gnu- --enable-shared --enable-linker-build-id --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --libdir=/usr/lib --enable-nls --with-sysroot=/ --enable-clocale=gnu --enable-libstdcxx-debug --enable-libstdcxx-time=yes --with-default-libstdcxx-abi=new --enable-gnu-unique-object --disable-vtable-verify --enable-libmpx --enable-plugin --enable-default-pie --with-system-zlib --with-target-system-zlib --enable-objc-gc=auto --enable-multiarch --disable-werror --with-arch-32=i686 --with-abi=m64 --with-multilib-list=m32,m64,mx32 --enable-multilib --with-tune=generic --enable-offload-targets=nvptx-none --without-cuda-driver --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu
Thread model: posix
-gcc version 7.3.0 (Ubuntu 7.3.0-16ubuntu3)
+gcc version 7.3.0 (Ubuntu 7.3.0-27ubuntu1~18.04)
configure:4951: $? = 0
configure:4940: gcc -V >&5
gcc: error: unrecognized command line option '-V'
gcc: fatal error: no input files
compilation terminated.
configure:4951: $? = 1
configure:4940: gcc -qversion >&5
gcc: error: unrecognized command line option '-qversion'; did you mean '--version'?
gcc: fatal error: no input files
compilation terminated.
configure:4951: $? = 1
configure:4955: checking whether we are using the GNU C compiler
configure:4974: gcc -c conftest.c >&5
configure:4974: $? = 0
configure:4983: result: yes
configure:4992: checking whether gcc accepts -g
configure:5012: gcc -c -g conftest.c >&5
configure:5012: $? = 0
configure:5053: result: yes
configure:5070: checking for gcc option to accept ISO C89
configure:5133: gcc -c -g -O2 conftest.c >&5
configure:5133: $? = 0
configure:5146: result: none needed
configure:5171: checking whether gcc understands -c and -o together
configure:5193: gcc -c conftest.c -o conftest2.o
configure:5196: $? = 0
configure:5193: gcc -c conftest.c -o conftest2.o
configure:5196: $? = 0
configure:5208: result: yes
configure:5227: checking dependency style of gcc
configure:5338: result: gcc3
configure:5353: checking for a sed that does not truncate output
configure:5417: result: /bin/sed
configure:5435: checking for grep that handles long lines and -e
configure:5493: result: /bin/grep
configure:5498: checking for egrep
configure:5560: result: /bin/grep -E
configure:5565: checking for fgrep
configure:5627: result: /bin/grep -F
configure:5662: checking for ld used by gcc
-configure:5729: result: /usr/bin/x86_64-linux-gnu-ld
-configure:5736: checking if the linker (/usr/bin/x86_64-linux-gnu-ld) is GNU ld
+configure:5729: result: /usr/bin/ld
+configure:5736: checking if the linker (/usr/bin/ld) is GNU ld
configure:5751: result: yes
configure:5763: checking for BSD- or MS-compatible name lister (nm)
configure:5817: result: /usr/bin/nm -B
configure:5947: checking the name lister (/usr/bin/nm -B) interface
configure:5954: gcc -c -g -O2 conftest.c >&5
configure:5957: /usr/bin/nm -B "conftest.o"
configure:5960: output
0000000000000000 B some_variable
configure:5967: result: BSD nm
configure:5970: checking whether ln -s works
configure:5974: result: yes
configure:5982: checking the maximum length of command line arguments
configure:6113: result: 1572864
configure:6161: checking how to convert x86_64-pc-linux-gnu file names to x86_64-pc-linux-gnu format
configure:6201: result: func_convert_file_noop
configure:6208: checking how to convert x86_64-pc-linux-gnu file names to toolchain format
configure:6228: result: func_convert_file_noop
-configure:6235: checking for /usr/bin/x86_64-linux-gnu-ld option to reload object files
+configure:6235: checking for /usr/bin/ld option to reload object files
configure:6242: result: -r
configure:6316: checking for objdump
configure:6332: found /usr/bin/objdump
configure:6343: result: objdump
configure:6375: checking how to recognize dependent libraries
configure:6575: result: pass_all
configure:6660: checking for dlltool
configure:6690: result: no
configure:6720: checking how to associate runtime and link libraries
configure:6747: result: printf %s\n
configure:6808: checking for ar
configure:6824: found /usr/bin/ar
configure:6835: result: ar
configure:6872: checking for archiver @FILE support
configure:6889: gcc -c -g -O2 conftest.c >&5
configure:6889: $? = 0
configure:6892: ar cru libconftest.a @conftest.lst >&5
ar: `u' modifier ignored since `D' is the default (see `U')
configure:6895: $? = 0
configure:6900: ar cru libconftest.a @conftest.lst >&5
ar: `u' modifier ignored since `D' is the default (see `U')
ar: conftest.o: No such file or directory
configure:6903: $? = 1
configure:6915: result: @
configure:6973: checking for strip
configure:6989: found /usr/bin/strip
configure:7000: result: strip
configure:7072: checking for ranlib
configure:7088: found /usr/bin/ranlib
configure:7099: result: ranlib
configure:7201: checking command to parse /usr/bin/nm -B output from gcc object
configure:7354: gcc -c -g -O2 conftest.c >&5
configure:7357: $? = 0
configure:7361: /usr/bin/nm -B conftest.o \| sed -n -e 's/^.*[ ]\([ABCDGIRSTW][ABCDGIRSTW]*\)[ ][ ]*\([_A-Za-z][_A-Za-z0-9]*\)$/\1 \2 \2/p' | sed '/ __gnu_lto/d' \> conftest.nm
configure:7364: $? = 0
configure:7430: gcc -o conftest -g -O2 conftest.c conftstm.o >&5
configure:7433: $? = 0
configure:7471: result: ok
configure:7518: checking for sysroot
configure:7548: result: no
configure:7555: checking for a working dd
configure:7593: result: /bin/dd
configure:7597: checking how to truncate binary pipes
configure:7612: result: /bin/dd bs=4096 count=1
configure:7748: gcc -c -g -O2 conftest.c >&5
configure:7751: $? = 0
configure:7941: checking for mt
configure:7957: found /bin/mt
configure:7968: result: mt
configure:7991: checking if mt is a manifest tool
configure:7997: mt '-?'
configure:8005: result: no
configure:8682: checking how to run the C preprocessor
configure:8713: gcc -E conftest.c
configure:8713: $? = 0
configure:8727: gcc -E conftest.c
conftest.c:11:10: fatal error: ac_nonexistent.h: No such file or directory
#include
^~~~~~~~~~~~~~~~~~
compilation terminated.
configure:8727: $? = 1
configure: failed program was:
| /* confdefs.h */
| #define PACKAGE_NAME "npstat"
| #define PACKAGE_TARNAME "npstat"
| #define PACKAGE_VERSION "4.9.0"
| #define PACKAGE_STRING "npstat 4.9.0"
| #define PACKAGE_BUGREPORT ""
| #define PACKAGE_URL ""
| #define PACKAGE "npstat"
| #define VERSION "4.9.0"
| /* end confdefs.h. */
| #include
configure:8752: result: gcc -E
configure:8772: gcc -E conftest.c
configure:8772: $? = 0
configure:8786: gcc -E conftest.c
conftest.c:11:10: fatal error: ac_nonexistent.h: No such file or directory
#include
^~~~~~~~~~~~~~~~~~
compilation terminated.
configure:8786: $? = 1
configure: failed program was:
| /* confdefs.h */
| #define PACKAGE_NAME "npstat"
| #define PACKAGE_TARNAME "npstat"
| #define PACKAGE_VERSION "4.9.0"
| #define PACKAGE_STRING "npstat 4.9.0"
| #define PACKAGE_BUGREPORT ""
| #define PACKAGE_URL ""
| #define PACKAGE "npstat"
| #define VERSION "4.9.0"
| /* end confdefs.h. */
| #include
configure:8815: checking for ANSI C header files
configure:8835: gcc -c -g -O2 conftest.c >&5
configure:8835: $? = 0
configure:8908: gcc -o conftest -g -O2 conftest.c >&5
configure:8908: $? = 0
configure:8908: ./conftest
configure:8908: $? = 0
configure:8919: result: yes
configure:8932: checking for sys/types.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8932: checking for sys/stat.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8932: checking for stdlib.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8932: checking for string.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8932: checking for memory.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8932: checking for strings.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8932: checking for inttypes.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8932: checking for stdint.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8932: checking for unistd.h
configure:8932: gcc -c -g -O2 conftest.c >&5
configure:8932: $? = 0
configure:8932: result: yes
configure:8946: checking for dlfcn.h
configure:8946: gcc -c -g -O2 conftest.c >&5
configure:8946: $? = 0
configure:8946: result: yes
configure:9213: checking for objdir
configure:9228: result: .libs
configure:9492: checking if gcc supports -fno-rtti -fno-exceptions
configure:9510: gcc -c -g -O2 -fno-rtti -fno-exceptions conftest.c >&5
cc1: warning: command line option '-fno-rtti' is valid for C++/ObjC++ but not for C
configure:9514: $? = 0
configure:9527: result: no
configure:9885: checking for gcc option to produce PIC
configure:9892: result: -fPIC -DPIC
configure:9900: checking if gcc PIC flag -fPIC -DPIC works
configure:9918: gcc -c -g -O2 -fPIC -DPIC -DPIC conftest.c >&5
configure:9922: $? = 0
configure:9935: result: yes
configure:9964: checking if gcc static flag -static works
configure:9992: result: yes
configure:10007: checking if gcc supports -c -o file.o
configure:10028: gcc -c -g -O2 -o out/conftest2.o conftest.c >&5
configure:10032: $? = 0
configure:10054: result: yes
configure:10062: checking if gcc supports -c -o file.o
configure:10109: result: yes
-configure:10142: checking whether the gcc linker (/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64) supports shared libraries
+configure:10142: checking whether the gcc linker (/usr/bin/ld -m elf_x86_64) supports shared libraries
configure:11401: result: yes
configure:11438: checking whether -lc should be explicitly linked in
configure:11446: gcc -c -g -O2 conftest.c >&5
configure:11449: $? = 0
configure:11464: gcc -shared -fPIC -DPIC conftest.o -v -Wl,-soname -Wl,conftest -o conftest 2\>\&1 \| /bin/grep -lc \>/dev/null 2\>\&1
configure:11467: $? = 0
configure:11481: result: no
configure:11641: checking dynamic linker characteristics
configure:12222: gcc -o conftest -g -O2 -Wl,-rpath -Wl,/foo conftest.c >&5
configure:12222: $? = 0
configure:12459: result: GNU/Linux ld.so
configure:12581: checking how to hardcode library paths into programs
configure:12606: result: immediate
configure:13154: checking whether stripping libraries is possible
configure:13159: result: yes
configure:13194: checking if libtool supports shared libraries
configure:13196: result: yes
configure:13199: checking whether to build shared libraries
configure:13224: result: yes
configure:13227: checking whether to build static libraries
configure:13231: result: yes
configure:13254: checking how to run the C++ preprocessor
configure:13281: g++ -E conftest.cpp
configure:13281: $? = 0
configure:13295: g++ -E conftest.cpp
conftest.cpp:23:10: fatal error: ac_nonexistent.h: No such file or directory
#include
^~~~~~~~~~~~~~~~~~
compilation terminated.
configure:13295: $? = 1
configure: failed program was:
| /* confdefs.h */
| #define PACKAGE_NAME "npstat"
| #define PACKAGE_TARNAME "npstat"
| #define PACKAGE_VERSION "4.9.0"
| #define PACKAGE_STRING "npstat 4.9.0"
| #define PACKAGE_BUGREPORT ""
| #define PACKAGE_URL ""
| #define PACKAGE "npstat"
| #define VERSION "4.9.0"
| #define STDC_HEADERS 1
| #define HAVE_SYS_TYPES_H 1
| #define HAVE_SYS_STAT_H 1
| #define HAVE_STDLIB_H 1
| #define HAVE_STRING_H 1
| #define HAVE_MEMORY_H 1
| #define HAVE_STRINGS_H 1
| #define HAVE_INTTYPES_H 1
| #define HAVE_STDINT_H 1
| #define HAVE_UNISTD_H 1
| #define HAVE_DLFCN_H 1
| #define LT_OBJDIR ".libs/"
| /* end confdefs.h. */
| #include
configure:13320: result: g++ -E
configure:13340: g++ -E conftest.cpp
configure:13340: $? = 0
configure:13354: g++ -E conftest.cpp
conftest.cpp:23:10: fatal error: ac_nonexistent.h: No such file or directory
#include
^~~~~~~~~~~~~~~~~~
compilation terminated.
configure:13354: $? = 1
configure: failed program was:
| /* confdefs.h */
| #define PACKAGE_NAME "npstat"
| #define PACKAGE_TARNAME "npstat"
| #define PACKAGE_VERSION "4.9.0"
| #define PACKAGE_STRING "npstat 4.9.0"
| #define PACKAGE_BUGREPORT ""
| #define PACKAGE_URL ""
| #define PACKAGE "npstat"
| #define VERSION "4.9.0"
| #define STDC_HEADERS 1
| #define HAVE_SYS_TYPES_H 1
| #define HAVE_SYS_STAT_H 1
| #define HAVE_STDLIB_H 1
| #define HAVE_STRING_H 1
| #define HAVE_MEMORY_H 1
| #define HAVE_STRINGS_H 1
| #define HAVE_INTTYPES_H 1
| #define HAVE_STDINT_H 1
| #define HAVE_UNISTD_H 1
| #define HAVE_DLFCN_H 1
| #define LT_OBJDIR ".libs/"
| /* end confdefs.h. */
| #include
configure:13516: checking for ld used by g++
-configure:13583: result: /usr/bin/x86_64-linux-gnu-ld -m elf_x86_64
-configure:13590: checking if the linker (/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64) is GNU ld
+configure:13583: result: /usr/bin/ld -m elf_x86_64
+configure:13590: checking if the linker (/usr/bin/ld -m elf_x86_64) is GNU ld
configure:13605: result: yes
-configure:13660: checking whether the g++ linker (/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64) supports shared libraries
+configure:13660: checking whether the g++ linker (/usr/bin/ld -m elf_x86_64) supports shared libraries
configure:14733: result: yes
configure:14769: g++ -c -std=c++11 -O3 -Wall -W -Werror conftest.cpp >&5
configure:14772: $? = 0
configure:15253: checking for g++ option to produce PIC
configure:15260: result: -fPIC -DPIC
configure:15268: checking if g++ PIC flag -fPIC -DPIC works
configure:15286: g++ -c -std=c++11 -O3 -Wall -W -Werror -fPIC -DPIC -DPIC conftest.cpp >&5
configure:15290: $? = 0
configure:15303: result: yes
configure:15326: checking if g++ static flag -static works
configure:15354: result: yes
configure:15366: checking if g++ supports -c -o file.o
configure:15387: g++ -c -std=c++11 -O3 -Wall -W -Werror -o out/conftest2.o conftest.cpp >&5
configure:15391: $? = 0
configure:15413: result: yes
configure:15418: checking if g++ supports -c -o file.o
configure:15465: result: yes
-configure:15495: checking whether the g++ linker (/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64) supports shared libraries
+configure:15495: checking whether the g++ linker (/usr/bin/ld -m elf_x86_64) supports shared libraries
configure:15535: result: yes
configure:15676: checking dynamic linker characteristics
configure:16421: result: GNU/Linux ld.so
configure:16486: checking how to hardcode library paths into programs
configure:16511: result: immediate
configure:16652: checking if libtool supports shared libraries
configure:16654: result: yes
configure:16657: checking whether to build shared libraries
configure:16681: result: yes
configure:16684: checking whether to build static libraries
configure:16688: result: yes
configure:17040: checking for g77 option to produce PIC
configure:17047: result: -fPIC
configure:17055: checking if g77 PIC flag -fPIC works
configure:17073: g77 -c -g -O2 -fPIC conftest.f >&5
configure:17077: $? = 0
configure:17090: result: yes
configure:17113: checking if g77 static flag -static works
configure:17141: result: yes
configure:17153: checking if g77 supports -c -o file.o
configure:17174: g77 -c -g -O2 -o out/conftest2.o conftest.f >&5
configure:17178: $? = 0
configure:17200: result: yes
configure:17205: checking if g77 supports -c -o file.o
configure:17252: result: yes
-configure:17282: checking whether the g77 linker (/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64) supports shared libraries
+configure:17282: checking whether the g77 linker (/usr/bin/ld -m elf_x86_64) supports shared libraries
configure:18491: result: yes
configure:18632: checking dynamic linker characteristics
configure:19371: result: GNU/Linux ld.so
configure:19436: checking how to hardcode library paths into programs
configure:19461: result: immediate
configure:19661: checking that generated files are newer than configure
configure:19667: result: done
configure:19694: creating ./config.status
## ---------------------- ##
## Running config.status. ##
## ---------------------- ##
This file was extended by npstat config.status 4.9.0, which was
generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES =
CONFIG_HEADERS =
CONFIG_LINKS =
CONFIG_COMMANDS =
$ ./config.status
on dawn
config.status:1142: creating Makefile
config.status:1142: creating npstat/nm/Makefile
config.status:1142: creating npstat/rng/Makefile
config.status:1142: creating npstat/stat/Makefile
config.status:1142: creating npstat/wrap/Makefile
config.status:1142: creating npstat/interfaces/Makefile
config.status:1142: creating npstat/emsunfold/Makefile
config.status:1142: creating npstat/Makefile
config.status:1142: creating examples/C++/Makefile
config.status:1142: creating npstat.pc
config.status:1314: executing depfiles commands
config.status:1314: executing libtool commands
## ---------------- ##
## Cache variables. ##
## ---------------- ##
ac_cv_build=x86_64-pc-linux-gnu
ac_cv_c_compiler_gnu=yes
ac_cv_cxx_compiler_gnu=yes
ac_cv_env_CCC_set=
ac_cv_env_CCC_value=
ac_cv_env_CC_set=
ac_cv_env_CC_value=
ac_cv_env_CFLAGS_set=
ac_cv_env_CFLAGS_value=
ac_cv_env_CPPFLAGS_set=
ac_cv_env_CPPFLAGS_value=
ac_cv_env_CPP_set=
ac_cv_env_CPP_value=
ac_cv_env_CXXCPP_set=
ac_cv_env_CXXCPP_value=
ac_cv_env_CXXFLAGS_set=set
ac_cv_env_CXXFLAGS_value='-std=c++11 -O3 -Wall -W -Werror'
ac_cv_env_CXX_set=
ac_cv_env_CXX_value=
ac_cv_env_DEPS_CFLAGS_set=
ac_cv_env_DEPS_CFLAGS_value=
ac_cv_env_DEPS_LIBS_set=
ac_cv_env_DEPS_LIBS_value=
ac_cv_env_F77_set=
ac_cv_env_F77_value=
ac_cv_env_FFLAGS_set=
ac_cv_env_FFLAGS_value=
ac_cv_env_LDFLAGS_set=
ac_cv_env_LDFLAGS_value=
ac_cv_env_LIBS_set=
ac_cv_env_LIBS_value=
ac_cv_env_LT_SYS_LIBRARY_PATH_set=
ac_cv_env_LT_SYS_LIBRARY_PATH_value=
ac_cv_env_PKG_CONFIG_LIBDIR_set=
ac_cv_env_PKG_CONFIG_LIBDIR_value=
ac_cv_env_PKG_CONFIG_PATH_set=set
ac_cv_env_PKG_CONFIG_PATH_value=/usr/local/lib/pkgconfig
ac_cv_env_PKG_CONFIG_set=
ac_cv_env_PKG_CONFIG_value=
ac_cv_env_build_alias_set=
ac_cv_env_build_alias_value=
ac_cv_env_host_alias_set=
ac_cv_env_host_alias_value=
ac_cv_env_target_alias_set=
ac_cv_env_target_alias_value=
ac_cv_f77_compiler_gnu=yes
ac_cv_f77_libs=' -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath'
ac_cv_header_dlfcn_h=yes
ac_cv_header_inttypes_h=yes
ac_cv_header_memory_h=yes
ac_cv_header_stdc=yes
ac_cv_header_stdint_h=yes
ac_cv_header_stdlib_h=yes
ac_cv_header_string_h=yes
ac_cv_header_strings_h=yes
ac_cv_header_sys_stat_h=yes
ac_cv_header_sys_types_h=yes
ac_cv_header_unistd_h=yes
ac_cv_host=x86_64-pc-linux-gnu
ac_cv_objext=o
ac_cv_path_EGREP='/bin/grep -E'
ac_cv_path_FGREP='/bin/grep -F'
ac_cv_path_GREP=/bin/grep
ac_cv_path_SED=/bin/sed
ac_cv_path_ac_pt_PKG_CONFIG=/usr/bin/pkg-config
ac_cv_path_install='/usr/bin/install -c'
ac_cv_path_lt_DD=/bin/dd
ac_cv_path_mkdir=/bin/mkdir
ac_cv_prog_AWK=gawk
ac_cv_prog_CPP='gcc -E'
ac_cv_prog_CXXCPP='g++ -E'
ac_cv_prog_ac_ct_AR=ar
ac_cv_prog_ac_ct_CC=gcc
ac_cv_prog_ac_ct_CXX=g++
ac_cv_prog_ac_ct_F77=g77
ac_cv_prog_ac_ct_MANIFEST_TOOL=mt
ac_cv_prog_ac_ct_OBJDUMP=objdump
ac_cv_prog_ac_ct_RANLIB=ranlib
ac_cv_prog_ac_ct_STRIP=strip
ac_cv_prog_cc_c89=
ac_cv_prog_cc_g=yes
ac_cv_prog_cxx_g=yes
ac_cv_prog_f77_g=yes
ac_cv_prog_f77_v=-v
ac_cv_prog_make_make_set=yes
am_cv_CC_dependencies_compiler_type=gcc3
am_cv_CXX_dependencies_compiler_type=gcc3
am_cv_make_support_nested_variables=yes
am_cv_prog_cc_c_o=yes
lt_cv_ar_at_file=@
lt_cv_archive_cmds_need_lc=no
lt_cv_deplibs_check_method=pass_all
lt_cv_file_magic_cmd='$MAGIC_CMD'
lt_cv_file_magic_test_file=
lt_cv_ld_reload_flag=-r
lt_cv_nm_interface='BSD nm'
lt_cv_objdir=.libs
-lt_cv_path_LD=/usr/bin/x86_64-linux-gnu-ld
-lt_cv_path_LDCXX='/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64'
+lt_cv_path_LD=/usr/bin/ld
+lt_cv_path_LDCXX='/usr/bin/ld -m elf_x86_64'
lt_cv_path_NM='/usr/bin/nm -B'
lt_cv_path_mainfest_tool=no
lt_cv_prog_compiler_c_o=yes
lt_cv_prog_compiler_c_o_CXX=yes
lt_cv_prog_compiler_c_o_F77=yes
lt_cv_prog_compiler_pic='-fPIC -DPIC'
lt_cv_prog_compiler_pic_CXX='-fPIC -DPIC'
lt_cv_prog_compiler_pic_F77=-fPIC
lt_cv_prog_compiler_pic_works=yes
lt_cv_prog_compiler_pic_works_CXX=yes
lt_cv_prog_compiler_pic_works_F77=yes
lt_cv_prog_compiler_rtti_exceptions=no
lt_cv_prog_compiler_static_works=yes
lt_cv_prog_compiler_static_works_CXX=yes
lt_cv_prog_compiler_static_works_F77=yes
lt_cv_prog_gnu_ld=yes
lt_cv_prog_gnu_ldcxx=yes
lt_cv_sharedlib_from_linklib_cmd='printf %s\n'
lt_cv_shlibpath_overrides_runpath=yes
lt_cv_sys_global_symbol_pipe='sed -n -e '\''s/^.*[ ]\([ABCDGIRSTW][ABCDGIRSTW]*\)[ ][ ]*\([_A-Za-z][_A-Za-z0-9]*\)$/\1 \2 \2/p'\'' | sed '\''/ __gnu_lto/d'\'''
lt_cv_sys_global_symbol_to_c_name_address='sed -n -e '\''s/^: \(.*\) .*$/ {"\1", (void *) 0},/p'\'' -e '\''s/^[ABCDGIRSTW][ABCDGIRSTW]* .* \(.*\)$/ {"\1", (void *) \&\1},/p'\'''
lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='sed -n -e '\''s/^: \(.*\) .*$/ {"\1", (void *) 0},/p'\'' -e '\''s/^[ABCDGIRSTW][ABCDGIRSTW]* .* \(lib.*\)$/ {"\1", (void *) \&\1},/p'\'' -e '\''s/^[ABCDGIRSTW][ABCDGIRSTW]* .* \(.*\)$/ {"lib\1", (void *) \&\1},/p'\'''
lt_cv_sys_global_symbol_to_cdecl='sed -n -e '\''s/^T .* \(.*\)$/extern int \1();/p'\'' -e '\''s/^[ABCDGIRSTW][ABCDGIRSTW]* .* \(.*\)$/extern char \1;/p'\'''
lt_cv_sys_global_symbol_to_import=
lt_cv_sys_max_cmd_len=1572864
lt_cv_to_host_file_cmd=func_convert_file_noop
lt_cv_to_tool_file_cmd=func_convert_file_noop
lt_cv_truncate_bin='/bin/dd bs=4096 count=1'
pkg_cv_DEPS_CFLAGS=-I/usr/local/include
pkg_cv_DEPS_LIBS='-L/usr/local/lib -lfftw3 -lgeners -lkstest'
## ----------------- ##
## Output variables. ##
## ----------------- ##
ACLOCAL='${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15'
AMDEPBACKSLASH='\'
AMDEP_FALSE='#'
AMDEP_TRUE=''
AMTAR='$${TAR-tar}'
AM_BACKSLASH='\'
AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)'
AM_DEFAULT_VERBOSITY='1'
AM_V='$(V)'
AR='ar'
AUTOCONF='${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf'
AUTOHEADER='${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader'
AUTOMAKE='${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15'
AWK='gawk'
CC='gcc'
CCDEPMODE='depmode=gcc3'
CFLAGS='-g -O2'
CPP='gcc -E'
CPPFLAGS=''
CXX='g++'
CXXCPP='g++ -E'
CXXDEPMODE='depmode=gcc3'
CXXFLAGS='-std=c++11 -O3 -Wall -W -Werror'
CYGPATH_W='echo'
DEFS='-DPACKAGE_NAME=\"npstat\" -DPACKAGE_TARNAME=\"npstat\" -DPACKAGE_VERSION=\"4.9.0\" -DPACKAGE_STRING=\"npstat\ 4.9.0\" -DPACKAGE_BUGREPORT=\"\" -DPACKAGE_URL=\"\" -DPACKAGE=\"npstat\" -DVERSION=\"4.9.0\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1 -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\".libs/\"'
DEPDIR='.deps'
DEPS_CFLAGS='-I/usr/local/include'
DEPS_LIBS='-L/usr/local/lib -lfftw3 -lgeners -lkstest'
DLLTOOL='false'
DSYMUTIL=''
DUMPBIN=''
ECHO_C=''
ECHO_N='-n'
ECHO_T=''
EGREP='/bin/grep -E'
EXEEXT=''
F77='g77'
FFLAGS='-g -O2'
FGREP='/bin/grep -F'
FLIBS=' -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmath'
GREP='/bin/grep'
INSTALL_DATA='${INSTALL} -m 644'
INSTALL_PROGRAM='${INSTALL}'
INSTALL_SCRIPT='${INSTALL}'
INSTALL_STRIP_PROGRAM='$(install_sh) -c -s'
-LD='/usr/bin/x86_64-linux-gnu-ld -m elf_x86_64'
+LD='/usr/bin/ld -m elf_x86_64'
LDFLAGS=''
LIBOBJS=''
LIBS=''
LIBTOOL='$(SHELL) $(top_builddir)/libtool'
LIPO=''
LN_S='ln -s'
LTLIBOBJS=''
LT_SYS_LIBRARY_PATH=''
MAKEINFO='${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo'
MANIFEST_TOOL=':'
MKDIR_P='/bin/mkdir -p'
NM='/usr/bin/nm -B'
NMEDIT=''
OBJDUMP='objdump'
OBJEXT='o'
OTOOL64=''
OTOOL=''
PACKAGE='npstat'
PACKAGE_BUGREPORT=''
PACKAGE_NAME='npstat'
PACKAGE_STRING='npstat 4.9.0'
PACKAGE_TARNAME='npstat'
PACKAGE_URL=''
PACKAGE_VERSION='4.9.0'
PATH_SEPARATOR=':'
PKG_CONFIG='/usr/bin/pkg-config'
PKG_CONFIG_LIBDIR=''
PKG_CONFIG_PATH='/usr/local/lib/pkgconfig'
RANLIB='ranlib'
SED='/bin/sed'
SET_MAKE=''
SHELL='/bin/bash'
STRIP='strip'
VERSION='4.9.0'
ac_ct_AR='ar'
ac_ct_CC='gcc'
ac_ct_CXX='g++'
ac_ct_DUMPBIN=''
ac_ct_F77='g77'
am__EXEEXT_FALSE=''
am__EXEEXT_TRUE='#'
am__fastdepCC_FALSE='#'
am__fastdepCC_TRUE=''
am__fastdepCXX_FALSE='#'
am__fastdepCXX_TRUE=''
am__include='include'
am__isrc=''
am__leading_dot='.'
am__nodep='_no'
am__quote=''
am__tar='$${TAR-tar} chof - "$$tardir"'
am__untar='$${TAR-tar} xf -'
bindir='${exec_prefix}/bin'
build='x86_64-pc-linux-gnu'
build_alias=''
build_cpu='x86_64'
build_os='linux-gnu'
build_vendor='pc'
datadir='${datarootdir}'
datarootdir='${prefix}/share'
docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
dvidir='${docdir}'
exec_prefix='${prefix}'
host='x86_64-pc-linux-gnu'
host_alias=''
host_cpu='x86_64'
host_os='linux-gnu'
host_vendor='pc'
htmldir='${docdir}'
includedir='${prefix}/include'
infodir='${datarootdir}/info'
install_sh='${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh'
libdir='${exec_prefix}/lib'
libexecdir='${exec_prefix}/libexec'
localedir='${datarootdir}/locale'
localstatedir='${prefix}/var'
mandir='${datarootdir}/man'
mkdir_p='$(MKDIR_P)'
oldincludedir='/usr/include'
pdfdir='${docdir}'
prefix='/usr/local'
program_transform_name='s,x,x,'
psdir='${docdir}'
runstatedir='${localstatedir}/run'
sbindir='${exec_prefix}/sbin'
sharedstatedir='${prefix}/com'
sysconfdir='${prefix}/etc'
target_alias=''
## ----------- ##
## confdefs.h. ##
## ----------- ##
/* confdefs.h */
#define PACKAGE_NAME "npstat"
#define PACKAGE_TARNAME "npstat"
#define PACKAGE_VERSION "4.9.0"
#define PACKAGE_STRING "npstat 4.9.0"
#define PACKAGE_BUGREPORT ""
#define PACKAGE_URL ""
#define PACKAGE "npstat"
#define VERSION "4.9.0"
#define STDC_HEADERS 1
#define HAVE_SYS_TYPES_H 1
#define HAVE_SYS_STAT_H 1
#define HAVE_STDLIB_H 1
#define HAVE_STRING_H 1
#define HAVE_MEMORY_H 1
#define HAVE_STRINGS_H 1
#define HAVE_INTTYPES_H 1
#define HAVE_STDINT_H 1
#define HAVE_UNISTD_H 1
#define HAVE_DLFCN_H 1
#define LT_OBJDIR ".libs/"
configure: exit 0
Index: trunk/config.status
===================================================================
--- trunk/config.status (revision 579)
+++ trunk/config.status (revision 580)
@@ -1,2282 +1,2282 @@
#! /bin/bash
# Generated by configure.
# Run this file to recreate the current configuration.
# Compiler output produced by configure, useful for debugging
# configure, is in config.log if it exists.
debug=false
ac_cs_recheck=false
ac_cs_silent=false
SHELL=${CONFIG_SHELL-/bin/bash}
export SHELL
## -------------------- ##
## M4sh Initialization. ##
## -------------------- ##
# Be more Bourne compatible
DUALCASE=1; export DUALCASE # for MKS sh
if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
emulate sh
NULLCMD=:
# Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
# is contrary to our usage. Disable this feature.
alias -g '${1+"$@"}'='"$@"'
setopt NO_GLOB_SUBST
else
case `(set -o) 2>/dev/null` in #(
*posix*) :
set -o posix ;; #(
*) :
;;
esac
fi
as_nl='
'
export as_nl
# Printing a long string crashes Solaris 7 /usr/bin/printf.
as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
# Prefer a ksh shell builtin over an external printf program on Solaris,
# but without wasting forks for bash or zsh.
if test -z "$BASH_VERSION$ZSH_VERSION" \
&& (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
as_echo='print -r --'
as_echo_n='print -rn --'
elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
as_echo='printf %s\n'
as_echo_n='printf %s'
else
if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
as_echo_n='/usr/ucb/echo -n'
else
as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
as_echo_n_body='eval
arg=$1;
case $arg in #(
*"$as_nl"*)
expr "X$arg" : "X\\(.*\\)$as_nl";
arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
esac;
expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
'
export as_echo_n_body
as_echo_n='sh -c $as_echo_n_body as_echo'
fi
export as_echo_body
as_echo='sh -c $as_echo_body as_echo'
fi
# The user is always right.
if test "${PATH_SEPARATOR+set}" != set; then
PATH_SEPARATOR=:
(PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
(PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
PATH_SEPARATOR=';'
}
fi
# IFS
# We need space, tab and new line, in precisely that order. Quoting is
# there to prevent editors from complaining about space-tab.
# (If _AS_PATH_WALK were called with IFS unset, it would disable word
# splitting by setting IFS to empty value.)
IFS=" "" $as_nl"
# Find who we are. Look in the path if we contain no directory separator.
as_myself=
case $0 in #((
*[\\/]* ) as_myself=$0 ;;
*) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
for as_dir in $PATH
do
IFS=$as_save_IFS
test -z "$as_dir" && as_dir=.
test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
done
IFS=$as_save_IFS
;;
esac
# We did not find ourselves, most probably we were run as `sh COMMAND'
# in which case we are not to be found in the path.
if test "x$as_myself" = x; then
as_myself=$0
fi
if test ! -f "$as_myself"; then
$as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
exit 1
fi
# Unset variables that we do not need and which cause bugs (e.g. in
# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1"
# suppresses any "Segmentation fault" message there. '((' could
# trigger a bug in pdksh 5.2.14.
for as_var in BASH_ENV ENV MAIL MAILPATH
do eval test x\${$as_var+set} = xset \
&& ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
done
PS1='$ '
PS2='> '
PS4='+ '
# NLS nuisances.
LC_ALL=C
export LC_ALL
LANGUAGE=C
export LANGUAGE
# CDPATH.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
# as_fn_error STATUS ERROR [LINENO LOG_FD]
# ----------------------------------------
# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
# script with STATUS, using 1 if that was 0.
as_fn_error ()
{
as_status=$1; test $as_status -eq 0 && as_status=1
if test "$4"; then
as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
$as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
fi
$as_echo "$as_me: error: $2" >&2
as_fn_exit $as_status
} # as_fn_error
# as_fn_set_status STATUS
# -----------------------
# Set $? to STATUS, without forking.
as_fn_set_status ()
{
return $1
} # as_fn_set_status
# as_fn_exit STATUS
# -----------------
# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
as_fn_exit ()
{
set +e
as_fn_set_status $1
exit $1
} # as_fn_exit
# as_fn_unset VAR
# ---------------
# Portably unset VAR.
as_fn_unset ()
{
{ eval $1=; unset $1;}
}
as_unset=as_fn_unset
# as_fn_append VAR VALUE
# ----------------------
# Append the text in VALUE to the end of the definition contained in VAR. Take
# advantage of any shell optimizations that allow amortized linear growth over
# repeated appends, instead of the typical quadratic growth present in naive
# implementations.
if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
eval 'as_fn_append ()
{
eval $1+=\$2
}'
else
as_fn_append ()
{
eval $1=\$$1\$2
}
fi # as_fn_append
# as_fn_arith ARG...
# ------------------
# Perform arithmetic evaluation on the ARGs, and store the result in the
# global $as_val. Take advantage of shells that can avoid forks. The arguments
# must be portable across $(()) and expr.
if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
eval 'as_fn_arith ()
{
as_val=$(( $* ))
}'
else
as_fn_arith ()
{
as_val=`expr "$@" || test $? -eq 1`
}
fi # as_fn_arith
if expr a : '\(a\)' >/dev/null 2>&1 &&
test "X`expr 00001 : '.*\(...\)'`" = X001; then
as_expr=expr
else
as_expr=false
fi
if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
as_basename=basename
else
as_basename=false
fi
if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
as_dirname=dirname
else
as_dirname=false
fi
as_me=`$as_basename -- "$0" ||
$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
X"$0" : 'X\(//\)$' \| \
X"$0" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X/"$0" |
sed '/^.*\/\([^/][^/]*\)\/*$/{
s//\1/
q
}
/^X\/\(\/\/\)$/{
s//\1/
q
}
/^X\/\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
# Avoid depending upon Character Ranges.
as_cr_letters='abcdefghijklmnopqrstuvwxyz'
as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
as_cr_Letters=$as_cr_letters$as_cr_LETTERS
as_cr_digits='0123456789'
as_cr_alnum=$as_cr_Letters$as_cr_digits
ECHO_C= ECHO_N= ECHO_T=
case `echo -n x` in #(((((
-n*)
case `echo 'xy\c'` in
*c*) ECHO_T=' ';; # ECHO_T is single tab character.
xy) ECHO_C='\c';;
*) echo `echo ksh88 bug on AIX 6.1` > /dev/null
ECHO_T=' ';;
esac;;
*)
ECHO_N='-n';;
esac
rm -f conf$$ conf$$.exe conf$$.file
if test -d conf$$.dir; then
rm -f conf$$.dir/conf$$.file
else
rm -f conf$$.dir
mkdir conf$$.dir 2>/dev/null
fi
if (echo >conf$$.file) 2>/dev/null; then
if ln -s conf$$.file conf$$ 2>/dev/null; then
as_ln_s='ln -s'
# ... but there are two gotchas:
# 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
# 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
# In both cases, we have to default to `cp -pR'.
ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
as_ln_s='cp -pR'
elif ln conf$$.file conf$$ 2>/dev/null; then
as_ln_s=ln
else
as_ln_s='cp -pR'
fi
else
as_ln_s='cp -pR'
fi
rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
rmdir conf$$.dir 2>/dev/null
# as_fn_mkdir_p
# -------------
# Create "$as_dir" as a directory, including parents if necessary.
as_fn_mkdir_p ()
{
case $as_dir in #(
-*) as_dir=./$as_dir;;
esac
test -d "$as_dir" || eval $as_mkdir_p || {
as_dirs=
while :; do
case $as_dir in #(
*\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
*) as_qdir=$as_dir;;
esac
as_dirs="'$as_qdir' $as_dirs"
as_dir=`$as_dirname -- "$as_dir" ||
$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$as_dir" : 'X\(//\)[^/]' \| \
X"$as_dir" : 'X\(//\)$' \| \
X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$as_dir" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
test -d "$as_dir" && break
done
test -z "$as_dirs" || eval "mkdir $as_dirs"
} || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
} # as_fn_mkdir_p
if mkdir -p . 2>/dev/null; then
as_mkdir_p='mkdir -p "$as_dir"'
else
test -d ./-p && rmdir ./-p
as_mkdir_p=false
fi
# as_fn_executable_p FILE
# -----------------------
# Test if FILE is an executable regular file.
as_fn_executable_p ()
{
test -f "$1" && test -x "$1"
} # as_fn_executable_p
as_test_x='test -x'
as_executable_p=as_fn_executable_p
# Sed expression to map a string onto a valid CPP name.
as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
# Sed expression to map a string onto a valid variable name.
as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
exec 6>&1
## ----------------------------------- ##
## Main body of $CONFIG_STATUS script. ##
## ----------------------------------- ##
# Save the log message, to keep $0 and so on meaningful, and to
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
This file was extended by npstat $as_me 4.9.0, which was
generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
CONFIG_HEADERS = $CONFIG_HEADERS
CONFIG_LINKS = $CONFIG_LINKS
CONFIG_COMMANDS = $CONFIG_COMMANDS
$ $0 $@
on `(hostname || uname -n) 2>/dev/null | sed 1q`
"
# Files that config.status was made for.
config_files=" Makefile npstat/nm/Makefile npstat/rng/Makefile npstat/stat/Makefile npstat/wrap/Makefile npstat/interfaces/Makefile npstat/emsunfold/Makefile npstat/Makefile examples/C++/Makefile npstat.pc"
config_commands=" depfiles libtool"
ac_cs_usage="\
\`$as_me' instantiates files and other configuration actions
from templates according to the current configuration. Unless the files
and actions are specified as TAGs, all are instantiated by default.
Usage: $0 [OPTION]... [TAG]...
-h, --help print this help, then exit
-V, --version print version number and configuration settings, then exit
--config print configuration, then exit
-q, --quiet, --silent
do not print progress messages
-d, --debug don't remove temporary files
--recheck update $as_me by reconfiguring in the same conditions
--file=FILE[:TEMPLATE]
instantiate the configuration file FILE
Configuration files:
$config_files
Configuration commands:
$config_commands
Report bugs to the package provider."
ac_cs_config="'--with-pic' 'PKG_CONFIG_PATH=/usr/local/lib/pkgconfig' 'CXXFLAGS=-std=c++11 -O3 -Wall -W -Werror'"
ac_cs_version="\
npstat config.status 4.9.0
configured by ./configure, generated by GNU Autoconf 2.69,
with options \"$ac_cs_config\"
Copyright (C) 2012 Free Software Foundation, Inc.
This config.status script is free software; the Free Software Foundation
gives unlimited permission to copy, distribute and modify it."
ac_pwd='/home/igv/Hepforge/npstat/trunk'
srcdir='.'
INSTALL='/usr/bin/install -c'
MKDIR_P='/bin/mkdir -p'
AWK='gawk'
test -n "$AWK" || AWK=awk
# The default lists apply if the user does not specify any file.
ac_need_defaults=:
while test $# != 0
do
case $1 in
--*=?*)
ac_option=`expr "X$1" : 'X\([^=]*\)='`
ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
ac_shift=:
;;
--*=)
ac_option=`expr "X$1" : 'X\([^=]*\)='`
ac_optarg=
ac_shift=:
;;
*)
ac_option=$1
ac_optarg=$2
ac_shift=shift
;;
esac
case $ac_option in
# Handling of the options.
-recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
ac_cs_recheck=: ;;
--version | --versio | --versi | --vers | --ver | --ve | --v | -V )
$as_echo "$ac_cs_version"; exit ;;
--config | --confi | --conf | --con | --co | --c )
$as_echo "$ac_cs_config"; exit ;;
--debug | --debu | --deb | --de | --d | -d )
debug=: ;;
--file | --fil | --fi | --f )
$ac_shift
case $ac_optarg in
*\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
'') as_fn_error $? "missing file argument" ;;
esac
as_fn_append CONFIG_FILES " '$ac_optarg'"
ac_need_defaults=false;;
--he | --h | --help | --hel | -h )
$as_echo "$ac_cs_usage"; exit ;;
-q | -quiet | --quiet | --quie | --qui | --qu | --q \
| -silent | --silent | --silen | --sile | --sil | --si | --s)
ac_cs_silent=: ;;
# This is an error.
-*) as_fn_error $? "unrecognized option: \`$1'
Try \`$0 --help' for more information." ;;
*) as_fn_append ac_config_targets " $1"
ac_need_defaults=false ;;
esac
shift
done
# Honor --quiet/--silent by silencing fd 6 (progress messages) and
# remembering to pass --silent along to any re-run of configure.
ac_configure_extra_args=
if $ac_cs_silent; then
exec 6>/dev/null
ac_configure_extra_args="$ac_configure_extra_args --silent"
fi
# --recheck: re-exec the original ./configure with the recorded
# command-line options (plus any extra args), replacing this process.
if $ac_cs_recheck; then
set X /bin/bash './configure' '--with-pic' 'PKG_CONFIG_PATH=/usr/local/lib/pkgconfig' 'CXXFLAGS=-std=c++11 -O3 -Wall -W -Werror' $ac_configure_extra_args --no-create --no-recursion
shift
$as_echo "running CONFIG_SHELL=/bin/bash $*" >&6
CONFIG_SHELL='/bin/bash'
export CONFIG_SHELL
exec "$@"
fi
exec 5>>config.log
{
echo
sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
## Running $as_me. ##
_ASBOX
$as_echo "$ac_log"
} >&5
#
# INIT-COMMANDS
#
AMDEP_TRUE="" ac_aux_dir="."
# The HP-UX ksh and POSIX shell print the target directory to stdout
# if CDPATH is set.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
sed_quote_subst='s/\(["`$\\]\)/\\\1/g'
double_quote_subst='s/\(["`\\]\)/\\\1/g'
delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
macro_version='2.4.6'
macro_revision='2.4.6'
enable_shared='yes'
enable_static='yes'
pic_mode='yes'
enable_fast_install='needless'
shared_archive_member_spec=''
SHELL='/bin/bash'
ECHO='printf %s\n'
PATH_SEPARATOR=':'
host_alias=''
host='x86_64-pc-linux-gnu'
host_os='linux-gnu'
build_alias=''
build='x86_64-pc-linux-gnu'
build_os='linux-gnu'
SED='/bin/sed'
Xsed='/bin/sed -e 1s/^X//'
GREP='/bin/grep'
EGREP='/bin/grep -E'
FGREP='/bin/grep -F'
# NOTE(review): this region contained unresolved unified-diff residue
# ("-"/"+" lines from the r579 -> r580 change), which is not valid shell;
# resolved to the newer (r580) value.
LD='/usr/bin/ld -m elf_x86_64'
NM='/usr/bin/nm -B'
LN_S='ln -s'
max_cmd_len='1572864'
ac_objext='o'
exeext=''
lt_unset='unset'
lt_SP2NL='tr \040 \012'
lt_NL2SP='tr \015\012 \040\040'
lt_cv_to_host_file_cmd='func_convert_file_noop'
lt_cv_to_tool_file_cmd='func_convert_file_noop'
reload_flag=' -r'
reload_cmds='$LD$reload_flag -o $output$reload_objs'
OBJDUMP='objdump'
deplibs_check_method='pass_all'
file_magic_cmd='$MAGIC_CMD'
file_magic_glob=''
want_nocaseglob='no'
DLLTOOL='false'
sharedlib_from_linklib_cmd='printf %s\n'
AR='ar'
AR_FLAGS='cru'
archiver_list_spec='@'
STRIP='strip'
RANLIB='ranlib'
old_postinstall_cmds='chmod 644 $oldlib~$RANLIB $tool_oldlib'
old_postuninstall_cmds=''
old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs~$RANLIB $tool_oldlib'
lock_old_archive_extraction='no'
CC='gcc'
CFLAGS='-g -O2'
compiler='g77'
GCC='yes'
lt_cv_sys_global_symbol_pipe='sed -n -e '\''s/^.*[ ]\([ABCDGIRSTW][ABCDGIRSTW]*\)[ ][ ]*\([_A-Za-z][_A-Za-z0-9]*\)$/\1 \2 \2/p'\'' | sed '\''/ __gnu_lto/d'\'''
lt_cv_sys_global_symbol_to_cdecl='sed -n -e '\''s/^T .* \(.*\)$/extern int \1();/p'\'' -e '\''s/^[ABCDGIRSTW][ABCDGIRSTW]* .* \(.*\)$/extern char \1;/p'\'''
lt_cv_sys_global_symbol_to_import=''
lt_cv_sys_global_symbol_to_c_name_address='sed -n -e '\''s/^: \(.*\) .*$/ {"\1", (void *) 0},/p'\'' -e '\''s/^[ABCDGIRSTW][ABCDGIRSTW]* .* \(.*\)$/ {"\1", (void *) \&\1},/p'\'''
lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='sed -n -e '\''s/^: \(.*\) .*$/ {"\1", (void *) 0},/p'\'' -e '\''s/^[ABCDGIRSTW][ABCDGIRSTW]* .* \(lib.*\)$/ {"\1", (void *) \&\1},/p'\'' -e '\''s/^[ABCDGIRSTW][ABCDGIRSTW]* .* \(.*\)$/ {"lib\1", (void *) \&\1},/p'\'''
lt_cv_nm_interface='BSD nm'
nm_file_list_spec='@'
lt_sysroot=''
lt_cv_truncate_bin='/bin/dd bs=4096 count=1'
objdir='.libs'
MAGIC_CMD='file'
lt_prog_compiler_no_builtin_flag=' -fno-builtin'
lt_prog_compiler_pic=' -fPIC -DPIC'
lt_prog_compiler_wl='-Wl,'
lt_prog_compiler_static='-static'
lt_cv_prog_compiler_c_o='yes'
need_locks='no'
MANIFEST_TOOL=':'
DSYMUTIL=''
NMEDIT=''
LIPO=''
OTOOL=''
OTOOL64=''
libext='a'
shrext_cmds='.so'
extract_expsyms_cmds=''
archive_cmds_need_lc='no'
enable_shared_with_static_runtimes='no'
export_dynamic_flag_spec='$wl--export-dynamic'
whole_archive_flag_spec='$wl--whole-archive$convenience $wl--no-whole-archive'
compiler_needs_object='no'
old_archive_from_new_cmds=''
old_archive_from_expsyms_cmds=''
archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
echo "local: *; };" >> $output_objdir/$libname.ver~
$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib'
module_cmds=''
module_expsym_cmds=''
with_gnu_ld='yes'
allow_undefined_flag=''
no_undefined_flag=''
hardcode_libdir_flag_spec='$wl-rpath $wl$libdir'
hardcode_libdir_separator=''
hardcode_direct='no'
hardcode_direct_absolute='no'
hardcode_minus_L='no'
hardcode_shlibpath_var='unsupported'
hardcode_automatic='no'
inherit_rpath='no'
link_all_deplibs='unknown'
always_export_symbols='no'
export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
include_expsyms=''
prelink_cmds=''
postlink_cmds=''
file_list_spec=''
variables_saved_for_relink='PATH LD_LIBRARY_PATH LD_RUN_PATH GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH'
need_lib_prefix='no'
need_version='no'
version_type='linux'
runpath_var='LD_RUN_PATH'
shlibpath_var='LD_LIBRARY_PATH'
shlibpath_overrides_runpath='yes'
libname_spec='lib$name'
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
soname_spec='$libname$release$shared_ext$major'
install_override_mode=''
postinstall_cmds=''
postuninstall_cmds=''
finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
finish_eval=''
hardcode_into_libs='yes'
sys_lib_search_path_spec='/usr/lib/gcc/x86_64-linux-gnu/7 /usr/lib/x86_64-linux-gnu /usr/lib /lib/x86_64-linux-gnu /lib '
configure_time_dlsearch_path='/lib /usr/lib /usr/lib/x86_64-linux-gnu/libfakeroot /usr/local/lib /usr/local/lib/x86_64-linux-gnu /lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu '
configure_time_lt_sys_library_path=''
hardcode_action='immediate'
enable_dlopen='unknown'
enable_dlopen_self='unknown'
enable_dlopen_self_static='unknown'
old_striplib='strip --strip-debug'
striplib='strip --strip-unneeded'
compiler_lib_search_dirs=''
predep_objects=''
postdep_objects=''
predeps=''
postdeps=''
compiler_lib_search_path=''
# NOTE(review): this region contained unresolved unified-diff residue
# ("-"/"+" lines from the r579 -> r580 change), which is not valid shell;
# resolved to the newer (r580) values for the CXX and F77 tags.
LD_CXX='/usr/bin/ld -m elf_x86_64'
LD_F77='/usr/bin/ld -m elf_x86_64'
reload_flag_CXX=' -r'
reload_flag_F77=' -r'
reload_cmds_CXX='$LD$reload_flag -o $output$reload_objs'
reload_cmds_F77='$LD$reload_flag -o $output$reload_objs'
old_archive_cmds_CXX='$AR $AR_FLAGS $oldlib$oldobjs~$RANLIB $tool_oldlib'
old_archive_cmds_F77='$AR $AR_FLAGS $oldlib$oldobjs~$RANLIB $tool_oldlib'
compiler_CXX='g++'
compiler_F77='g77'
GCC_CXX='yes'
GCC_F77='yes'
lt_prog_compiler_no_builtin_flag_CXX=' -fno-builtin'
lt_prog_compiler_no_builtin_flag_F77=''
lt_prog_compiler_pic_CXX=' -fPIC -DPIC'
lt_prog_compiler_pic_F77=' -fPIC'
lt_prog_compiler_wl_CXX='-Wl,'
lt_prog_compiler_wl_F77='-Wl,'
lt_prog_compiler_static_CXX='-static'
lt_prog_compiler_static_F77='-static'
lt_cv_prog_compiler_c_o_CXX='yes'
lt_cv_prog_compiler_c_o_F77='yes'
archive_cmds_need_lc_CXX='no'
archive_cmds_need_lc_F77='no'
enable_shared_with_static_runtimes_CXX='no'
enable_shared_with_static_runtimes_F77='no'
export_dynamic_flag_spec_CXX='$wl--export-dynamic'
export_dynamic_flag_spec_F77='$wl--export-dynamic'
whole_archive_flag_spec_CXX='$wl--whole-archive$convenience $wl--no-whole-archive'
whole_archive_flag_spec_F77='$wl--whole-archive$convenience $wl--no-whole-archive'
compiler_needs_object_CXX='no'
compiler_needs_object_F77='no'
old_archive_from_new_cmds_CXX=''
old_archive_from_new_cmds_F77=''
old_archive_from_expsyms_cmds_CXX=''
old_archive_from_expsyms_cmds_F77=''
archive_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib'
archive_cmds_F77='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib'
archive_expsym_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib'
archive_expsym_cmds_F77='echo "{ global:" > $output_objdir/$libname.ver~
cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
echo "local: *; };" >> $output_objdir/$libname.ver~
$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib'
module_cmds_CXX=''
module_cmds_F77=''
module_expsym_cmds_CXX=''
module_expsym_cmds_F77=''
with_gnu_ld_CXX='yes'
with_gnu_ld_F77='yes'
allow_undefined_flag_CXX=''
allow_undefined_flag_F77=''
no_undefined_flag_CXX=''
no_undefined_flag_F77=''
hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir'
hardcode_libdir_flag_spec_F77='$wl-rpath $wl$libdir'
hardcode_libdir_separator_CXX=''
hardcode_libdir_separator_F77=''
hardcode_direct_CXX='no'
hardcode_direct_F77='no'
hardcode_direct_absolute_CXX='no'
hardcode_direct_absolute_F77='no'
hardcode_minus_L_CXX='no'
hardcode_minus_L_F77='no'
hardcode_shlibpath_var_CXX='unsupported'
hardcode_shlibpath_var_F77='unsupported'
hardcode_automatic_CXX='no'
hardcode_automatic_F77='no'
inherit_rpath_CXX='no'
inherit_rpath_F77='no'
link_all_deplibs_CXX='unknown'
link_all_deplibs_F77='unknown'
always_export_symbols_CXX='no'
always_export_symbols_F77='no'
export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
export_symbols_cmds_F77='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
exclude_expsyms_F77='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
include_expsyms_CXX=''
include_expsyms_F77=''
prelink_cmds_CXX=''
prelink_cmds_F77=''
postlink_cmds_CXX=''
postlink_cmds_F77=''
file_list_spec_CXX=''
file_list_spec_F77=''
hardcode_action_CXX='immediate'
hardcode_action_F77='immediate'
compiler_lib_search_dirs_CXX='/usr/lib/gcc/x86_64-linux-gnu/7 /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu /usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib /lib/x86_64-linux-gnu /lib/../lib /usr/lib/x86_64-linux-gnu /usr/lib/../lib /usr/lib/gcc/x86_64-linux-gnu/7/../../..'
compiler_lib_search_dirs_F77=''
predep_objects_CXX='/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crti.o /usr/lib/gcc/x86_64-linux-gnu/7/crtbeginS.o'
predep_objects_F77=''
postdep_objects_CXX='/usr/lib/gcc/x86_64-linux-gnu/7/crtendS.o /usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu/crtn.o'
postdep_objects_F77=''
predeps_CXX=''
predeps_F77=''
postdeps_CXX='-lstdc++ -lm -lgcc_s -lc -lgcc_s'
postdeps_F77=''
compiler_lib_search_path_CXX='-L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../..'
compiler_lib_search_path_F77=''
LTCC='gcc'
LTCFLAGS='-g -O2'
compiler='gcc'
# A function that is used when there is no print builtin or printf.
# Writes "$1" followed by a newline using cat with a here-document; the
# single-quoted eval defers expansion of $1 until the here-doc is read
# (presumably to sidestep echo/printf portability quirks with
# backslashes -- confirm against ltmain.sh).
func_fallback_echo ()
{
eval 'cat <<_LTECHO_EOF
$1
_LTECHO_EOF'
}
# Quote evaled strings.
for var in SHELL ECHO PATH_SEPARATOR SED GREP EGREP FGREP LD NM LN_S lt_SP2NL lt_NL2SP reload_flag OBJDUMP deplibs_check_method file_magic_cmd file_magic_glob want_nocaseglob DLLTOOL sharedlib_from_linklib_cmd AR AR_FLAGS archiver_list_spec STRIP RANLIB CC CFLAGS compiler lt_cv_sys_global_symbol_pipe lt_cv_sys_global_symbol_to_cdecl lt_cv_sys_global_symbol_to_import lt_cv_sys_global_symbol_to_c_name_address lt_cv_sys_global_symbol_to_c_name_address_lib_prefix lt_cv_nm_interface nm_file_list_spec lt_cv_truncate_bin lt_prog_compiler_no_builtin_flag lt_prog_compiler_pic lt_prog_compiler_wl lt_prog_compiler_static lt_cv_prog_compiler_c_o need_locks MANIFEST_TOOL DSYMUTIL NMEDIT LIPO OTOOL OTOOL64 shrext_cmds export_dynamic_flag_spec whole_archive_flag_spec compiler_needs_object with_gnu_ld allow_undefined_flag no_undefined_flag hardcode_libdir_flag_spec hardcode_libdir_separator exclude_expsyms include_expsyms file_list_spec variables_saved_for_relink libname_spec library_names_spec soname_spec install_override_mode finish_eval old_striplib striplib compiler_lib_search_dirs predep_objects postdep_objects predeps postdeps compiler_lib_search_path LD_CXX LD_F77 reload_flag_CXX reload_flag_F77 compiler_CXX compiler_F77 lt_prog_compiler_no_builtin_flag_CXX lt_prog_compiler_no_builtin_flag_F77 lt_prog_compiler_pic_CXX lt_prog_compiler_pic_F77 lt_prog_compiler_wl_CXX lt_prog_compiler_wl_F77 lt_prog_compiler_static_CXX lt_prog_compiler_static_F77 lt_cv_prog_compiler_c_o_CXX lt_cv_prog_compiler_c_o_F77 export_dynamic_flag_spec_CXX export_dynamic_flag_spec_F77 whole_archive_flag_spec_CXX whole_archive_flag_spec_F77 compiler_needs_object_CXX compiler_needs_object_F77 with_gnu_ld_CXX with_gnu_ld_F77 allow_undefined_flag_CXX allow_undefined_flag_F77 no_undefined_flag_CXX no_undefined_flag_F77 hardcode_libdir_flag_spec_CXX hardcode_libdir_flag_spec_F77 hardcode_libdir_separator_CXX hardcode_libdir_separator_F77 exclude_expsyms_CXX exclude_expsyms_F77 include_expsyms_CXX 
include_expsyms_F77 file_list_spec_CXX file_list_spec_F77 compiler_lib_search_dirs_CXX compiler_lib_search_dirs_F77 predep_objects_CXX predep_objects_F77 postdep_objects_CXX postdep_objects_F77 predeps_CXX predeps_F77 postdeps_CXX postdeps_F77 compiler_lib_search_path_CXX compiler_lib_search_path_F77; do
case `eval \\$ECHO \\""\\$$var"\\"` in
*[\\\`\"\$]*)
eval "lt_$var=\\\"\`\$ECHO \"\$$var\" | \$SED \"\$sed_quote_subst\"\`\\\"" ## exclude from sc_prohibit_nested_quotes
;;
*)
eval "lt_$var=\\\"\$$var\\\""
;;
esac
done
# Double-quote double-evaled strings.
for var in reload_cmds old_postinstall_cmds old_postuninstall_cmds old_archive_cmds extract_expsyms_cmds old_archive_from_new_cmds old_archive_from_expsyms_cmds archive_cmds archive_expsym_cmds module_cmds module_expsym_cmds export_symbols_cmds prelink_cmds postlink_cmds postinstall_cmds postuninstall_cmds finish_cmds sys_lib_search_path_spec configure_time_dlsearch_path configure_time_lt_sys_library_path reload_cmds_CXX reload_cmds_F77 old_archive_cmds_CXX old_archive_cmds_F77 old_archive_from_new_cmds_CXX old_archive_from_new_cmds_F77 old_archive_from_expsyms_cmds_CXX old_archive_from_expsyms_cmds_F77 archive_cmds_CXX archive_cmds_F77 archive_expsym_cmds_CXX archive_expsym_cmds_F77 module_cmds_CXX module_cmds_F77 module_expsym_cmds_CXX module_expsym_cmds_F77 export_symbols_cmds_CXX export_symbols_cmds_F77 prelink_cmds_CXX prelink_cmds_F77 postlink_cmds_CXX postlink_cmds_F77; do
case `eval \\$ECHO \\""\\$$var"\\"` in
*[\\\`\"\$]*)
eval "lt_$var=\\\"\`\$ECHO \"\$$var\" | \$SED -e \"\$double_quote_subst\" -e \"\$sed_quote_subst\" -e \"\$delay_variable_subst\"\`\\\"" ## exclude from sc_prohibit_nested_quotes
;;
*)
eval "lt_$var=\\\"\$$var\\\""
;;
esac
done
ac_aux_dir='.'
# See if we are running on zsh, and set the options that allow our
# commands through without removal of \ escapes INIT.
if test -n "${ZSH_VERSION+set}"; then
setopt NO_GLOB_SUBST
fi
PACKAGE='npstat'
VERSION='4.9.0'
RM='rm -f'
ofile='libtool'
# Handling of arguments.
for ac_config_target in $ac_config_targets
do
case $ac_config_target in
"depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;;
"libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;;
"Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;;
"npstat/nm/Makefile") CONFIG_FILES="$CONFIG_FILES npstat/nm/Makefile" ;;
"npstat/rng/Makefile") CONFIG_FILES="$CONFIG_FILES npstat/rng/Makefile" ;;
"npstat/stat/Makefile") CONFIG_FILES="$CONFIG_FILES npstat/stat/Makefile" ;;
"npstat/wrap/Makefile") CONFIG_FILES="$CONFIG_FILES npstat/wrap/Makefile" ;;
"npstat/interfaces/Makefile") CONFIG_FILES="$CONFIG_FILES npstat/interfaces/Makefile" ;;
"npstat/emsunfold/Makefile") CONFIG_FILES="$CONFIG_FILES npstat/emsunfold/Makefile" ;;
"npstat/Makefile") CONFIG_FILES="$CONFIG_FILES npstat/Makefile" ;;
"examples/C++/Makefile") CONFIG_FILES="$CONFIG_FILES examples/C++/Makefile" ;;
"npstat.pc") CONFIG_FILES="$CONFIG_FILES npstat.pc" ;;
*) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
esac
done
# If the user did not use the arguments to specify the items to instantiate,
# then the envvar interface is used. Set only those that are not.
# We use the long form for the default assignment because of an extremely
# bizarre bug on SunOS 4.1.3.
if $ac_need_defaults; then
test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
fi
# Have a temporary directory for convenience. Make it in the build tree
# simply because there is no reason against having it here, and in addition,
# creating and moving files from /tmp can sometimes cause problems.
# Hook for its removal unless debugging.
# Note that there is a small window in which the directory will not be cleaned:
# after its creation but before its name has been assigned to `$tmp'.
# Unless --debug was given, arrange for the temporary directory to be
# removed on normal exit (trap 0) and to exit cleanly on
# HUP/INT/PIPE/TERM (signals 1 2 13 15).
$debug ||
{
tmp= ac_tmp=
trap 'exit_status=$?
: "${ac_tmp:=$tmp}"
{ test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
' 0
trap 'as_fn_exit 1' 1 2 13 15
}
# Create a (secure) tmp directory for tmp files.
{
tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
test -d "$tmp"
} ||
{
# Fallback when mktemp is unavailable: derive a name from PID/$RANDOM
# and create it with a restrictive umask.
tmp=./conf$$-$RANDOM
(umask 077 && mkdir "$tmp")
} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
ac_tmp=$tmp
# Set up the scripts for CONFIG_FILES section.
# No need to generate them if there are no CONFIG_FILES.
# This happens for instance with `./config.status config.h'.
if test -n "$CONFIG_FILES"; then
ac_cr=`echo X | tr X '\015'`
# On cygwin, bash can eat \r inside `` if the user requested igncr.
# But we know of no other shell where ac_cr would be empty at this
# point, so we can use a bashism as a fallback.
if test "x$ac_cr" = x; then
eval ac_cr=\$\'\\r\'
fi
ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null`
if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
ac_cs_awk_cr='\\r'
else
ac_cs_awk_cr=$ac_cr
fi
echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
cat >>"$ac_tmp/subs1.awk" <<\_ACAWK &&
S["am__EXEEXT_FALSE"]=""
S["am__EXEEXT_TRUE"]="#"
S["LTLIBOBJS"]=""
S["LIBOBJS"]=""
S["CXXCPP"]="g++ -E"
S["CPP"]="gcc -E"
S["LT_SYS_LIBRARY_PATH"]=""
S["OTOOL64"]=""
S["OTOOL"]=""
S["LIPO"]=""
S["NMEDIT"]=""
S["DSYMUTIL"]=""
S["MANIFEST_TOOL"]=":"
S["RANLIB"]="ranlib"
S["ac_ct_AR"]="ar"
S["AR"]="ar"
S["DLLTOOL"]="false"
S["OBJDUMP"]="objdump"
S["LN_S"]="ln -s"
S["NM"]="/usr/bin/nm -B"
S["ac_ct_DUMPBIN"]=""
S["DUMPBIN"]=""
# NOTE(review): resolved unified-diff residue (r579 -> r580) to the newer value.
S["LD"]="/usr/bin/ld -m elf_x86_64"
S["FGREP"]="/bin/grep -F"
S["EGREP"]="/bin/grep -E"
S["GREP"]="/bin/grep"
S["SED"]="/bin/sed"
S["am__fastdepCC_FALSE"]="#"
S["am__fastdepCC_TRUE"]=""
S["CCDEPMODE"]="depmode=gcc3"
S["ac_ct_CC"]="gcc"
S["CFLAGS"]="-g -O2"
S["CC"]="gcc"
S["LIBTOOL"]="$(SHELL) $(top_builddir)/libtool"
S["FLIBS"]=" -L/usr/lib/gcc/x86_64-linux-gnu/7 -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../x86_64-linux-gnu -L/usr/lib/gcc/x86_64-linux-gnu/7/../../../../lib -L/"\
"lib/x86_64-linux-gnu -L/lib/../lib -L/usr/lib/x86_64-linux-gnu -L/usr/lib/../lib -L/usr/lib/gcc/x86_64-linux-gnu/7/../../.. -lgfortran -lm -lquadmat"\
"h"
S["host_os"]="linux-gnu"
S["host_vendor"]="pc"
S["host_cpu"]="x86_64"
S["host"]="x86_64-pc-linux-gnu"
S["build_os"]="linux-gnu"
S["build_vendor"]="pc"
S["build_cpu"]="x86_64"
S["build"]="x86_64-pc-linux-gnu"
S["ac_ct_F77"]="g77"
S["FFLAGS"]="-g -O2"
S["F77"]="g77"
S["am__fastdepCXX_FALSE"]="#"
S["am__fastdepCXX_TRUE"]=""
S["CXXDEPMODE"]="depmode=gcc3"
S["am__nodep"]="_no"
S["AMDEPBACKSLASH"]="\\"
S["AMDEP_FALSE"]="#"
S["AMDEP_TRUE"]=""
S["am__quote"]=""
S["am__include"]="include"
S["DEPDIR"]=".deps"
S["OBJEXT"]="o"
S["EXEEXT"]=""
S["ac_ct_CXX"]="g++"
S["CPPFLAGS"]=""
S["LDFLAGS"]=""
S["CXXFLAGS"]="-std=c++11 -O3 -Wall -W -Werror"
S["CXX"]="g++"
S["DEPS_LIBS"]="-L/usr/local/lib -lfftw3 -lgeners -lkstest"
S["DEPS_CFLAGS"]="-I/usr/local/include"
S["PKG_CONFIG_LIBDIR"]=""
S["PKG_CONFIG_PATH"]="/usr/local/lib/pkgconfig"
S["PKG_CONFIG"]="/usr/bin/pkg-config"
S["AM_BACKSLASH"]="\\"
S["AM_DEFAULT_VERBOSITY"]="1"
S["AM_DEFAULT_V"]="$(AM_DEFAULT_VERBOSITY)"
S["AM_V"]="$(V)"
S["am__untar"]="$${TAR-tar} xf -"
S["am__tar"]="$${TAR-tar} chof - \"$$tardir\""
S["AMTAR"]="$${TAR-tar}"
S["am__leading_dot"]="."
S["SET_MAKE"]=""
S["AWK"]="gawk"
S["mkdir_p"]="$(MKDIR_P)"
S["MKDIR_P"]="/bin/mkdir -p"
S["INSTALL_STRIP_PROGRAM"]="$(install_sh) -c -s"
S["STRIP"]="strip"
S["install_sh"]="${SHELL} /home/igv/Hepforge/npstat/trunk/install-sh"
S["MAKEINFO"]="${SHELL} /home/igv/Hepforge/npstat/trunk/missing makeinfo"
S["AUTOHEADER"]="${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoheader"
S["AUTOMAKE"]="${SHELL} /home/igv/Hepforge/npstat/trunk/missing automake-1.15"
S["AUTOCONF"]="${SHELL} /home/igv/Hepforge/npstat/trunk/missing autoconf"
S["ACLOCAL"]="${SHELL} /home/igv/Hepforge/npstat/trunk/missing aclocal-1.15"
S["VERSION"]="4.9.0"
S["PACKAGE"]="npstat"
S["CYGPATH_W"]="echo"
S["am__isrc"]=""
S["INSTALL_DATA"]="${INSTALL} -m 644"
S["INSTALL_SCRIPT"]="${INSTALL}"
S["INSTALL_PROGRAM"]="${INSTALL}"
S["target_alias"]=""
S["host_alias"]=""
S["build_alias"]=""
S["LIBS"]=""
S["ECHO_T"]=""
S["ECHO_N"]="-n"
S["ECHO_C"]=""
S["DEFS"]="-DPACKAGE_NAME=\\\"npstat\\\" -DPACKAGE_TARNAME=\\\"npstat\\\" -DPACKAGE_VERSION=\\\"4.9.0\\\" -DPACKAGE_STRING=\\\"npstat\\ 4.9.0\\\" -DPACKAGE_BUGREPORT=\\\"\\\" -DPAC"\
"KAGE_URL=\\\"\\\" -DPACKAGE=\\\"npstat\\\" -DVERSION=\\\"4.9.0\\\" -DSTDC_HEADERS=1 -DHAVE_SYS_TYPES_H=1 -DHAVE_SYS_STAT_H=1 -DHAVE_STDLIB_H=1 -DHAVE_STRING_H=1"\
" -DHAVE_MEMORY_H=1 -DHAVE_STRINGS_H=1 -DHAVE_INTTYPES_H=1 -DHAVE_STDINT_H=1 -DHAVE_UNISTD_H=1 -DHAVE_DLFCN_H=1 -DLT_OBJDIR=\\\".libs/\\\""
S["mandir"]="${datarootdir}/man"
S["localedir"]="${datarootdir}/locale"
S["libdir"]="${exec_prefix}/lib"
S["psdir"]="${docdir}"
S["pdfdir"]="${docdir}"
S["dvidir"]="${docdir}"
S["htmldir"]="${docdir}"
S["infodir"]="${datarootdir}/info"
S["docdir"]="${datarootdir}/doc/${PACKAGE_TARNAME}"
S["oldincludedir"]="/usr/include"
S["includedir"]="${prefix}/include"
S["runstatedir"]="${localstatedir}/run"
S["localstatedir"]="${prefix}/var"
S["sharedstatedir"]="${prefix}/com"
S["sysconfdir"]="${prefix}/etc"
S["datadir"]="${datarootdir}"
S["datarootdir"]="${prefix}/share"
S["libexecdir"]="${exec_prefix}/libexec"
S["sbindir"]="${exec_prefix}/sbin"
S["bindir"]="${exec_prefix}/bin"
S["program_transform_name"]="s,x,x,"
S["prefix"]="/usr/local"
S["exec_prefix"]="${prefix}"
S["PACKAGE_URL"]=""
S["PACKAGE_BUGREPORT"]=""
S["PACKAGE_STRING"]="npstat 4.9.0"
S["PACKAGE_VERSION"]="4.9.0"
S["PACKAGE_TARNAME"]="npstat"
S["PACKAGE_NAME"]="npstat"
S["PATH_SEPARATOR"]=":"
S["SHELL"]="/bin/bash"
_ACAWK
cat >>"$ac_tmp/subs1.awk" <<_ACAWK &&
for (key in S) S_is_set[key] = 1
FS = ""
}
{
line = $ 0
nfields = split(line, field, "@")
substed = 0
len = length(field[1])
for (i = 2; i < nfields; i++) {
key = field[i]
keylen = length(key)
if (S_is_set[key]) {
value = S[key]
line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
len += length(value) + length(field[++i])
substed = 1
} else
len += 1 + keylen
}
print line
}
_ACAWK
if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
else
cat
fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
|| as_fn_error $? "could not setup config files machinery" "$LINENO" 5
fi # test -n "$CONFIG_FILES"
eval set X " :F $CONFIG_FILES :C $CONFIG_COMMANDS"
shift
for ac_tag
do
case $ac_tag in
:[FHLC]) ac_mode=$ac_tag; continue;;
esac
case $ac_mode$ac_tag in
:[FHL]*:*);;
:L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
:[FH]-) ac_tag=-:-;;
:[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
esac
ac_save_IFS=$IFS
IFS=:
set x $ac_tag
IFS=$ac_save_IFS
shift
ac_file=$1
shift
case $ac_mode in
:L) ac_source=$1;;
:[FH])
ac_file_inputs=
for ac_f
do
case $ac_f in
-) ac_f="$ac_tmp/stdin";;
*) # Look for the file first in the build tree, then in the source tree
# (if the path is not absolute). The absolute path cannot be DOS-style,
# because $ac_f cannot contain `:'.
test -f "$ac_f" ||
case $ac_f in
[\\/$]*) false;;
*) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
esac ||
as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
esac
case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
as_fn_append ac_file_inputs " '$ac_f'"
done
# Let's still pretend it is `configure' which instantiates (i.e., don't
# use $as_me), people would be surprised to read:
# /* config.h. Generated by config.status. */
configure_input='Generated from '`
$as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
`' by configure.'
if test x"$ac_file" != x-; then
configure_input="$ac_file. $configure_input"
{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
$as_echo "$as_me: creating $ac_file" >&6;}
fi
# Neutralize special characters interpreted by sed in replacement strings.
case $configure_input in #(
*\&* | *\|* | *\\* )
ac_sed_conf_input=`$as_echo "$configure_input" |
sed 's/[\\\\&|]/\\\\&/g'`;; #(
*) ac_sed_conf_input=$configure_input;;
esac
case $ac_tag in
*:-:* | *:-) cat >"$ac_tmp/stdin" \
|| as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
esac
;;
esac
ac_dir=`$as_dirname -- "$ac_file" ||
$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$ac_file" : 'X\(//\)[^/]' \| \
X"$ac_file" : 'X\(//\)$' \| \
X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$ac_file" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
as_dir="$ac_dir"; as_fn_mkdir_p
ac_builddir=.
case "$ac_dir" in
.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
*)
ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
# A ".." for each directory in $ac_dir_suffix.
ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
case $ac_top_builddir_sub in
"") ac_top_builddir_sub=. ac_top_build_prefix= ;;
*) ac_top_build_prefix=$ac_top_builddir_sub/ ;;
esac ;;
esac
ac_abs_top_builddir=$ac_pwd
ac_abs_builddir=$ac_pwd$ac_dir_suffix
# for backward compatibility:
ac_top_builddir=$ac_top_build_prefix
case $srcdir in
.) # We are building in place.
ac_srcdir=.
ac_top_srcdir=$ac_top_builddir_sub
ac_abs_top_srcdir=$ac_pwd ;;
[\\/]* | ?:[\\/]* ) # Absolute name.
ac_srcdir=$srcdir$ac_dir_suffix;
ac_top_srcdir=$srcdir
ac_abs_top_srcdir=$srcdir ;;
*) # Relative name.
ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
ac_top_srcdir=$ac_top_build_prefix$srcdir
ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
esac
ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
case $ac_mode in
:F)
#
# CONFIG_FILE
#
case $INSTALL in
[\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;;
*) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
esac
ac_MKDIR_P=$MKDIR_P
case $MKDIR_P in
[\\/$]* | ?:[\\/]* ) ;;
*/*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
esac
# If the template does not know about datarootdir, expand it.
# FIXME: This hack should be removed a few years after 2.60.
ac_datarootdir_hack=; ac_datarootdir_seen=
ac_sed_dataroot='
/datarootdir/ {
p
q
}
/@datadir@/p
/@docdir@/p
/@infodir@/p
/@localedir@/p
/@mandir@/p'
case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
*datarootdir*) ac_datarootdir_seen=yes;;
*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
ac_datarootdir_hack='
s&@datadir@&${datarootdir}&g
s&@docdir@&${datarootdir}/doc/${PACKAGE_TARNAME}&g
s&@infodir@&${datarootdir}/info&g
s&@localedir@&${datarootdir}/locale&g
s&@mandir@&${datarootdir}/man&g
s&\${datarootdir}&${prefix}/share&g' ;;
esac
# Build the sed program that performs the per-file @VAR@ substitutions.
# The first fragment sanitizes VPATH assignments: srcdir references are
# stripped (not needed when building where the sources live), and a
# VPATH assignment left empty as a result is deleted outright.
ac_sed_extra="/^[ ]*VPATH[ ]*=[ ]*/{
h
s///
s/^/:/
s/[ ]*$/:/
s/:\$(srcdir):/:/g
s/:\${srcdir}:/:/g
s/:@srcdir@:/:/g
s/^:*//
s/:*$//
x
s/\(=[ ]*\).*/\1/
G
s/\n//
s/^[^=]*=[ ]*$//
}
:t
/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
s|@configure_input@|$ac_sed_conf_input|;t t
s&@top_builddir@&$ac_top_builddir_sub&;t t
s&@top_build_prefix@&$ac_top_build_prefix&;t t
s&@srcdir@&$ac_srcdir&;t t
s&@abs_srcdir@&$ac_abs_srcdir&;t t
s&@top_srcdir@&$ac_top_srcdir&;t t
s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
s&@builddir@&$ac_builddir&;t t
s&@abs_builddir@&$ac_abs_builddir&;t t
s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
s&@INSTALL@&$ac_INSTALL&;t t
s&@MKDIR_P@&$ac_MKDIR_P&;t t
$ac_datarootdir_hack
"
# Run the sed program above, then the awk program that holds the bulk
# of the AC_SUBST replacements, over the input template(s).
eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
>$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
# Warn when the generated file still refers to ${datarootdir} but never
# defines it (only relevant when no datarootdir hack was applied).
test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
{ ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
{ ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \
"$ac_tmp/out"`; test -z "$ac_out"; } &&
{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
which seems to be undefined. Please make sure it is defined" >&5
$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
which seems to be undefined. Please make sure it is defined" >&2;}
rm -f "$ac_tmp/stdin"
# A target of '-' means the substituted output goes to stdout;
# otherwise move the finished file into place.
case $ac_file in
-) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
*) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
esac \
|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
;;
# :C tags (extra commands) only announce themselves here; the actual
# commands run from the case statement below.
:C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
$as_echo "$as_me: executing $ac_file commands" >&6;}
;;
esac
# Dispatch the per-tag extra commands registered by configure.
case $ac_file$ac_mode in
"depfiles":C) test x"$AMDEP_TRUE" != x"" || {
# Older Autoconf quotes --file arguments for eval, but not when files
# are listed without --file. Let's play safe and only enable the eval
# if we detect the quoting.
case $CONFIG_FILES in
*\'*) eval set x "$CONFIG_FILES" ;;
*) set x $CONFIG_FILES ;;
esac
shift
# Walk every generated Makefile and pre-create the dependency files it
# includes, so the first "make" run does not abort on missing includes.
for mf
do
# Strip MF so we end up with the name of the file.
mf=`echo "$mf" | sed -e 's/:.*$//'`
# Check whether this is an Automake generated Makefile or not.
# We used to match only the files named 'Makefile.in', but
# some people rename them; so instead we look at the file content.
# Grep'ing the first line is not enough: some people post-process
# each Makefile.in and add a new line on top of each file to say so.
# Grep'ing the whole file is not good either: AIX grep has a line
# limit of 2048, but all seds we know of understand at least 4000.
if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
dirpart=`$as_dirname -- "$mf" ||
$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$mf" : 'X\(//\)[^/]' \| \
X"$mf" : 'X\(//\)$' \| \
X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$mf" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
else
continue
fi
# Extract the definition of DEPDIR, am__include, and am__quote
# from the Makefile without running 'make'.
DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
test -z "$DEPDIR" && continue
am__include=`sed -n 's/^am__include = //p' < "$mf"`
test -z "$am__include" && continue
am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
# Find all dependency output files, they are included files with
# $(DEPDIR) in their names. We invoke sed twice because it is the
# simplest approach to changing $(DEPDIR) to its actual value in the
# expansion.
for file in `sed -n "
s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do
# Make sure the directory exists.
test -f "$dirpart/$file" && continue
fdir=`$as_dirname -- "$file" ||
$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
X"$file" : 'X\(//\)[^/]' \| \
X"$file" : 'X\(//\)$' \| \
X"$file" : 'X\(/\)' \| . 2>/dev/null ||
$as_echo X"$file" |
sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
s//\1/
q
}
/^X\(\/\/\)[^/].*/{
s//\1/
q
}
/^X\(\/\/\)$/{
s//\1/
q
}
/^X\(\/\).*/{
s//\1/
q
}
s/.*/./; q'`
as_dir=$dirpart/$fdir; as_fn_mkdir_p
# echo "creating $dirpart/$file"
# Seed the dependency file; the first real "make" overwrites it.
echo '# dummy' > "$dirpart/$file"
done
done
}
;;
"libtool":C)
# See if we are running on zsh, and set the options that allow our
# commands through without removal of \ escapes.
if test -n "${ZSH_VERSION+set}"; then
setopt NO_GLOB_SUBST
fi
# Assemble the new libtool script in a temporary file (${ofile}T) and
# remove it on HUP/INT/TERM, so a half-written script never survives.
cfgfile=${ofile}T
trap "$RM \"$cfgfile\"; exit 1" 1 2 15
$RM "$cfgfile"
# Emit the script header and the tag-independent LIBTOOL CONFIG block.
# This here-document is UNQUOTED: every $lt_* / $enable_* variable is
# expanded now, with values configure already shell-quoted; \$ escapes
# protect the few references meant to survive into the output literally.
cat <<_LT_EOF >> "$cfgfile"
#! $SHELL
# Generated automatically by $as_me ($PACKAGE) $VERSION
# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
# NOTE: Changes made to this file will be lost: look at ltmain.sh.
# Provide generalized library-building support services.
# Written by Gordon Matzigkeit, 1996
# Copyright (C) 2014 Free Software Foundation, Inc.
# This is free software; see the source for copying conditions. There is NO
# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# GNU Libtool is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of of the License, or
# (at your option) any later version.
#
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program or library that is built
# using GNU Libtool, you may include this file under the same
# distribution terms that you use for the rest of that program.
#
# GNU Libtool is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
# The names of the tagged configurations supported by this script.
available_tags='CXX F77 '
# Configured defaults for sys_lib_dlsearch_path munging.
: \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"}
# ### BEGIN LIBTOOL CONFIG
# Which release of libtool.m4 was used?
macro_version=$macro_version
macro_revision=$macro_revision
# Whether or not to build shared libraries.
build_libtool_libs=$enable_shared
# Whether or not to build static libraries.
build_old_libs=$enable_static
# What type of objects to build.
pic_mode=$pic_mode
# Whether or not to optimize for fast installation.
fast_install=$enable_fast_install
# Shared archive member basename,for filename based shared library versioning on AIX.
shared_archive_member_spec=$shared_archive_member_spec
# Shell to use when invoking shell scripts.
SHELL=$lt_SHELL
# An echo program that protects backslashes.
ECHO=$lt_ECHO
# The PATH separator for the build system.
PATH_SEPARATOR=$lt_PATH_SEPARATOR
# The host system.
host_alias=$host_alias
host=$host
host_os=$host_os
# The build system.
build_alias=$build_alias
build=$build
build_os=$build_os
# A sed program that does not truncate output.
SED=$lt_SED
# Sed that helps us avoid accidentally triggering echo(1) options like -n.
Xsed="\$SED -e 1s/^X//"
# A grep program that handles long lines.
GREP=$lt_GREP
# An ERE matcher.
EGREP=$lt_EGREP
# A literal string matcher.
FGREP=$lt_FGREP
# A BSD- or MS-compatible name lister.
NM=$lt_NM
# Whether we need soft or hard links.
LN_S=$lt_LN_S
# What is the maximum length of a command?
max_cmd_len=$max_cmd_len
# Object file suffix (normally "o").
objext=$ac_objext
# Executable file suffix (normally "").
exeext=$exeext
# whether the shell understands "unset".
lt_unset=$lt_unset
# turn spaces into newlines.
SP2NL=$lt_lt_SP2NL
# turn newlines into spaces.
NL2SP=$lt_lt_NL2SP
# convert \$build file names to \$host format.
to_host_file_cmd=$lt_cv_to_host_file_cmd
# convert \$build files to toolchain format.
to_tool_file_cmd=$lt_cv_to_tool_file_cmd
# An object symbol dumper.
OBJDUMP=$lt_OBJDUMP
# Method to check whether dependent libraries are shared objects.
deplibs_check_method=$lt_deplibs_check_method
# Command to use when deplibs_check_method = "file_magic".
file_magic_cmd=$lt_file_magic_cmd
# How to find potential files when deplibs_check_method = "file_magic".
file_magic_glob=$lt_file_magic_glob
# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
want_nocaseglob=$lt_want_nocaseglob
# DLL creation program.
DLLTOOL=$lt_DLLTOOL
# Command to associate shared and link libraries.
sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
# The archiver.
AR=$lt_AR
# Flags to create an archive.
AR_FLAGS=$lt_AR_FLAGS
# How to feed a file listing to the archiver.
archiver_list_spec=$lt_archiver_list_spec
# A symbol stripping program.
STRIP=$lt_STRIP
# Commands used to install an old-style archive.
RANLIB=$lt_RANLIB
old_postinstall_cmds=$lt_old_postinstall_cmds
old_postuninstall_cmds=$lt_old_postuninstall_cmds
# Whether to use a lock for old archive extraction.
lock_old_archive_extraction=$lock_old_archive_extraction
# A C compiler.
LTCC=$lt_CC
# LTCC compiler flags.
LTCFLAGS=$lt_CFLAGS
# Take the output of nm and produce a listing of raw symbols and C names.
global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe
# Transform the output of nm in a proper C declaration.
global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl
# Transform the output of nm into a list of symbols to manually relocate.
global_symbol_to_import=$lt_lt_cv_sys_global_symbol_to_import
# Transform the output of nm in a C name address pair.
global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
# Transform the output of nm in a C name address pair when lib prefix is needed.
global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
# The name lister interface.
nm_interface=$lt_lt_cv_nm_interface
# Specify filename containing input files for \$NM.
nm_file_list_spec=$lt_nm_file_list_spec
# The root where to search for dependent libraries,and where our libraries should be installed.
lt_sysroot=$lt_sysroot
# Command to truncate a binary pipe.
lt_truncate_bin=$lt_lt_cv_truncate_bin
# The name of the directory that contains temporary libtool files.
objdir=$objdir
# Used to examine libraries when file_magic_cmd begins with "file".
MAGIC_CMD=$MAGIC_CMD
# Must we lock files when doing compilation?
need_locks=$lt_need_locks
# Manifest tool.
MANIFEST_TOOL=$lt_MANIFEST_TOOL
# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
DSYMUTIL=$lt_DSYMUTIL
# Tool to change global to local symbols on Mac OS X.
NMEDIT=$lt_NMEDIT
# Tool to manipulate fat objects and archives on Mac OS X.
LIPO=$lt_LIPO
# ldd/readelf like tool for Mach-O binaries on Mac OS X.
OTOOL=$lt_OTOOL
# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
OTOOL64=$lt_OTOOL64
# Old archive suffix (normally "a").
libext=$libext
# Shared library suffix (normally ".so").
shrext_cmds=$lt_shrext_cmds
# The commands to extract the exported symbol list from a shared archive.
extract_expsyms_cmds=$lt_extract_expsyms_cmds
# Variables whose values should be saved in libtool wrapper scripts and
# restored at link time.
variables_saved_for_relink=$lt_variables_saved_for_relink
# Do we need the "lib" prefix for modules?
need_lib_prefix=$need_lib_prefix
# Do we need a version for libraries?
need_version=$need_version
# Library versioning type.
version_type=$version_type
# Shared library runtime path variable.
runpath_var=$runpath_var
# Shared library path variable.
shlibpath_var=$shlibpath_var
# Is shlibpath searched before the hard-coded library search path?
shlibpath_overrides_runpath=$shlibpath_overrides_runpath
# Format of library name prefix.
libname_spec=$lt_libname_spec
# List of archive names. First name is the real one, the rest are links.
# The last name is the one that the linker finds with -lNAME
library_names_spec=$lt_library_names_spec
# The coded name of the library, if different from the real name.
soname_spec=$lt_soname_spec
# Permission mode override for installation of shared libraries.
install_override_mode=$lt_install_override_mode
# Command to use after installation of a shared archive.
postinstall_cmds=$lt_postinstall_cmds
# Command to use after uninstallation of a shared archive.
postuninstall_cmds=$lt_postuninstall_cmds
# Commands used to finish a libtool library installation in a directory.
finish_cmds=$lt_finish_cmds
# As "finish_cmds", except a single script fragment to be evaled but
# not shown.
finish_eval=$lt_finish_eval
# Whether we should hardcode library paths into libraries.
hardcode_into_libs=$hardcode_into_libs
# Compile-time system search path for libraries.
sys_lib_search_path_spec=$lt_sys_lib_search_path_spec
# Detected run-time system search path for libraries.
sys_lib_dlsearch_path_spec=$lt_configure_time_dlsearch_path
# Explicit LT_SYS_LIBRARY_PATH set during ./configure time.
configure_time_lt_sys_library_path=$lt_configure_time_lt_sys_library_path
# Whether dlopen is supported.
dlopen_support=$enable_dlopen
# Whether dlopen of programs is supported.
dlopen_self=$enable_dlopen_self
# Whether dlopen of statically linked programs is supported.
dlopen_self_static=$enable_dlopen_self_static
# Commands to strip libraries.
old_striplib=$lt_old_striplib
striplib=$lt_striplib
# The linker used to build libraries.
LD=$lt_LD
# How to create reloadable object files.
reload_flag=$lt_reload_flag
reload_cmds=$lt_reload_cmds
# Commands used to build an old-style archive.
old_archive_cmds=$lt_old_archive_cmds
# A language specific compiler.
CC=$lt_compiler
# Is the compiler the GNU compiler?
with_gcc=$GCC
# Compiler flag to turn off builtin functions.
no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
# Additional compiler flags for building library objects.
pic_flag=$lt_lt_prog_compiler_pic
# How to pass a linker flag through the compiler.
wl=$lt_lt_prog_compiler_wl
# Compiler flag to prevent dynamic linking.
link_static_flag=$lt_lt_prog_compiler_static
# Does compiler simultaneously support -c and -o options?
compiler_c_o=$lt_lt_cv_prog_compiler_c_o
# Whether or not to add -lc for building shared libraries.
build_libtool_need_lc=$archive_cmds_need_lc
# Whether or not to disallow shared libs when runtime libs are static.
allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes
# Compiler flag to allow reflexive dlopens.
export_dynamic_flag_spec=$lt_export_dynamic_flag_spec
# Compiler flag to generate shared objects directly from archives.
whole_archive_flag_spec=$lt_whole_archive_flag_spec
# Whether the compiler copes with passing no objects directly.
compiler_needs_object=$lt_compiler_needs_object
# Create an old-style archive from a shared archive.
old_archive_from_new_cmds=$lt_old_archive_from_new_cmds
# Create a temporary old-style archive to link instead of a shared archive.
old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds
# Commands used to build a shared archive.
archive_cmds=$lt_archive_cmds
archive_expsym_cmds=$lt_archive_expsym_cmds
# Commands used to build a loadable module if different from building
# a shared archive.
module_cmds=$lt_module_cmds
module_expsym_cmds=$lt_module_expsym_cmds
# Whether we are building with GNU ld or not.
with_gnu_ld=$lt_with_gnu_ld
# Flag that allows shared libraries with undefined symbols to be built.
allow_undefined_flag=$lt_allow_undefined_flag
# Flag that enforces no undefined symbols.
no_undefined_flag=$lt_no_undefined_flag
# Flag to hardcode \$libdir into a binary during linking.
# This must work even if \$libdir does not exist
hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec
# Whether we need a single "-rpath" flag with a separated argument.
hardcode_libdir_separator=$lt_hardcode_libdir_separator
# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes
# DIR into the resulting binary.
hardcode_direct=$hardcode_direct
# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes
# DIR into the resulting binary and the resulting library dependency is
# "absolute",i.e impossible to change by setting \$shlibpath_var if the
# library is relocated.
hardcode_direct_absolute=$hardcode_direct_absolute
# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
# into the resulting binary.
hardcode_minus_L=$hardcode_minus_L
# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
# into the resulting binary.
hardcode_shlibpath_var=$hardcode_shlibpath_var
# Set to "yes" if building a shared library automatically hardcodes DIR
# into the library and all subsequent libraries and executables linked
# against it.
hardcode_automatic=$hardcode_automatic
# Set to yes if linker adds runtime paths of dependent libraries
# to runtime path list.
inherit_rpath=$inherit_rpath
# Whether libtool must link a program against all its dependency libraries.
link_all_deplibs=$link_all_deplibs
# Set to "yes" if exported symbols are required.
always_export_symbols=$always_export_symbols
# The commands to list exported symbols.
export_symbols_cmds=$lt_export_symbols_cmds
# Symbols that should not be listed in the preloaded symbols.
exclude_expsyms=$lt_exclude_expsyms
# Symbols that must always be exported.
include_expsyms=$lt_include_expsyms
# Commands necessary for linking programs (against libraries) with templates.
prelink_cmds=$lt_prelink_cmds
# Commands necessary for finishing linking programs.
postlink_cmds=$lt_postlink_cmds
# Specify filename containing input files.
file_list_spec=$lt_file_list_spec
# How to hardcode a shared library path into an executable.
hardcode_action=$hardcode_action
# The directories searched by this compiler when creating a shared library.
compiler_lib_search_dirs=$lt_compiler_lib_search_dirs
# Dependencies to place before and after the objects being linked to
# create a shared library.
predep_objects=$lt_predep_objects
postdep_objects=$lt_postdep_objects
predeps=$lt_predeps
postdeps=$lt_postdeps
# The library search path used internally by the compiler when linking
# a shared library.
compiler_lib_search_path=$lt_compiler_lib_search_path
# ### END LIBTOOL CONFIG
_LT_EOF
# Append helper functions shared between configure and the generated
# script.  The QUOTED here-document delimiter suppresses all expansion,
# so the function bodies are copied into the output verbatim.
cat <<'_LT_EOF' >> "$cfgfile"
# ### BEGIN FUNCTIONS SHARED WITH CONFIGURE
# func_munge_path_list VARIABLE PATH
# -----------------------------------
# VARIABLE is name of variable containing _space_ separated list of
# directories to be munged by the contents of PATH, which is string
# having a format:
# "DIR[:DIR]:"
# string "DIR[ DIR]" will be prepended to VARIABLE
# ":DIR[:DIR]"
# string "DIR[ DIR]" will be appended to VARIABLE
# "DIRP[:DIRP]::[DIRA:]DIRA"
# string "DIRP[ DIRP]" will be prepended to VARIABLE and string
# "DIRA[ DIRA]" will be appended to VARIABLE
# "DIR[:DIR]"
# VARIABLE will be replaced by "DIR[ DIR]"
func_munge_path_list ()
{
case x$2 in
x)
;;
*:)
eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\"
;;
x:*)
eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\"
;;
*::*)
eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\"
eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\"
;;
*)
eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\"
;;
esac
}
# Calculate cc_basename. Skip known compiler wrappers and cross-prefix.
func_cc_basename ()
{
for cc_temp in $*""; do
case $cc_temp in
compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
\-*) ;;
*) break;;
esac
done
func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
}
# ### END FUNCTIONS SHARED WITH CONFIGURE
_LT_EOF
# On AIX 3.x, add a workaround to the generated script: clearing (and
# exporting) COLLECT_NAMES sidesteps GCC collect2 misbehavior there.
case $host_os in
aix3*)
cat <<\_LT_EOF >> "$cfgfile"
# AIX sometimes has problems with the GCC collect2 program. For some
# reason, if we set the COLLECT_NAMES environment variable, the problems
# vanish in a puff of smoke.
if test set != "${COLLECT_NAMES+set}"; then
COLLECT_NAMES=
export COLLECT_NAMES
fi
_LT_EOF
;;
esac
ltmain=$ac_aux_dir/ltmain.sh
# We use sed instead of cat because bash on DJGPP gets confused if
# it finds mixed CR/LF and LF-only lines. Since sed operates in
# text mode, it properly converts lines to CR/LF. This bash problem
# is reportedly fixed, but why not run on old versions too?
sed '$q' "$ltmain" >> "$cfgfile" \
|| (rm -f "$cfgfile"; exit 1)
# Install the finished script in place of $ofile (falling back to
# copy-then-remove if rename fails) and make it executable.
mv -f "$cfgfile" "$ofile" ||
(rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
chmod +x "$ofile"
# Append the C++ tag configuration to the installed script.  Unquoted
# here-document: the $lt_*_CXX variables are expanded now.
cat <<_LT_EOF >> "$ofile"
# ### BEGIN LIBTOOL TAG CONFIG: CXX
# The linker used to build libraries.
LD=$lt_LD_CXX
# How to create reloadable object files.
reload_flag=$lt_reload_flag_CXX
reload_cmds=$lt_reload_cmds_CXX
# Commands used to build an old-style archive.
old_archive_cmds=$lt_old_archive_cmds_CXX
# A language specific compiler.
CC=$lt_compiler_CXX
# Is the compiler the GNU compiler?
with_gcc=$GCC_CXX
# Compiler flag to turn off builtin functions.
no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX
# Additional compiler flags for building library objects.
pic_flag=$lt_lt_prog_compiler_pic_CXX
# How to pass a linker flag through the compiler.
wl=$lt_lt_prog_compiler_wl_CXX
# Compiler flag to prevent dynamic linking.
link_static_flag=$lt_lt_prog_compiler_static_CXX
# Does compiler simultaneously support -c and -o options?
compiler_c_o=$lt_lt_cv_prog_compiler_c_o_CXX
# Whether or not to add -lc for building shared libraries.
build_libtool_need_lc=$archive_cmds_need_lc_CXX
# Whether or not to disallow shared libs when runtime libs are static.
allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_CXX
# Compiler flag to allow reflexive dlopens.
export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_CXX
# Compiler flag to generate shared objects directly from archives.
whole_archive_flag_spec=$lt_whole_archive_flag_spec_CXX
# Whether the compiler copes with passing no objects directly.
compiler_needs_object=$lt_compiler_needs_object_CXX
# Create an old-style archive from a shared archive.
old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_CXX
# Create a temporary old-style archive to link instead of a shared archive.
old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_CXX
# Commands used to build a shared archive.
archive_cmds=$lt_archive_cmds_CXX
archive_expsym_cmds=$lt_archive_expsym_cmds_CXX
# Commands used to build a loadable module if different from building
# a shared archive.
module_cmds=$lt_module_cmds_CXX
module_expsym_cmds=$lt_module_expsym_cmds_CXX
# Whether we are building with GNU ld or not.
with_gnu_ld=$lt_with_gnu_ld_CXX
# Flag that allows shared libraries with undefined symbols to be built.
allow_undefined_flag=$lt_allow_undefined_flag_CXX
# Flag that enforces no undefined symbols.
no_undefined_flag=$lt_no_undefined_flag_CXX
# Flag to hardcode \$libdir into a binary during linking.
# This must work even if \$libdir does not exist
hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_CXX
# Whether we need a single "-rpath" flag with a separated argument.
hardcode_libdir_separator=$lt_hardcode_libdir_separator_CXX
# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes
# DIR into the resulting binary.
hardcode_direct=$hardcode_direct_CXX
# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes
# DIR into the resulting binary and the resulting library dependency is
# "absolute",i.e impossible to change by setting \$shlibpath_var if the
# library is relocated.
hardcode_direct_absolute=$hardcode_direct_absolute_CXX
# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
# into the resulting binary.
hardcode_minus_L=$hardcode_minus_L_CXX
# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
# into the resulting binary.
hardcode_shlibpath_var=$hardcode_shlibpath_var_CXX
# Set to "yes" if building a shared library automatically hardcodes DIR
# into the library and all subsequent libraries and executables linked
# against it.
hardcode_automatic=$hardcode_automatic_CXX
# Set to yes if linker adds runtime paths of dependent libraries
# to runtime path list.
inherit_rpath=$inherit_rpath_CXX
# Whether libtool must link a program against all its dependency libraries.
link_all_deplibs=$link_all_deplibs_CXX
# Set to "yes" if exported symbols are required.
always_export_symbols=$always_export_symbols_CXX
# The commands to list exported symbols.
export_symbols_cmds=$lt_export_symbols_cmds_CXX
# Symbols that should not be listed in the preloaded symbols.
exclude_expsyms=$lt_exclude_expsyms_CXX
# Symbols that must always be exported.
include_expsyms=$lt_include_expsyms_CXX
# Commands necessary for linking programs (against libraries) with templates.
prelink_cmds=$lt_prelink_cmds_CXX
# Commands necessary for finishing linking programs.
postlink_cmds=$lt_postlink_cmds_CXX
# Specify filename containing input files.
file_list_spec=$lt_file_list_spec_CXX
# How to hardcode a shared library path into an executable.
hardcode_action=$hardcode_action_CXX
# The directories searched by this compiler when creating a shared library.
compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_CXX
# Dependencies to place before and after the objects being linked to
# create a shared library.
predep_objects=$lt_predep_objects_CXX
postdep_objects=$lt_postdep_objects_CXX
predeps=$lt_predeps_CXX
postdeps=$lt_postdeps_CXX
# The library search path used internally by the compiler when linking
# a shared library.
compiler_lib_search_path=$lt_compiler_lib_search_path_CXX
# ### END LIBTOOL TAG CONFIG: CXX
_LT_EOF
# Append the Fortran 77 tag configuration, mirroring the CXX tag above
# with the $lt_*_F77 variable set.
cat <<_LT_EOF >> "$ofile"
# ### BEGIN LIBTOOL TAG CONFIG: F77
# The linker used to build libraries.
LD=$lt_LD_F77
# How to create reloadable object files.
reload_flag=$lt_reload_flag_F77
reload_cmds=$lt_reload_cmds_F77
# Commands used to build an old-style archive.
old_archive_cmds=$lt_old_archive_cmds_F77
# A language specific compiler.
CC=$lt_compiler_F77
# Is the compiler the GNU compiler?
with_gcc=$GCC_F77
# Compiler flag to turn off builtin functions.
no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_F77
# Additional compiler flags for building library objects.
pic_flag=$lt_lt_prog_compiler_pic_F77
# How to pass a linker flag through the compiler.
wl=$lt_lt_prog_compiler_wl_F77
# Compiler flag to prevent dynamic linking.
link_static_flag=$lt_lt_prog_compiler_static_F77
# Does compiler simultaneously support -c and -o options?
compiler_c_o=$lt_lt_cv_prog_compiler_c_o_F77
# Whether or not to add -lc for building shared libraries.
build_libtool_need_lc=$archive_cmds_need_lc_F77
# Whether or not to disallow shared libs when runtime libs are static.
allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_F77
# Compiler flag to allow reflexive dlopens.
export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_F77
# Compiler flag to generate shared objects directly from archives.
whole_archive_flag_spec=$lt_whole_archive_flag_spec_F77
# Whether the compiler copes with passing no objects directly.
compiler_needs_object=$lt_compiler_needs_object_F77
# Create an old-style archive from a shared archive.
old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_F77
# Create a temporary old-style archive to link instead of a shared archive.
old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_F77
# Commands used to build a shared archive.
archive_cmds=$lt_archive_cmds_F77
archive_expsym_cmds=$lt_archive_expsym_cmds_F77
# Commands used to build a loadable module if different from building
# a shared archive.
module_cmds=$lt_module_cmds_F77
module_expsym_cmds=$lt_module_expsym_cmds_F77
# Whether we are building with GNU ld or not.
with_gnu_ld=$lt_with_gnu_ld_F77
# Flag that allows shared libraries with undefined symbols to be built.
allow_undefined_flag=$lt_allow_undefined_flag_F77
# Flag that enforces no undefined symbols.
no_undefined_flag=$lt_no_undefined_flag_F77
# Flag to hardcode \$libdir into a binary during linking.
# This must work even if \$libdir does not exist
hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_F77
# Whether we need a single "-rpath" flag with a separated argument.
hardcode_libdir_separator=$lt_hardcode_libdir_separator_F77
# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes
# DIR into the resulting binary.
hardcode_direct=$hardcode_direct_F77
# Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes
# DIR into the resulting binary and the resulting library dependency is
# "absolute",i.e impossible to change by setting \$shlibpath_var if the
# library is relocated.
hardcode_direct_absolute=$hardcode_direct_absolute_F77
# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
# into the resulting binary.
hardcode_minus_L=$hardcode_minus_L_F77
# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
# into the resulting binary.
hardcode_shlibpath_var=$hardcode_shlibpath_var_F77
# Set to "yes" if building a shared library automatically hardcodes DIR
# into the library and all subsequent libraries and executables linked
# against it.
hardcode_automatic=$hardcode_automatic_F77
# Set to yes if linker adds runtime paths of dependent libraries
# to runtime path list.
inherit_rpath=$inherit_rpath_F77
# Whether libtool must link a program against all its dependency libraries.
link_all_deplibs=$link_all_deplibs_F77
# Set to "yes" if exported symbols are required.
always_export_symbols=$always_export_symbols_F77
# The commands to list exported symbols.
export_symbols_cmds=$lt_export_symbols_cmds_F77
# Symbols that should not be listed in the preloaded symbols.
exclude_expsyms=$lt_exclude_expsyms_F77
# Symbols that must always be exported.
include_expsyms=$lt_include_expsyms_F77
# Commands necessary for linking programs (against libraries) with templates.
prelink_cmds=$lt_prelink_cmds_F77
# Commands necessary for finishing linking programs.
postlink_cmds=$lt_postlink_cmds_F77
# Specify filename containing input files.
file_list_spec=$lt_file_list_spec_F77
# How to hardcode a shared library path into an executable.
hardcode_action=$hardcode_action_F77
# The directories searched by this compiler when creating a shared library.
compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_F77
# Dependencies to place before and after the objects being linked to
# create a shared library.
predep_objects=$lt_predep_objects_F77
postdep_objects=$lt_postdep_objects_F77
predeps=$lt_predeps_F77
postdeps=$lt_postdeps_F77
# The library search path used internally by the compiler when linking
# a shared library.
compiler_lib_search_path=$lt_compiler_lib_search_path_F77
# ### END LIBTOOL TAG CONFIG: F77
_LT_EOF
;;
esac
done # for ac_tag
as_fn_exit 0