diff -Nru liblognorm-0.3.7/aclocal.m4 liblognorm-1.0.1/aclocal.m4 --- liblognorm-0.3.7/aclocal.m4 2013-07-18 07:37:53.000000000 +0000 +++ liblognorm-1.0.1/aclocal.m4 2014-04-11 04:30:14.000000000 +0000 @@ -1,8 +1,7 @@ -# generated automatically by aclocal 1.11.3 -*- Autoconf -*- +# generated automatically by aclocal 1.13.4 -*- Autoconf -*- + +# Copyright (C) 1996-2013 Free Software Foundation, Inc. -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, -# 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, -# Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. @@ -12,13 +11,14 @@ # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. +m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl -m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.68],, -[m4_warning([this file was generated for autoconf 2.68. +m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, +[m4_warning([this file was generated for autoconf 2.69. You have another version of autoconf. It may work, but is not guaranteed to. If you have problems, you may need to regenerate the build system entirely. -To do so, use the procedure documented by the package, typically `autoreconf'.])]) +To do so, use the procedure documented by the package, typically 'autoreconf'.])]) # pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*- # serial 1 (pkg-config-0.24) @@ -180,25 +180,62 @@ fi[]dnl ])# PKG_CHECK_MODULES -# Copyright (C) 2002, 2003, 2005, 2006, 2007, 2008, 2011 Free Software -# Foundation, Inc. + +# PKG_INSTALLDIR(DIRECTORY) +# ------------------------- +# Substitutes the variable pkgconfigdir as the location where a module +# should install pkg-config .pc files. By default the directory is +# $libdir/pkgconfig, but the default can be changed by passing +# DIRECTORY. The user can override through the --with-pkgconfigdir +# parameter. +AC_DEFUN([PKG_INSTALLDIR], +[m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])]) +m4_pushdef([pkg_description], + [pkg-config installation directory @<:@]pkg_default[@:>@]) +AC_ARG_WITH([pkgconfigdir], + [AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],, + [with_pkgconfigdir=]pkg_default) +AC_SUBST([pkgconfigdir], [$with_pkgconfigdir]) +m4_popdef([pkg_default]) +m4_popdef([pkg_description]) +]) dnl PKG_INSTALLDIR + + +# PKG_NOARCH_INSTALLDIR(DIRECTORY) +# ------------------------- +# Substitutes the variable noarch_pkgconfigdir as the location where a +# module should install arch-independent pkg-config .pc files. By +# default the directory is $datadir/pkgconfig, but the default can be +# changed by passing DIRECTORY. The user can override through the +# --with-noarch-pkgconfigdir parameter. 
+AC_DEFUN([PKG_NOARCH_INSTALLDIR], +[m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])]) +m4_pushdef([pkg_description], + [pkg-config arch-independent installation directory @<:@]pkg_default[@:>@]) +AC_ARG_WITH([noarch-pkgconfigdir], + [AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],, + [with_noarch_pkgconfigdir=]pkg_default) +AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir]) +m4_popdef([pkg_default]) +m4_popdef([pkg_description]) +]) dnl PKG_NOARCH_INSTALLDIR + +# Copyright (C) 2002-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 1 - # AM_AUTOMAKE_VERSION(VERSION) # ---------------------------- # Automake X.Y traces this macro to ensure aclocal.m4 has been # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) AC_DEFUN([AM_AUTOMAKE_VERSION], -[am__api_version='1.11' +[am__api_version='1.13' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. -m4_if([$1], [1.11.3], [], +m4_if([$1], [1.13.4], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) @@ -214,24 +251,22 @@ # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], -[AM_AUTOMAKE_VERSION([1.11.3])dnl +[AM_AUTOMAKE_VERSION([1.13.4])dnl m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) # AM_AUX_DIR_EXPAND -*- Autoconf -*- -# Copyright (C) 2001, 2003, 2005, 2011 Free Software Foundation, Inc. +# Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 1 - # For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets -# $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to -# `$srcdir', `$srcdir/..', or `$srcdir/../..'. +# $ac_aux_dir to '$srcdir/foo'. In other projects, it is set to +# '$srcdir', '$srcdir/..', or '$srcdir/../..'. # # Of course, Automake must honor this variable whenever it calls a # tool from the auxiliary directory. The problem is that $srcdir (and @@ -250,7 +285,7 @@ # # The reason of the latter failure is that $top_srcdir and $ac_aux_dir # are both prefixed by $srcdir. In an in-source build this is usually -# harmless because $srcdir is `.', but things will broke when you +# harmless because $srcdir is '.', but things will broke when you # start a VPATH build or use an absolute $srcdir. # # So we could use something similar to $top_srcdir/$ac_aux_dir/missing, @@ -276,22 +311,19 @@ # AM_CONDITIONAL -*- Autoconf -*- -# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006, 2008 -# Free Software Foundation, Inc. +# Copyright (C) 1997-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 9 - # AM_CONDITIONAL(NAME, SHELL-CONDITION) # ------------------------------------- # Define a conditional. 
AC_DEFUN([AM_CONDITIONAL], -[AC_PREREQ(2.52)dnl - ifelse([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], - [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl +[AC_PREREQ([2.52])dnl + m4_if([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], + [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl AC_SUBST([$1_TRUE])dnl AC_SUBST([$1_FALSE])dnl _AM_SUBST_NOTMAKE([$1_TRUE])dnl @@ -310,16 +342,14 @@ Usually this means the macro was only invoked conditionally.]]) fi])]) -# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, -# 2010, 2011 Free Software Foundation, Inc. +# Copyright (C) 1999-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 12 -# There are a few dirty hacks below to avoid letting `AC_PROG_CC' be +# There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be # written in clear, in which case automake, when reading aclocal.m4, # will think it sees a *use*, and therefore will trigger all it's # C support machinery. Also note that it means that autoscan, seeing @@ -329,7 +359,7 @@ # _AM_DEPENDENCIES(NAME) # ---------------------- # See how the compiler implements dependency checking. -# NAME is "CC", "CXX", "GCJ", or "OBJC". +# NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GJC". # We try a few techniques and use that to set a single cache variable. # # We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was @@ -342,12 +372,13 @@ AC_REQUIRE([AM_MAKE_INCLUDE])dnl AC_REQUIRE([AM_DEP_TRACK])dnl -ifelse([$1], CC, [depcc="$CC" am_compiler_list=], - [$1], CXX, [depcc="$CXX" am_compiler_list=], - [$1], OBJC, [depcc="$OBJC" am_compiler_list='gcc3 gcc'], - [$1], UPC, [depcc="$UPC" am_compiler_list=], - [$1], GCJ, [depcc="$GCJ" am_compiler_list='gcc3 gcc'], - [depcc="$$1" am_compiler_list=]) +m4_if([$1], [CC], [depcc="$CC" am_compiler_list=], + [$1], [CXX], [depcc="$CXX" am_compiler_list=], + [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'], + [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'], + [$1], [UPC], [depcc="$UPC" am_compiler_list=], + [$1], [GCJ], [depcc="$GCJ" am_compiler_list='gcc3 gcc'], + [depcc="$$1" am_compiler_list=]) AC_CACHE_CHECK([dependency style of $depcc], [am_cv_$1_dependencies_compiler_type], @@ -355,8 +386,8 @@ # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up - # making a dummy file named `D' -- because `-MD' means `put the output - # in D'. + # making a dummy file named 'D' -- because '-MD' means "put the output + # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're @@ -396,16 +427,16 @@ : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c - # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with - # Solaris 8's {/usr,}/bin/sh. - touch sub/conftst$i.h + # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with + # Solaris 10 /bin/sh. + echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf - # We check with `-c' and `-o' for the sake of the "dashmstdout" + # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. 
It turns out that the SunPro C++ compiler does not properly - # handle `-M -o', and we need to detect this. Also, some Intel - # versions had trouble with output in subdirs + # handle '-M -o', and we need to detect this. Also, some Intel + # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in @@ -414,8 +445,8 @@ test "$am__universal" = false || continue ;; nosideeffect) - # after this tag, mechanisms are not by side-effect, so they'll - # only be used when explicitly requested + # After this tag, mechanisms are not by side-effect, so they'll + # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else @@ -423,7 +454,7 @@ fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) - # This compiler won't grok `-c -o', but also, the minuso test has + # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} @@ -471,7 +502,7 @@ # AM_SET_DEPDIR # ------------- # Choose a directory name for dependency files. -# This macro is AC_REQUIREd in _AM_DEPENDENCIES +# This macro is AC_REQUIREd in _AM_DEPENDENCIES. AC_DEFUN([AM_SET_DEPDIR], [AC_REQUIRE([AM_SET_LEADING_DOT])dnl AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl @@ -481,9 +512,13 @@ # AM_DEP_TRACK # ------------ AC_DEFUN([AM_DEP_TRACK], -[AC_ARG_ENABLE(dependency-tracking, -[ --disable-dependency-tracking speeds up one-time build - --enable-dependency-tracking do not reject slow dependency extractors]) +[AC_ARG_ENABLE([dependency-tracking], [dnl +AS_HELP_STRING( + [--enable-dependency-tracking], + [do not reject slow dependency extractors]) +AS_HELP_STRING( + [--disable-dependency-tracking], + [speeds up one-time build])]) if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' @@ -498,20 +533,18 @@ # Generate code to set up dependency tracking. -*- Autoconf -*- -# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2008 -# Free Software Foundation, Inc. +# Copyright (C) 1999-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -#serial 5 # _AM_OUTPUT_DEPENDENCY_COMMANDS # ------------------------------ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], [{ - # Autoconf 2.62 quotes --file arguments for eval, but not when files + # Older Autoconf quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in @@ -524,7 +557,7 @@ # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. - # We used to match only the files named `Makefile.in', but + # We used to match only the files named 'Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. @@ -536,21 +569,19 @@ continue fi # Extract the definition of DEPDIR, am__include, and am__quote - # from the Makefile without running `make'. + # from the Makefile without running 'make'. 
DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` - test -z "am__include" && continue + test -z "$am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` - # When using ansi2knr, U may be empty or an underscore; expand it - U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ - sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do + sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`AS_DIRNAME(["$file"])` @@ -568,7 +599,7 @@ # This macro should only be invoked once -- use via AC_REQUIRE. # # This code is only required when automatic dependency tracking -# is enabled. FIXME. This creates each `.P' file that we will +# is enabled. FIXME. This creates each '.P' file that we will # need in order to bootstrap the dependency handling code. AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], [AC_CONFIG_COMMANDS([depfiles], @@ -578,15 +609,12 @@ # Do all the work for Automake. -*- Autoconf -*- -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, -# 2005, 2006, 2008, 2009 Free Software Foundation, Inc. +# Copyright (C) 1996-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 16 - # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. @@ -602,7 +630,7 @@ # arguments mandatory, and then we can depend on a new Autoconf # release and drop the old call support. AC_DEFUN([AM_INIT_AUTOMAKE], -[AC_PREREQ([2.62])dnl +[AC_PREREQ([2.65])dnl dnl Autoconf wants to disallow AM_ names. We explicitly allow dnl the ones we care about. m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl @@ -631,31 +659,40 @@ # Define the identity of the package. dnl Distinguish between old-style and new-style calls. m4_ifval([$2], -[m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl +[AC_DIAGNOSE([obsolete], + [$0: two- and three-arguments forms are deprecated.]) +m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl AC_SUBST([PACKAGE], [$1])dnl AC_SUBST([VERSION], [$2])], [_AM_SET_OPTIONS([$1])dnl dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. -m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,, +m4_if( + m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]), + [ok:ok],, [m4_fatal([AC_INIT should be called with package and version arguments])])dnl AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl _AM_IF_OPTION([no-define],, -[AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package]) - AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl +[AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package]) + AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl # Some tools Automake needs. 
AC_REQUIRE([AM_SANITY_CHECK])dnl AC_REQUIRE([AC_ARG_PROGRAM])dnl -AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version}) -AM_MISSING_PROG(AUTOCONF, autoconf) -AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version}) -AM_MISSING_PROG(AUTOHEADER, autoheader) -AM_MISSING_PROG(MAKEINFO, makeinfo) +AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}]) +AM_MISSING_PROG([AUTOCONF], [autoconf]) +AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}]) +AM_MISSING_PROG([AUTOHEADER], [autoheader]) +AM_MISSING_PROG([MAKEINFO], [makeinfo]) AC_REQUIRE([AM_PROG_INSTALL_SH])dnl AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl -AC_REQUIRE([AM_PROG_MKDIR_P])dnl +AC_REQUIRE([AC_PROG_MKDIR_P])dnl +# For better backward compatibility. To be removed once Automake 1.9.x +# dies out for good. For more background, see: +# +# +AC_SUBST([mkdir_p], ['$(MKDIR_P)']) # We need awk for the "check" target. The system "awk" is bad on # some platforms. AC_REQUIRE([AC_PROG_AWK])dnl @@ -666,28 +703,32 @@ [_AM_PROG_TAR([v7])])]) _AM_IF_OPTION([no-dependencies],, [AC_PROVIDE_IFELSE([AC_PROG_CC], - [_AM_DEPENDENCIES(CC)], - [define([AC_PROG_CC], - defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl + [_AM_DEPENDENCIES([CC])], + [m4_define([AC_PROG_CC], + m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl AC_PROVIDE_IFELSE([AC_PROG_CXX], - [_AM_DEPENDENCIES(CXX)], - [define([AC_PROG_CXX], - defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl + [_AM_DEPENDENCIES([CXX])], + [m4_define([AC_PROG_CXX], + m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJC], - [_AM_DEPENDENCIES(OBJC)], - [define([AC_PROG_OBJC], - defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl + [_AM_DEPENDENCIES([OBJC])], + [m4_define([AC_PROG_OBJC], + m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl +AC_PROVIDE_IFELSE([AC_PROG_OBJCXX], + [_AM_DEPENDENCIES([OBJCXX])], + [m4_define([AC_PROG_OBJCXX], + m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl ]) -_AM_IF_OPTION([silent-rules], [AC_REQUIRE([AM_SILENT_RULES])])dnl -dnl The `parallel-tests' driver may need to know about EXEEXT, so add the -dnl `am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This macro -dnl is hooked onto _AC_COMPILER_EXEEXT early, see below. +AC_REQUIRE([AM_SILENT_RULES])dnl +dnl The testsuite driver may need to know about EXEEXT, so add the +dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This +dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below. AC_CONFIG_COMMANDS_PRE(dnl [m4_provide_if([_AM_COMPILER_EXEEXT], [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl ]) -dnl Hook into `_AC_COMPILER_EXEEXT' early to learn its expansion. Do not +dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further dnl mangled by Autoconf and run in a shell conditional statement. m4_define([_AC_COMPILER_EXEEXT], @@ -715,15 +756,12 @@ done echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) -# Copyright (C) 2001, 2003, 2005, 2008, 2011 Free Software Foundation, -# Inc. +# Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 1 - # AM_PROG_INSTALL_SH # ------------------ # Define $install_sh. 
@@ -737,16 +775,14 @@ install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi -AC_SUBST(install_sh)]) +AC_SUBST([install_sh])]) -# Copyright (C) 2003, 2005 Free Software Foundation, Inc. +# Copyright (C) 2003-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 2 - # Check whether the underlying file-system supports filenames # with a leading dot. For instance MS-DOS doesn't. AC_DEFUN([AM_SET_LEADING_DOT], @@ -762,14 +798,12 @@ # Check to see how 'make' treats includes. -*- Autoconf -*- -# Copyright (C) 2001, 2002, 2003, 2005, 2009 Free Software Foundation, Inc. +# Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 4 - # AM_MAKE_INCLUDE() # ----------------- # Check to see how make treats includes. @@ -787,7 +821,7 @@ _am_result=none # First try GNU make style include. echo "include confinc" > confmf -# Ignore all kinds of additional output from `make'. +# Ignore all kinds of additional output from 'make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include @@ -812,15 +846,12 @@ rm -f confinc confmf ]) -# Copyright (C) 1999, 2000, 2001, 2003, 2004, 2005, 2008 -# Free Software Foundation, Inc. +# Copyright (C) 1999-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 6 - # AM_PROG_CC_C_O # -------------- # Like AC_PROG_CC_C_O, but changed for automake. @@ -849,15 +880,12 @@ # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- -# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005, 2008 -# Free Software Foundation, Inc. +# Copyright (C) 1997-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 6 - # AM_MISSING_PROG(NAME, PROGRAM) # ------------------------------ AC_DEFUN([AM_MISSING_PROG], @@ -865,11 +893,10 @@ $1=${$1-"${am_missing_run}$2"} AC_SUBST($1)]) - # AM_MISSING_HAS_RUN # ------------------ -# Define MISSING if not defined so far and test if it supports --run. -# If it does, set am_missing_run to use it, otherwise, to nothing. +# Define MISSING if not defined so far and test if it is modern enough. +# If it is, set am_missing_run to use it, otherwise, to nothing. AC_DEFUN([AM_MISSING_HAS_RUN], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([missing])dnl @@ -882,54 +909,22 @@ esac fi # Use eval to expand $SHELL -if eval "$MISSING --run true"; then - am_missing_run="$MISSING --run " +if eval "$MISSING --is-lightweight"; then + am_missing_run="$MISSING " else am_missing_run= - AC_MSG_WARN([`missing' script is too old or missing]) + AC_MSG_WARN(['missing' script is too old or missing]) fi ]) -# Copyright (C) 2003, 2004, 2005, 2006, 2011 Free Software Foundation, -# Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. 
- -# serial 1 - -# AM_PROG_MKDIR_P -# --------------- -# Check for `mkdir -p'. -AC_DEFUN([AM_PROG_MKDIR_P], -[AC_PREREQ([2.60])dnl -AC_REQUIRE([AC_PROG_MKDIR_P])dnl -dnl Automake 1.8 to 1.9.6 used to define mkdir_p. We now use MKDIR_P, -dnl while keeping a definition of mkdir_p for backward compatibility. -dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile. -dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of -dnl Makefile.ins that do not define MKDIR_P, so we do our own -dnl adjustment using top_builddir (which is defined more often than -dnl MKDIR_P). -AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl -case $mkdir_p in - [[\\/$]]* | ?:[[\\/]]*) ;; - */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; -esac -]) - # Helper functions for option handling. -*- Autoconf -*- -# Copyright (C) 2001, 2002, 2003, 2005, 2008, 2010 Free Software -# Foundation, Inc. +# Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 5 - # _AM_MANGLE_OPTION(NAME) # ----------------------- AC_DEFUN([_AM_MANGLE_OPTION], @@ -939,7 +934,7 @@ # -------------------- # Set option NAME. Presently that only means defining a flag for this option. AC_DEFUN([_AM_SET_OPTION], -[m4_define(_AM_MANGLE_OPTION([$1]), 1)]) +[m4_define(_AM_MANGLE_OPTION([$1]), [1])]) # _AM_SET_OPTIONS(OPTIONS) # ------------------------ @@ -955,22 +950,16 @@ # Check to make sure that the build environment is sane. -*- Autoconf -*- -# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005, 2008 -# Free Software Foundation, Inc. +# Copyright (C) 1996-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 5 - # AM_SANITY_CHECK # --------------- AC_DEFUN([AM_SANITY_CHECK], [AC_MSG_CHECKING([whether build environment is sane]) -# Just in case -sleep 1 -echo timestamp > conftest.file # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' @@ -981,32 +970,40 @@ esac case $srcdir in *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) - AC_MSG_ERROR([unsafe srcdir value: `$srcdir']);; + AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);; esac -# Do `set' in a subshell so we don't clobber the current shell's +# Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( - set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` - if test "$[*]" = "X"; then - # -L didn't work. - set X `ls -t "$srcdir/configure" conftest.file` - fi - rm -f conftest.file - if test "$[*]" != "X $srcdir/configure conftest.file" \ - && test "$[*]" != "X conftest.file $srcdir/configure"; then - - # If neither matched, then we have a broken ls. This can happen - # if, for instance, CONFIG_SHELL is bash and it inherits a - # broken ls alias from the environment. This has actually - # happened. Such a system could not be considered "sane". - AC_MSG_ERROR([ls -t appears to fail. 
Make sure there is not a broken -alias in your environment]) - fi - + am_has_slept=no + for am_try in 1 2; do + echo "timestamp, slept: $am_has_slept" > conftest.file + set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` + if test "$[*]" = "X"; then + # -L didn't work. + set X `ls -t "$srcdir/configure" conftest.file` + fi + if test "$[*]" != "X $srcdir/configure conftest.file" \ + && test "$[*]" != "X conftest.file $srcdir/configure"; then + + # If neither matched, then we have a broken ls. This can happen + # if, for instance, CONFIG_SHELL is bash and it inherits a + # broken ls alias from the environment. This has actually + # happened. Such a system could not be considered "sane". + AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken + alias in your environment]) + fi + if test "$[2]" = conftest.file || test $am_try -eq 2; then + break + fi + # Just in case. + sleep 1 + am_has_slept=yes + done test "$[2]" = conftest.file ) then @@ -1016,31 +1013,50 @@ AC_MSG_ERROR([newly created file is older than distributed files! Check your system clock]) fi -AC_MSG_RESULT(yes)]) +AC_MSG_RESULT([yes]) +# If we didn't sleep, we still need to ensure time stamps of config.status and +# generated files are strictly newer. +am_sleep_pid= +if grep 'slept: no' conftest.file >/dev/null 2>&1; then + ( sleep 1 ) & + am_sleep_pid=$! +fi +AC_CONFIG_COMMANDS_PRE( + [AC_MSG_CHECKING([that generated files are newer than configure]) + if test -n "$am_sleep_pid"; then + # Hide warnings about reused PIDs. + wait $am_sleep_pid 2>/dev/null + fi + AC_MSG_RESULT([done])]) +rm -f conftest.file +]) -# Copyright (C) 2009, 2011 Free Software Foundation, Inc. +# Copyright (C) 2009-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 2 - # AM_SILENT_RULES([DEFAULT]) # -------------------------- # Enable less verbose build rules; with the default set to DEFAULT -# (`yes' being less verbose, `no' or empty being verbose). +# ("yes" being less verbose, "no" or empty being verbose). AC_DEFUN([AM_SILENT_RULES], -[AC_ARG_ENABLE([silent-rules], -[ --enable-silent-rules less verbose build output (undo: `make V=1') - --disable-silent-rules verbose build output (undo: `make V=0')]) -case $enable_silent_rules in -yes) AM_DEFAULT_VERBOSITY=0;; -no) AM_DEFAULT_VERBOSITY=1;; -*) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);; +[AC_ARG_ENABLE([silent-rules], [dnl +AS_HELP_STRING( + [--enable-silent-rules], + [less verbose build output (undo: "make V=1")]) +AS_HELP_STRING( + [--disable-silent-rules], + [verbose build output (undo: "make V=0")])dnl +]) +case $enable_silent_rules in @%:@ ((( + yes) AM_DEFAULT_VERBOSITY=0;; + no) AM_DEFAULT_VERBOSITY=1;; + *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);; esac dnl -dnl A few `make' implementations (e.g., NonStop OS and NextStep) +dnl A few 'make' implementations (e.g., NonStop OS and NextStep) dnl do not support nested variable expansions. dnl See automake bug#9928 and bug#10237. am_make=${MAKE-make} @@ -1058,7 +1074,7 @@ am_cv_make_support_nested_variables=no fi]) if test $am_cv_make_support_nested_variables = yes; then - dnl Using `$V' instead of `$(V)' breaks IRIX make. + dnl Using '$V' instead of '$(V)' breaks IRIX make. 
AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else @@ -1075,44 +1091,40 @@ _AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl ]) -# Copyright (C) 2001, 2003, 2005, 2011 Free Software Foundation, Inc. +# Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 1 - # AM_PROG_INSTALL_STRIP # --------------------- -# One issue with vendor `install' (even GNU) is that you can't +# One issue with vendor 'install' (even GNU) is that you can't # specify the program used to strip binaries. This is especially # annoying in cross-compiling environments, where the build's strip # is unlikely to handle the host's binaries. # Fortunately install-sh will honor a STRIPPROG variable, so we -# always use install-sh in `make install-strip', and initialize +# always use install-sh in "make install-strip", and initialize # STRIPPROG with the value of the STRIP variable (set by the user). AC_DEFUN([AM_PROG_INSTALL_STRIP], [AC_REQUIRE([AM_PROG_INSTALL_SH])dnl -# Installed binaries are usually stripped using `strip' when the user -# run `make install-strip'. However `strip' might not be the right +# Installed binaries are usually stripped using 'strip' when the user +# run "make install-strip". However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake -# will honor the `STRIP' environment variable to overrule this program. -dnl Don't test for $cross_compiling = yes, because it might be `maybe'. +# will honor the 'STRIP' environment variable to overrule this program. +dnl Don't test for $cross_compiling = yes, because it might be 'maybe'. if test "$cross_compiling" != no; then AC_CHECK_TOOL([STRIP], [strip], :) fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" AC_SUBST([INSTALL_STRIP_PROGRAM])]) -# Copyright (C) 2006, 2008, 2010 Free Software Foundation, Inc. +# Copyright (C) 2006-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 3 - # _AM_SUBST_NOTMAKE(VARIABLE) # --------------------------- # Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. @@ -1126,18 +1138,16 @@ # Check how to create a tarball. -*- Autoconf -*- -# Copyright (C) 2004, 2005, 2012 Free Software Foundation, Inc. +# Copyright (C) 2004-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 2 - # _AM_PROG_TAR(FORMAT) # -------------------- # Check how to create a tarball in format FORMAT. -# FORMAT should be one of `v7', `ustar', or `pax'. +# FORMAT should be one of 'v7', 'ustar', or 'pax'. # # Substitute a variable $(am__tar) that is a command # writing to stdout a FORMAT-tarball containing the directory @@ -1147,76 +1157,114 @@ # Substitute a variable $(am__untar) that extract such # a tarball read from stdin. # $(am__untar) < result.tar +# AC_DEFUN([_AM_PROG_TAR], [# Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... 
AC_SUBST([AMTAR], ['$${TAR-tar}']) -m4_if([$1], [v7], - [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], - [m4_case([$1], [ustar],, [pax],, - [m4_fatal([Unknown tar format])]) -AC_MSG_CHECKING([how to create a $1 tar archive]) -# Loop over all known methods to create a tar archive until one works. + +# We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' -_am_tools=${am_cv_prog_tar_$1-$_am_tools} -# Do not fold the above two line into one, because Tru64 sh and -# Solaris sh will not grok spaces in the rhs of `-'. -for _am_tool in $_am_tools -do - case $_am_tool in - gnutar) - for _am_tar in tar gnutar gtar; - do - AM_RUN_LOG([$_am_tar --version]) && break - done - am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' - am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' - am__untar="$_am_tar -xf -" - ;; - plaintar) - # Must skip GNU tar: if it does not support --format= it doesn't create - # ustar tarball either. - (tar --version) >/dev/null 2>&1 && continue - am__tar='tar chf - "$$tardir"' - am__tar_='tar chf - "$tardir"' - am__untar='tar xf -' - ;; - pax) - am__tar='pax -L -x $1 -w "$$tardir"' - am__tar_='pax -L -x $1 -w "$tardir"' - am__untar='pax -r' - ;; - cpio) - am__tar='find "$$tardir" -print | cpio -o -H $1 -L' - am__tar_='find "$tardir" -print | cpio -o -H $1 -L' - am__untar='cpio -i -H $1 -d' - ;; - none) - am__tar=false - am__tar_=false - am__untar=false - ;; - esac - # If the value was cached, stop now. We just wanted to have am__tar - # and am__untar set. - test -n "${am_cv_prog_tar_$1}" && break +m4_if([$1], [v7], + [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], - # tar/untar a dummy directory, and stop if the command works - rm -rf conftest.dir - mkdir conftest.dir - echo GrepMe > conftest.dir/file - AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) + [m4_case([$1], + [ustar], + [# The POSIX 1988 'ustar' format is defined with fixed-size fields. + # There is notably a 21 bits limit for the UID and the GID. In fact, + # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 + # and bug#13588). + am_max_uid=2097151 # 2^21 - 1 + am_max_gid=$am_max_uid + # The $UID and $GID variables are not portable, so we need to resort + # to the POSIX-mandated id(1) utility. Errors in the 'id' calls + # below are definitely unexpected, so allow the users to see them + # (that is, avoid stderr redirection). + am_uid=`id -u || echo unknown` + am_gid=`id -g || echo unknown` + AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format]) + if test $am_uid -le $am_max_uid; then + AC_MSG_RESULT([yes]) + else + AC_MSG_RESULT([no]) + _am_tools=none + fi + AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format]) + if test $am_gid -le $am_max_gid; then + AC_MSG_RESULT([yes]) + else + AC_MSG_RESULT([no]) + _am_tools=none + fi], + + [pax], + [], + + [m4_fatal([Unknown tar format])]) + + AC_MSG_CHECKING([how to create a $1 tar archive]) + + # Go ahead even if we have the value already cached. We do so because we + # need to set the values for the 'am__tar' and 'am__untar' variables. 
+ _am_tools=${am_cv_prog_tar_$1-$_am_tools} + + for _am_tool in $_am_tools; do + case $_am_tool in + gnutar) + for _am_tar in tar gnutar gtar; do + AM_RUN_LOG([$_am_tar --version]) && break + done + am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' + am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' + am__untar="$_am_tar -xf -" + ;; + plaintar) + # Must skip GNU tar: if it does not support --format= it doesn't create + # ustar tarball either. + (tar --version) >/dev/null 2>&1 && continue + am__tar='tar chf - "$$tardir"' + am__tar_='tar chf - "$tardir"' + am__untar='tar xf -' + ;; + pax) + am__tar='pax -L -x $1 -w "$$tardir"' + am__tar_='pax -L -x $1 -w "$tardir"' + am__untar='pax -r' + ;; + cpio) + am__tar='find "$$tardir" -print | cpio -o -H $1 -L' + am__tar_='find "$tardir" -print | cpio -o -H $1 -L' + am__untar='cpio -i -H $1 -d' + ;; + none) + am__tar=false + am__tar_=false + am__untar=false + ;; + esac + + # If the value was cached, stop now. We just wanted to have am__tar + # and am__untar set. + test -n "${am_cv_prog_tar_$1}" && break + + # tar/untar a dummy directory, and stop if the command works. + rm -rf conftest.dir + mkdir conftest.dir + echo GrepMe > conftest.dir/file + AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) + rm -rf conftest.dir + if test -s conftest.tar; then + AM_RUN_LOG([$am__untar /dev/null 2>&1 && break + fi + done rm -rf conftest.dir - if test -s conftest.tar; then - AM_RUN_LOG([$am__untar /dev/null 2>&1 && break - fi -done -rm -rf conftest.dir -AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) -AC_MSG_RESULT([$am_cv_prog_tar_$1])]) + AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) + AC_MSG_RESULT([$am_cv_prog_tar_$1])]) + AC_SUBST([am__tar]) AC_SUBST([am__untar]) ]) # _AM_PROG_TAR diff -Nru liblognorm-0.3.7/ChangeLog liblognorm-1.0.1/ChangeLog --- liblognorm-0.3.7/ChangeLog 2013-07-18 07:37:32.000000000 +0000 +++ liblognorm-1.0.1/ChangeLog 2014-04-11 04:29:46.000000000 +0000 @@ -1,4 +1,33 @@ ---------------------------------------------------------------------- +Version 1.0.1, 2014-04-11 +- improved doc (via RST/Sphinx) +- bugfix: unparsed fields were copied incorrectly from non-terminated + string. Thanks to Josh Blum for the fix. +- bugfix: mandatory tag did not work in lognormalizer +---------------------------------------------------------------------- +Version 1.0.0, 2013-11-28 +- WARNING: this version has incompatible interface and older programs + will not compile with it. + For details see http://www.liblognorm.com/news/on-liblognorm-1-0-0/ +- libestr is not used any more in interface functions. Traditional + C strings are used instead. Internally, libestr is still used, but + scheduled for removal. +- libee is not used any more. JSON-C is used for object handling + instead. Parsers and formatters are now part of liblognorm. +- added new field type "rest", which simply sinks all up to end of + the string. +- added support for glueing two fields together, without literal + between them. 
It allows for constructs like: + %volume:number%%unit:word% + which matches string "1000Kbps" +- Fix incorrect merging of trees with empty literal at end + Thanks to Pavel Levshin for the patch +- this version has survived many bugfixes +---------------------------------------------------------------------- +================================================================================ +The versions below is liblognorm0, which has a different API +================================================================================ +---------------------------------------------------------------------- Version 0.3.7, 2013-07-17 - added support to load single samples Thanks to John Hopper for the patch diff -Nru liblognorm-0.3.7/compile liblognorm-1.0.1/compile --- liblognorm-0.3.7/compile 2012-04-04 09:16:38.000000000 +0000 +++ liblognorm-1.0.1/compile 2014-04-11 04:30:18.000000000 +0000 @@ -1,10 +1,9 @@ #! /bin/sh # Wrapper for compilers which do not understand '-c -o'. -scriptversion=2012-01-04.17; # UTC +scriptversion=2012-10-14.11; # UTC -# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2009, 2010, 2012 Free -# Software Foundation, Inc. +# Copyright (C) 1999-2013 Free Software Foundation, Inc. # Written by Tom Tromey . # # This program is free software; you can redistribute it and/or modify @@ -79,6 +78,53 @@ esac } +# func_cl_dashL linkdir +# Make cl look for libraries in LINKDIR +func_cl_dashL () +{ + func_file_conv "$1" + if test -z "$lib_path"; then + lib_path=$file + else + lib_path="$lib_path;$file" + fi + linker_opts="$linker_opts -LIBPATH:$file" +} + +# func_cl_dashl library +# Do a library search-path lookup for cl +func_cl_dashl () +{ + lib=$1 + found=no + save_IFS=$IFS + IFS=';' + for dir in $lib_path $LIB + do + IFS=$save_IFS + if $shared && test -f "$dir/$lib.dll.lib"; then + found=yes + lib=$dir/$lib.dll.lib + break + fi + if test -f "$dir/$lib.lib"; then + found=yes + lib=$dir/$lib.lib + break + fi + if test -f "$dir/lib$lib.a"; then + found=yes + lib=$dir/lib$lib.a + break + fi + done + IFS=$save_IFS + + if test "$found" != yes; then + lib=$lib.lib + fi +} + # func_cl_wrapper cl arg... # Adjust compile command to suit cl func_cl_wrapper () @@ -109,43 +155,34 @@ ;; esac ;; + -I) + eat=1 + func_file_conv "$2" mingw + set x "$@" -I"$file" + shift + ;; -I*) func_file_conv "${1#-I}" mingw set x "$@" -I"$file" shift ;; + -l) + eat=1 + func_cl_dashl "$2" + set x "$@" "$lib" + shift + ;; -l*) - lib=${1#-l} - found=no - save_IFS=$IFS - IFS=';' - for dir in $lib_path $LIB - do - IFS=$save_IFS - if $shared && test -f "$dir/$lib.dll.lib"; then - found=yes - set x "$@" "$dir/$lib.dll.lib" - break - fi - if test -f "$dir/$lib.lib"; then - found=yes - set x "$@" "$dir/$lib.lib" - break - fi - done - IFS=$save_IFS - - test "$found" != yes && set x "$@" "$lib.lib" + func_cl_dashl "${1#-l}" + set x "$@" "$lib" shift ;; + -L) + eat=1 + func_cl_dashL "$2" + ;; -L*) - func_file_conv "${1#-L}" - if test -z "$lib_path"; then - lib_path=$file - else - lib_path="$lib_path;$file" - fi - linker_opts="$linker_opts -LIBPATH:$file" + func_cl_dashL "${1#-L}" ;; -static) shared=false diff -Nru liblognorm-0.3.7/config.guess liblognorm-1.0.1/config.guess --- liblognorm-0.3.7/config.guess 2012-04-04 09:16:38.000000000 +0000 +++ liblognorm-1.0.1/config.guess 2014-04-11 04:30:18.000000000 +0000 @@ -1,14 +1,12 @@ #! /bin/sh # Attempt to guess a canonical system name. 
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, -# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -# 2011, 2012 Free Software Foundation, Inc. +# Copyright 1992-2013 Free Software Foundation, Inc. -timestamp='2012-02-10' +timestamp='2013-06-10' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or +# the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but @@ -22,19 +20,17 @@ # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - - -# Originally written by Per Bothner. Please send patches (context -# diff format) to and include a ChangeLog -# entry. +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). # -# This script attempts to guess a canonical system name similar to -# config.sub. If it succeeds, it prints the system name on stdout, and -# exits with 0. Otherwise, it exits with 1. +# Originally written by Per Bothner. # # You can get the latest version of this script from: # http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD +# +# Please send patches with a ChangeLog entry to config-patches@gnu.org. + me=`echo "$0" | sed -e 's,.*/,,'` @@ -54,9 +50,7 @@ GNU config.guess ($timestamp) Originally written by Per Bothner. -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, -2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 -Free Software Foundation, Inc. +Copyright 1992-2013 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." @@ -138,6 +132,27 @@ UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown +case "${UNAME_SYSTEM}" in +Linux|GNU|GNU/*) + # If the system lacks a compiler, then just pick glibc. + # We could probably try harder. + LIBC=gnu + + eval $set_cc_for_build + cat <<-EOF > $dummy.c + #include + #if defined(__UCLIBC__) + LIBC=uclibc + #elif defined(__dietlibc__) + LIBC=dietlibc + #else + LIBC=gnu + #endif + EOF + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'` + ;; +esac + # Note: order is significant - the case branches are not exclusive. case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in @@ -200,6 +215,10 @@ # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. 
echo "${machine}-${os}${release}" exit ;; + *:Bitrig:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` + echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE} + exit ;; *:OpenBSD:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} @@ -302,7 +321,7 @@ arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) echo arm-acorn-riscix${UNAME_RELEASE} exit ;; - arm:riscos:*:*|arm:RISCOS:*:*) + arm*:riscos:*:*|arm*:RISCOS:*:*) echo arm-unknown-riscos exit ;; SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) @@ -801,6 +820,9 @@ i*:CYGWIN*:*) echo ${UNAME_MACHINE}-pc-cygwin exit ;; + *:MINGW64*:*) + echo ${UNAME_MACHINE}-pc-mingw64 + exit ;; *:MINGW*:*) echo ${UNAME_MACHINE}-pc-mingw32 exit ;; @@ -852,21 +874,21 @@ exit ;; *:GNU:*:*) # the GNU system - echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` + echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` exit ;; *:GNU/*:*:*) # other systems with GNU libc and userland - echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu + echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC} exit ;; i*86:Minix:*:*) echo ${UNAME_MACHINE}-pc-minix exit ;; aarch64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; aarch64_be:Linux:*:*) UNAME_MACHINE=aarch64_be - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; alpha:Linux:*:*) case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in @@ -879,59 +901,54 @@ EV68*) UNAME_MACHINE=alphaev68 ;; esac objdump --private-headers /bin/sh | grep -q ld.so.1 - if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi - echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} + if test "$?" 
= 0 ; then LIBC="gnulibc1" ; fi + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + arc:Linux:*:* | arceb:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; arm*:Linux:*:*) eval $set_cc_for_build if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_EABI__ then - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} else if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_PCS_VFP then - echo ${UNAME_MACHINE}-unknown-linux-gnueabi + echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi else - echo ${UNAME_MACHINE}-unknown-linux-gnueabihf + echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf fi fi exit ;; avr32*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; cris:Linux:*:*) - echo ${UNAME_MACHINE}-axis-linux-gnu + echo ${UNAME_MACHINE}-axis-linux-${LIBC} exit ;; crisv32:Linux:*:*) - echo ${UNAME_MACHINE}-axis-linux-gnu + echo ${UNAME_MACHINE}-axis-linux-${LIBC} exit ;; frv:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; hexagon:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; i*86:Linux:*:*) - LIBC=gnu - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #ifdef __dietlibc__ - LIBC=dietlibc - #endif -EOF - eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'` - echo "${UNAME_MACHINE}-pc-linux-${LIBC}" + echo ${UNAME_MACHINE}-pc-linux-${LIBC} exit ;; ia64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; m32r*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; m68*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; mips:Linux:*:* | mips64:Linux:*:*) eval $set_cc_for_build @@ -950,54 +967,63 @@ #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` - test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } + test x"${CPU}" != x && { echo "${CPU}-unknown-linux-${LIBC}"; exit; } ;; + or1k:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; or32:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; padre:Linux:*:*) - echo sparc-unknown-linux-gnu + echo sparc-unknown-linux-${LIBC} exit ;; parisc64:Linux:*:* | hppa64:Linux:*:*) - echo hppa64-unknown-linux-gnu + echo hppa64-unknown-linux-${LIBC} exit ;; parisc:Linux:*:* | hppa:Linux:*:*) # Look for CPU level case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in - PA7*) echo hppa1.1-unknown-linux-gnu ;; - PA8*) echo hppa2.0-unknown-linux-gnu ;; - *) echo hppa-unknown-linux-gnu ;; + PA7*) echo hppa1.1-unknown-linux-${LIBC} ;; + PA8*) echo hppa2.0-unknown-linux-${LIBC} ;; + *) echo hppa-unknown-linux-${LIBC} ;; esac exit ;; ppc64:Linux:*:*) - echo powerpc64-unknown-linux-gnu + echo powerpc64-unknown-linux-${LIBC} exit ;; ppc:Linux:*:*) - echo powerpc-unknown-linux-gnu + echo powerpc-unknown-linux-${LIBC} + exit ;; + ppc64le:Linux:*:*) + echo powerpc64le-unknown-linux-${LIBC} + exit ;; + ppcle:Linux:*:*) + echo powerpcle-unknown-linux-${LIBC} exit ;; s390:Linux:*:* | s390x:Linux:*:*) - echo ${UNAME_MACHINE}-ibm-linux + echo ${UNAME_MACHINE}-ibm-linux-${LIBC} exit ;; sh64*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; sh*:Linux:*:*) - echo 
${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; sparc:Linux:*:* | sparc64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; tile*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; vax:Linux:*:*) - echo ${UNAME_MACHINE}-dec-linux-gnu + echo ${UNAME_MACHINE}-dec-linux-${LIBC} exit ;; x86_64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; xtensa*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; i*86:DYNIX/ptx:4*:*) # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. @@ -1201,6 +1227,9 @@ BePC:Haiku:*:*) # Haiku running on Intel PC compatible. echo i586-pc-haiku exit ;; + x86_64:Haiku:*:*) + echo x86_64-unknown-haiku + exit ;; SX-4:SUPER-UX:*:*) echo sx4-nec-superux${UNAME_RELEASE} exit ;; @@ -1227,19 +1256,21 @@ exit ;; *:Darwin:*:*) UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown - case $UNAME_PROCESSOR in - i386) - eval $set_cc_for_build - if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then - if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - UNAME_PROCESSOR="x86_64" - fi - fi ;; - unknown) UNAME_PROCESSOR=powerpc ;; - esac + eval $set_cc_for_build + if test "$UNAME_PROCESSOR" = unknown ; then + UNAME_PROCESSOR=powerpc + fi + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + case $UNAME_PROCESSOR in + i386) UNAME_PROCESSOR=x86_64 ;; + powerpc) UNAME_PROCESSOR=powerpc64 ;; + esac + fi + fi echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} exit ;; *:procnto*:*:* | *:QNX:[0123456789]*:*) @@ -1256,7 +1287,7 @@ NEO-?:NONSTOP_KERNEL:*:*) echo neo-tandem-nsk${UNAME_RELEASE} exit ;; - NSE-?:NONSTOP_KERNEL:*:*) + NSE-*:NONSTOP_KERNEL:*:*) echo nse-tandem-nsk${UNAME_RELEASE} exit ;; NSR-?:NONSTOP_KERNEL:*:*) @@ -1330,9 +1361,6 @@ exit ;; esac -#echo '(No uname command or uname output not recognized.)' 1>&2 -#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2 - eval $set_cc_for_build cat >$dummy.c <. @@ -26,11 +20,12 @@ # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). -# Please send patches to . Submit a context -# diff and a properly formatted GNU ChangeLog entry. +# Please send patches with a ChangeLog entry to config-patches@gnu.org. # # Configuration subroutine to validate and canonicalize a configuration type. # Supply the specified configuration type as an argument. @@ -73,9 +68,7 @@ version="\ GNU config.sub ($timestamp) -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, -2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 -Free Software Foundation, Inc. +Copyright 1992-2013 Free Software Foundation, Inc. This is free software; see the source for copying conditions. 
There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." @@ -123,7 +116,7 @@ maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` case $maybe_os in nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ - linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ + linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ knetbsd*-gnu* | netbsd*-gnu* | \ kopensolaris*-gnu* | \ storm-chaos* | os2-emx* | rtmk-nova*) @@ -156,7 +149,7 @@ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ - -apple | -axis | -knuth | -cray | -microblaze) + -apple | -axis | -knuth | -cray | -microblaze*) os= basic_machine=$1 ;; @@ -225,6 +218,12 @@ -isc*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; + -lynx*178) + os=-lynxos178 + ;; + -lynx*5) + os=-lynxos5 + ;; -lynx*) os=-lynxos ;; @@ -253,8 +252,10 @@ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ | am33_2.0 \ - | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \ - | be32 | be64 \ + | arc | arceb \ + | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ + | avr | avr32 \ + | be32 | be64 \ | bfin \ | c4x | clipper \ | d10v | d30v | dlx | dsp16xx \ @@ -267,7 +268,7 @@ | le32 | le64 \ | lm32 \ | m32c | m32r | m32rle | m68000 | m68k | m88k \ - | maxq | mb | microblaze | mcore | mep | metag \ + | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ | mips | mipsbe | mipseb | mipsel | mipsle \ | mips16 \ | mips64 | mips64el \ @@ -285,16 +286,17 @@ | mipsisa64r2 | mipsisa64r2el \ | mipsisa64sb1 | mipsisa64sb1el \ | mipsisa64sr71k | mipsisa64sr71kel \ + | mipsr5900 | mipsr5900el \ | mipstx39 | mipstx39el \ | mn10200 | mn10300 \ | moxie \ | mt \ | msp430 \ | nds32 | nds32le | nds32be \ - | nios | nios2 \ + | nios | nios2 | nios2eb | nios2el \ | ns16k | ns32k \ | open8 \ - | or32 \ + | or1k | or32 \ | pdp10 | pdp11 | pj | pjl \ | powerpc | powerpc64 | powerpc64le | powerpcle \ | pyramid \ @@ -364,7 +366,7 @@ | aarch64-* | aarch64_be-* \ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ - | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ + | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ | avr-* | avr32-* \ | be32-* | be64-* \ @@ -383,7 +385,8 @@ | lm32-* \ | m32c-* | m32r-* | m32rle-* \ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ - | m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \ + | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \ + | microblaze-* | microblazeel-* \ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ | mips16-* \ | mips64-* | mips64el-* \ @@ -401,12 +404,13 @@ | mipsisa64r2-* | mipsisa64r2el-* \ | mipsisa64sb1-* | mipsisa64sb1el-* \ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ + | mipsr5900-* | mipsr5900el-* \ | mipstx39-* | mipstx39el-* \ | mmix-* \ | mt-* \ | msp430-* \ | nds32-* | nds32le-* | nds32be-* \ - | nios-* | nios2-* \ + | nios-* | nios2-* | nios2eb-* | nios2el-* \ | none-* | np1-* | ns16k-* | ns32k-* \ | open8-* \ | orion-* \ @@ -782,9 +786,13 @@ basic_machine=ns32k-utek os=-sysv ;; - microblaze) + microblaze*) basic_machine=microblaze-xilinx ;; + mingw64) + 
basic_machine=x86_64-pc + os=-mingw64 + ;; mingw32) basic_machine=i386-pc os=-mingw32 @@ -998,7 +1006,7 @@ ;; ppc64) basic_machine=powerpc64-unknown ;; - ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` + ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64le | powerpc64little | ppc64-le | powerpc64-little) basic_machine=powerpc64le-unknown @@ -1013,7 +1021,11 @@ basic_machine=i586-unknown os=-pw32 ;; - rdos) + rdos | rdos64) + basic_machine=x86_64-pc + os=-rdos + ;; + rdos32) basic_machine=i386-pc os=-rdos ;; @@ -1340,21 +1352,21 @@ -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ - | -sym* | -kopensolaris* \ + | -sym* | -kopensolaris* | -plan9* \ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ | -aos* | -aros* \ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ - | -openbsd* | -solidbsd* \ + | -bitrig* | -openbsd* | -solidbsd* \ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ | -chorusos* | -chorusrdb* | -cegcc* \ | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ - | -mingw32* | -linux-gnu* | -linux-android* \ - | -linux-newlib* | -linux-uclibc* \ + | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ + | -linux-newlib* | -linux-musl* | -linux-uclibc* \ | -uxpv* | -beos* | -mpeix* | -udk* \ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ @@ -1486,9 +1498,6 @@ -aros*) os=-aros ;; - -kaos*) - os=-kaos - ;; -zvmoe) os=-zvmoe ;; @@ -1537,6 +1546,9 @@ c4x-* | tic4x-*) os=-coff ;; + hexagon-*) + os=-elf + ;; tic54x-*) os=-coff ;; @@ -1577,6 +1589,9 @@ mips*-*) os=-elf ;; + or1k-*) + os=-elf + ;; or32-*) os=-coff ;; diff -Nru liblognorm-0.3.7/configure liblognorm-1.0.1/configure --- liblognorm-0.3.7/configure 2013-07-18 07:37:57.000000000 +0000 +++ liblognorm-1.0.1/configure 2014-04-11 04:30:16.000000000 +0000 @@ -1,13 +1,11 @@ #! /bin/sh # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.68 for liblognorm 0.3.7. +# Generated by GNU Autoconf 2.69 for liblognorm 1.0.1. # # Report bugs to . # # -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, -# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software -# Foundation, Inc. +# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. # # # This configure script is free software; the Free Software Foundation @@ -136,6 +134,31 @@ # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. 
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +as_fn_exit 255 + fi + # We don't want this to propagate to other subprocesses. + { _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh @@ -169,7 +192,8 @@ else exitcode=1; echo positional parameters were not saved. fi -test x\$exitcode = x0 || exit 1" +test x\$exitcode = x0 || exit 1 +test -x / || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && @@ -222,21 +246,25 @@ if test "x$CONFIG_SHELL" != x; then : - # We cannot yet assume a decent shell, so we have to provide a - # neutralization value for shells without unset; and this also - # works around shells that cannot unset nonexistent variables. - # Preserve -v and -x to the replacement shell. - BASH_ENV=/dev/null - ENV=/dev/null - (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV - export CONFIG_SHELL - case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; - esac - exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"} + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 fi if test x$as_have_required = xno; then : @@ -339,6 +367,14 @@ } # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take @@ -460,6 +496,10 @@ chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). @@ -494,16 +534,16 @@ # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. 
# 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. + # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -p' + as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null @@ -515,28 +555,8 @@ as_mkdir_p=false fi -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x +as_test_x='test -x' +as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -570,8 +590,8 @@ # Identity of this package. PACKAGE_NAME='liblognorm' PACKAGE_TARNAME='liblognorm' -PACKAGE_VERSION='0.3.7' -PACKAGE_STRING='liblognorm 0.3.7' +PACKAGE_VERSION='1.0.1' +PACKAGE_STRING='liblognorm 1.0.1' PACKAGE_BUGREPORT='rgerhards@adiscon.com' PACKAGE_URL='' @@ -615,8 +635,11 @@ ac_subst_vars='am__EXEEXT_FALSE am__EXEEXT_TRUE LTLIBOBJS -LIBEE_LIBS -LIBEE_CFLAGS +ENABLE_DOCS_FALSE +ENABLE_DOCS_TRUE +SPHINXBUILD +JSON_C_LIBS +JSON_C_CFLAGS LIBESTR_LIBS LIBESTR_CFLAGS PKG_CONFIG_LIBDIR @@ -750,6 +773,7 @@ with_sysroot enable_libtool_lock enable_debug +enable_docs ' ac_precious_vars='build_alias host_alias @@ -765,8 +789,8 @@ PKG_CONFIG_LIBDIR LIBESTR_CFLAGS LIBESTR_LIBS -LIBEE_CFLAGS -LIBEE_LIBS' +JSON_C_CFLAGS +JSON_C_LIBS' # Initialize some variables set by options. @@ -1222,8 +1246,6 @@ if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe - $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host. - If a cross compiler is detected then cross compile mode will be used" >&2 elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi @@ -1309,7 +1331,7 @@ # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF -\`configure' configures liblognorm 0.3.7 to adapt to many kinds of systems. +\`configure' configures liblognorm 1.0.1 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... 
@@ -1379,7 +1401,7 @@ if test -n "$ac_init_help"; then case $ac_init_help in - short | recursive ) echo "Configuration of liblognorm 0.3.7:";; + short | recursive ) echo "Configuration of liblognorm 1.0.1:";; esac cat <<\_ACEOF @@ -1387,16 +1409,19 @@ --disable-option-checking ignore unrecognized --enable/--with options --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] - --enable-silent-rules less verbose build output (undo: `make V=1') - --disable-silent-rules verbose build output (undo: `make V=0') - --disable-dependency-tracking speeds up one-time build - --enable-dependency-tracking do not reject slow dependency extractors + --enable-silent-rules less verbose build output (undo: "make V=1") + --disable-silent-rules verbose build output (undo: "make V=0") + --enable-dependency-tracking + do not reject slow dependency extractors + --disable-dependency-tracking + speeds up one-time build --enable-shared[=PKGS] build shared libraries [default=yes] --enable-static[=PKGS] build static libraries [default=yes] --enable-fast-install[=PKGS] optimize for fast installation [default=yes] --disable-libtool-lock avoid locking (might break parallel builds) --enable-debug Enable debug mode [default=no] + --disable-docs Disable building HTML docs (requires Sphinx) Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] @@ -1425,9 +1450,9 @@ C compiler flags for LIBESTR, overriding pkg-config LIBESTR_LIBS linker flags for LIBESTR, overriding pkg-config - LIBEE_CFLAGS - C compiler flags for LIBEE, overriding pkg-config - LIBEE_LIBS linker flags for LIBEE, overriding pkg-config + JSON_C_CFLAGS + C compiler flags for JSON_C, overriding pkg-config + JSON_C_LIBS linker flags for JSON_C, overriding pkg-config Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. @@ -1495,10 +1520,10 @@ test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF -liblognorm configure 0.3.7 -generated by GNU Autoconf 2.68 +liblognorm configure 1.0.1 +generated by GNU Autoconf 2.69 -Copyright (C) 2010 Free Software Foundation, Inc. +Copyright (C) 2012 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF @@ -1574,7 +1599,7 @@ test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || - $as_test_x conftest$ac_exeext + test -x conftest$ac_exeext }; then : ac_retval=0 else @@ -1918,8 +1943,8 @@ This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. -It was created by liblognorm $as_me 0.3.7, which was -generated by GNU Autoconf 2.68. Invocation command line was +It was created by liblognorm $as_me 1.0.1, which was +generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -2266,7 +2291,7 @@ ac_compiler_gnu=$ac_cv_c_compiler_gnu -am__api_version='1.11' +am__api_version='1.13' ac_aux_dir= for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do @@ -2334,7 +2359,7 @@ # by default. 
for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. @@ -2392,9 +2417,6 @@ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 $as_echo_n "checking whether build environment is sane... " >&6; } -# Just in case -sleep 1 -echo timestamp > conftest.file # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' @@ -2405,32 +2427,40 @@ esac case $srcdir in *[\\\"\#\$\&\'\`$am_lf\ \ ]*) - as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;; + as_fn_error $? "unsafe srcdir value: '$srcdir'" "$LINENO" 5;; esac -# Do `set' in a subshell so we don't clobber the current shell's +# Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( - set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` - if test "$*" = "X"; then - # -L didn't work. - set X `ls -t "$srcdir/configure" conftest.file` - fi - rm -f conftest.file - if test "$*" != "X $srcdir/configure conftest.file" \ - && test "$*" != "X conftest.file $srcdir/configure"; then - - # If neither matched, then we have a broken ls. This can happen - # if, for instance, CONFIG_SHELL is bash and it inherits a - # broken ls alias from the environment. This has actually - # happened. Such a system could not be considered "sane". - as_fn_error $? "ls -t appears to fail. Make sure there is not a broken -alias in your environment" "$LINENO" 5 - fi + am_has_slept=no + for am_try in 1 2; do + echo "timestamp, slept: $am_has_slept" > conftest.file + set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` + if test "$*" = "X"; then + # -L didn't work. + set X `ls -t "$srcdir/configure" conftest.file` + fi + if test "$*" != "X $srcdir/configure conftest.file" \ + && test "$*" != "X conftest.file $srcdir/configure"; then + # If neither matched, then we have a broken ls. This can happen + # if, for instance, CONFIG_SHELL is bash and it inherits a + # broken ls alias from the environment. This has actually + # happened. Such a system could not be considered "sane". + as_fn_error $? "ls -t appears to fail. Make sure there is not a broken + alias in your environment" "$LINENO" 5 + fi + if test "$2" = conftest.file || test $am_try -eq 2; then + break + fi + # Just in case. + sleep 1 + am_has_slept=yes + done test "$2" = conftest.file ) then @@ -2442,6 +2472,16 @@ fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } +# If we didn't sleep, we still need to ensure time stamps of config.status and +# generated files are strictly newer. +am_sleep_pid= +if grep 'slept: no' conftest.file >/dev/null 2>&1; then + ( sleep 1 ) & + am_sleep_pid=$! +fi + +rm -f conftest.file + test "$program_prefix" != NONE && program_transform_name="s&^&$program_prefix&;$program_transform_name" # Use a double $ so make ignores it. 
@@ -2464,12 +2504,12 @@ esac fi # Use eval to expand $SHELL -if eval "$MISSING --run true"; then - am_missing_run="$MISSING --run " +if eval "$MISSING --is-lightweight"; then + am_missing_run="$MISSING " else am_missing_run= - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5 -$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5 +$as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} fi if test x"${install_sh}" != xset; then @@ -2481,10 +2521,10 @@ esac fi -# Installed binaries are usually stripped using `strip' when the user -# run `make install-strip'. However `strip' might not be the right +# Installed binaries are usually stripped using 'strip' when the user +# run "make install-strip". However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake -# will honor the `STRIP' environment variable to overrule this program. +# will honor the 'STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. @@ -2503,7 +2543,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2543,7 +2583,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2594,7 +2634,7 @@ test -z "$as_dir" && as_dir=. for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do - { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue + as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ @@ -2623,12 +2663,6 @@ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 $as_echo "$MKDIR_P" >&6; } -mkdir_p="$MKDIR_P" -case $mkdir_p in - [\\/$]* | ?:[\\/]*) ;; - */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; -esac - for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. @@ -2647,7 +2681,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2711,6 +2745,45 @@ fi rmdir .tst 2>/dev/null +# Check whether --enable-silent-rules was given. 
+if test "${enable_silent_rules+set}" = set; then : + enableval=$enable_silent_rules; +fi + +case $enable_silent_rules in # ((( + yes) AM_DEFAULT_VERBOSITY=0;; + no) AM_DEFAULT_VERBOSITY=1;; + *) AM_DEFAULT_VERBOSITY=1;; +esac +am_make=${MAKE-make} +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5 +$as_echo_n "checking whether $am_make supports nested variables... " >&6; } +if ${am_cv_make_support_nested_variables+:} false; then : + $as_echo_n "(cached) " >&6 +else + if $as_echo 'TRUE=$(BAR$(V)) +BAR0=false +BAR1=true +V=1 +am__doit: + @$(TRUE) +.PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then + am_cv_make_support_nested_variables=yes +else + am_cv_make_support_nested_variables=no +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5 +$as_echo "$am_cv_make_support_nested_variables" >&6; } +if test $am_cv_make_support_nested_variables = yes; then + AM_V='$(V)' + AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' +else + AM_V=$AM_DEFAULT_VERBOSITY + AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY +fi +AM_BACKSLASH='\' + if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." @@ -2733,7 +2806,7 @@ # Define the identity of the package. PACKAGE='liblognorm' - VERSION='0.3.7' + VERSION='1.0.1' cat >>confdefs.h <<_ACEOF @@ -2761,27 +2834,38 @@ MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} +# For better backward compatibility. To be removed once Automake 1.9.x +# dies out for good. For more background, see: +# +# +mkdir_p='$(MKDIR_P)' + # We need awk for the "check" target. The system "awk" is bad on # some platforms. # Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AMTAR='$${TAR-tar}' + +# We'll loop over all known methods to create a tar archive until one works. +_am_tools='gnutar pax cpio none' + am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' + # Check whether --enable-silent-rules was given. if test "${enable_silent_rules+set}" = set; then : enableval=$enable_silent_rules; fi -case $enable_silent_rules in -yes) AM_DEFAULT_VERBOSITY=0;; -no) AM_DEFAULT_VERBOSITY=1;; -*) AM_DEFAULT_VERBOSITY=0;; +case $enable_silent_rules in # ((( + yes) AM_DEFAULT_VERBOSITY=0;; + no) AM_DEFAULT_VERBOSITY=1;; + *) AM_DEFAULT_VERBOSITY=0;; esac am_make=${MAKE-make} { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5 @@ -2839,7 +2923,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2879,7 +2963,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2932,7 +3016,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -2973,7 +3057,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue @@ -3031,7 +3115,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3075,7 +3159,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -3521,8 +3605,7 @@ /* end confdefs.h. */ #include #include -#include -#include +struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); @@ -3625,7 +3708,7 @@ _am_result=none # First try GNU make style include. echo "include confinc" > confmf -# Ignore all kinds of additional output from `make'. +# Ignore all kinds of additional output from 'make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include @@ -3681,8 +3764,8 @@ # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up - # making a dummy file named `D' -- because `-MD' means `put the output - # in D'. + # making a dummy file named 'D' -- because '-MD' means "put the output + # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're @@ -3717,16 +3800,16 @@ : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c - # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with - # Solaris 8's {/usr,}/bin/sh. - touch sub/conftst$i.h + # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with + # Solaris 10 /bin/sh. + echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf - # We check with `-c' and `-o' for the sake of the "dashmstdout" + # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly - # handle `-M -o', and we need to detect this. Also, some Intel - # versions had trouble with output in subdirs + # handle '-M -o', and we need to detect this. Also, some Intel + # versions had trouble with output in subdirs. 
am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in @@ -3735,8 +3818,8 @@ test "$am__universal" = false || continue ;; nosideeffect) - # after this tag, mechanisms are not by side-effect, so they'll - # only be used when explicitly requested + # After this tag, mechanisms are not by side-effect, so they'll + # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else @@ -3744,7 +3827,7 @@ fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) - # This compiler won't grok `-c -o', but also, the minuso test has + # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} @@ -3924,10 +4007,186 @@ fi + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C99" >&5 +$as_echo_n "checking for $CC option to accept ISO C99... " >&6; } +if ${ac_cv_prog_cc_c99+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_prog_cc_c99=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#include +#include +#include + +// Check varargs macros. These examples are taken from C99 6.10.3.5. +#define debug(...) fprintf (stderr, __VA_ARGS__) +#define showlist(...) puts (#__VA_ARGS__) +#define report(test,...) ((test) ? puts (#test) : printf (__VA_ARGS__)) +static void +test_varargs_macros (void) +{ + int x = 1234; + int y = 5678; + debug ("Flag"); + debug ("X = %d\n", x); + showlist (The first, second, and third items.); + report (x>y, "x is %d but y is %d", x, y); +} + +// Check long long types. +#define BIG64 18446744073709551615ull +#define BIG32 4294967295ul +#define BIG_OK (BIG64 / BIG32 == 4294967297ull && BIG64 % BIG32 == 0) +#if !BIG_OK + your preprocessor is broken; +#endif +#if BIG_OK +#else + your preprocessor is broken; +#endif +static long long int bignum = -9223372036854775807LL; +static unsigned long long int ubignum = BIG64; + +struct incomplete_array +{ + int datasize; + double data[]; +}; + +struct named_init { + int number; + const wchar_t *name; + double average; +}; + +typedef const char *ccp; + +static inline int +test_restrict (ccp restrict text) +{ + // See if C++-style comments work. + // Iterate through items via the restricted pointer. + // Also check for declarations in for loops. + for (unsigned int i = 0; *(text+i) != '\0'; ++i) + continue; + return 0; +} + +// Check varargs and va_copy. +static void +test_varargs (const char *format, ...) +{ + va_list args; + va_start (args, format); + va_list args_copy; + va_copy (args_copy, args); + + const char *str; + int number; + float fnumber; + + while (*format) + { + switch (*format++) + { + case 's': // string + str = va_arg (args_copy, const char *); + break; + case 'd': // int + number = va_arg (args_copy, int); + break; + case 'f': // float + fnumber = va_arg (args_copy, double); + break; + default: + break; + } + } + va_end (args_copy); + va_end (args); +} + +int +main () +{ + + // Check bool. + _Bool success = false; + + // Check restrict. + if (test_restrict ("String literal") == 0) + success = true; + char *restrict newvar = "Another string"; + + // Check varargs. + test_varargs ("s, d' f .", "string", 65, 34.234); + test_varargs_macros (); + + // Check flexible array members. 
+ struct incomplete_array *ia = + malloc (sizeof (struct incomplete_array) + (sizeof (double) * 10)); + ia->datasize = 10; + for (int i = 0; i < ia->datasize; ++i) + ia->data[i] = i * 1.234; + + // Check named initializers. + struct named_init ni = { + .number = 34, + .name = L"Test wide string", + .average = 543.34343, + }; + + ni.number = 58; + + int dynamic_array[ni.number]; + dynamic_array[ni.number - 1] = 543; + + // work around unused variable warnings + return (!success || bignum == 0LL || ubignum == 0uLL || newvar[0] == 'x' + || dynamic_array[ni.number - 1] != 543); + + ; + return 0; +} +_ACEOF +for ac_arg in '' -std=gnu99 -std=c99 -c99 -AC99 -D_STDC_C99= -qlanglvl=extc99 +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_c99=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext + test "x$ac_cv_prog_cc_c99" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC + +fi +# AC_CACHE_VAL +case "x$ac_cv_prog_cc_c99" in + x) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +$as_echo "none needed" >&6; } ;; + xno) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +$as_echo "unsupported" >&6; } ;; + *) + CC="$CC $ac_cv_prog_cc_c99" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c99" >&5 +$as_echo "$ac_cv_prog_cc_c99" >&6; } ;; +esac +if test "x$ac_cv_prog_cc_c99" != xno; then : + +fi + + if test "$GCC" = "yes" then CFLAGS="$CFLAGS -W -Wall -Wformat-security -Wshadow -Wcast-align -Wpointer-arith -Wmissing-format-attribute -g" fi - case `pwd` in *\ * | *\ *) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 @@ -4115,7 +4374,7 @@ for ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue + as_fn_executable_p "$ac_path_SED" || continue # Check for GNU ac_path_SED and select it if it is found. # Check for GNU $ac_path_SED case `"$ac_path_SED" --version 2>&1` in @@ -4191,7 +4450,7 @@ for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue + as_fn_executable_p "$ac_path_GREP" || continue # Check for GNU ac_path_GREP and select it if it is found. # Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in @@ -4257,7 +4516,7 @@ for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue + as_fn_executable_p "$ac_path_EGREP" || continue # Check for GNU ac_path_EGREP and select it if it is found. # Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in @@ -4324,7 +4583,7 @@ for ac_prog in fgrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue + as_fn_executable_p "$ac_path_FGREP" || continue # Check for GNU ac_path_FGREP and select it if it is found. # Check for GNU $ac_path_FGREP case `"$ac_path_FGREP" --version 2>&1` in @@ -4580,7 +4839,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -4624,7 +4883,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5048,7 +5307,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5088,7 +5347,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OBJDUMP="objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5260,7 +5519,7 @@ lt_cv_deplibs_check_method=pass_all ;; -netbsd* | netbsdelf*-gnu) +netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' else @@ -5394,7 +5653,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5434,7 +5693,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DLLTOOL="dlltool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5538,7 +5797,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AR="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5582,7 +5841,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_AR="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5707,7 +5966,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5747,7 +6006,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5806,7 +6065,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -5846,7 +6105,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6338,7 +6597,7 @@ rm -rf conftest* ;; -x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ +x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext @@ -6356,7 +6615,10 @@ x86_64-*linux*) LD="${LD-ld} -m elf_i386" ;; - ppc64-*linux*|powerpc64-*linux*) + powerpc64le-*linux*) + LD="${LD-ld} -m elf32lppclinux" + ;; + powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) @@ -6375,7 +6637,10 @@ x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; - ppc*-*linux*|powerpc*-*linux*) + powerpcle-*linux*) + LD="${LD-ld} -m elf64lppc" + ;; + powerpc-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) @@ -6495,7 +6760,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6535,7 +6800,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6615,7 +6880,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6655,7 +6920,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6707,7 +6972,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6747,7 +7012,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_NMEDIT="nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6799,7 +7064,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_LIPO="${ac_tool_prefix}lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6839,7 +7104,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_LIPO="lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6891,7 +7156,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OTOOL="${ac_tool_prefix}otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6931,7 +7196,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OTOOL="otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -6983,7 +7248,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -7023,7 +7288,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OTOOL64="otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -8661,9 +8926,6 @@ openbsd*) with_gnu_ld=no ;; - linux* | k*bsd*-gnu | gnu*) - link_all_deplibs=no - ;; esac ld_shlibs=yes @@ -8885,7 +9147,7 @@ fi ;; - netbsd* | netbsdelf*-gnu) + netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= @@ -9062,7 +9324,6 @@ if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi - link_all_deplibs=no else # not using gcc if test "$host_cpu" = ia64; then @@ -9516,7 +9777,7 @@ link_all_deplibs=yes ;; - netbsd* | netbsdelf*-gnu) + netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else @@ -10529,10 +10790,14 @@ # before this can be enabled. hardcode_into_libs=yes + # Add ABI-specific directories to the system library path. + sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" + # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` - sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" + sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" + fi # We used to test for /lib/ld.so.1 and disable shared libraries on @@ -10544,18 +10809,6 @@ dynamic_linker='GNU/Linux ld.so' ;; -netbsdelf*-gnu) - version_type=linux - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='NetBSD ld.elf_so' - ;; - netbsd*) version_type=sunos need_lib_prefix=no @@ -11715,11 +11968,11 @@ int main () { -/* FIXME: Include the comments suggested by Paul. */ + #ifndef __cplusplus - /* Ultrix mips cc rejects this. */ + /* Ultrix mips cc rejects this sort of thing. */ typedef int charset[2]; - const charset cs; + const charset cs = { 0, 0 }; /* SunOS 4.1.1 cc rejects this. */ char const *const *pcpcc; char **ppc; @@ -11736,8 +11989,9 @@ ++pcpcc; ppc = (char**) pcpcc; pcpcc = (char const *const *) ppc; - { /* SCO 3.2v4 cc rejects this. */ - char *t; + { /* SCO 3.2v4 cc rejects this sort of thing. */ + char tx; + char *t = &tx; char const *s = 0 ? 
(char *) 0 : (char const *) 0; *t++ = 0; @@ -11753,10 +12007,10 @@ iptr p = 0; ++p; } - { /* AIX XL C 1.02.0.0 rejects this saying + { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ - struct s { int j; const int *ap[3]; }; - struct s *b; b->j = 5; + struct s { int j; const int *ap[3]; } bx; + struct s *b = &bx; b->j = 5; } { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ const int foo = 10; @@ -12009,7 +12263,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -12052,7 +12306,7 @@ IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 @@ -12195,19 +12449,174 @@ fi pkg_failed=no -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for LIBEE" >&5 -$as_echo_n "checking for LIBEE... " >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JSON_C" >&5 +$as_echo_n "checking for JSON_C... " >&6; } + +if test -n "$JSON_C_CFLAGS"; then + pkg_cv_JSON_C_CFLAGS="$JSON_C_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"json-c\""; } >&5 + ($PKG_CONFIG --exists --print-errors "json-c") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JSON_C_CFLAGS=`$PKG_CONFIG --cflags "json-c" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$JSON_C_LIBS"; then + pkg_cv_JSON_C_LIBS="$JSON_C_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"json-c\""; } >&5 + ($PKG_CONFIG --exists --print-errors "json-c") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JSON_C_LIBS=`$PKG_CONFIG --libs "json-c" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + JSON_C_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "json-c" 2>&1` + else + JSON_C_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "json-c" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$JSON_C_PKG_ERRORS" >&5 + + + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JSON_C" >&5 +$as_echo_n "checking for JSON_C... 
" >&6; } + +if test -n "$JSON_C_CFLAGS"; then + pkg_cv_JSON_C_CFLAGS="$JSON_C_CFLAGS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"json\""; } >&5 + ($PKG_CONFIG --exists --print-errors "json") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JSON_C_CFLAGS=`$PKG_CONFIG --cflags "json" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi +if test -n "$JSON_C_LIBS"; then + pkg_cv_JSON_C_LIBS="$JSON_C_LIBS" + elif test -n "$PKG_CONFIG"; then + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"json\""; } >&5 + ($PKG_CONFIG --exists --print-errors "json") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JSON_C_LIBS=`$PKG_CONFIG --libs "json" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes +else + pkg_failed=yes +fi + else + pkg_failed=untried +fi + + + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi + if test $_pkg_short_errors_supported = yes; then + JSON_C_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "json" 2>&1` + else + JSON_C_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "json" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$JSON_C_PKG_ERRORS" >&5 + + as_fn_error $? "Package requirements (json) were not met: + +$JSON_C_PKG_ERRORS + +Consider adjusting the PKG_CONFIG_PATH environment variable if you +installed software in a non-standard prefix. + +Alternatively, you may set the environment variables JSON_C_CFLAGS +and JSON_C_LIBS to avoid the need to call pkg-config. +See the pkg-config man page for more details." "$LINENO" 5 +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "The pkg-config script could not be found or is too old. Make sure it +is in your PATH or set the PKG_CONFIG environment variable to the full +path to pkg-config. + +Alternatively, you may set the environment variables JSON_C_CFLAGS +and JSON_C_LIBS to avoid the need to call pkg-config. +See the pkg-config man page for more details. + +To get pkg-config, see . +See \`config.log' for more details" "$LINENO" 5; } +else + JSON_C_CFLAGS=$pkg_cv_JSON_C_CFLAGS + JSON_C_LIBS=$pkg_cv_JSON_C_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + +fi +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + + +pkg_failed=no +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JSON_C" >&5 +$as_echo_n "checking for JSON_C... 
" >&6; } -if test -n "$LIBEE_CFLAGS"; then - pkg_cv_LIBEE_CFLAGS="$LIBEE_CFLAGS" +if test -n "$JSON_C_CFLAGS"; then + pkg_cv_JSON_C_CFLAGS="$JSON_C_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libee >= 0.3.2\""; } >&5 - ($PKG_CONFIG --exists --print-errors "libee >= 0.3.2") 2>&5 + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"json\""; } >&5 + ($PKG_CONFIG --exists --print-errors "json") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then - pkg_cv_LIBEE_CFLAGS=`$PKG_CONFIG --cflags "libee >= 0.3.2" 2>/dev/null` + pkg_cv_JSON_C_CFLAGS=`$PKG_CONFIG --cflags "json" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes @@ -12215,16 +12624,16 @@ else pkg_failed=untried fi -if test -n "$LIBEE_LIBS"; then - pkg_cv_LIBEE_LIBS="$LIBEE_LIBS" +if test -n "$JSON_C_LIBS"; then + pkg_cv_JSON_C_LIBS="$JSON_C_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libee >= 0.3.2\""; } >&5 - ($PKG_CONFIG --exists --print-errors "libee >= 0.3.2") 2>&5 + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"json\""; } >&5 + ($PKG_CONFIG --exists --print-errors "json") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then - pkg_cv_LIBEE_LIBS=`$PKG_CONFIG --libs "libee >= 0.3.2" 2>/dev/null` + pkg_cv_JSON_C_LIBS=`$PKG_CONFIG --libs "json" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes @@ -12245,22 +12654,22 @@ _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBEE_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libee >= 0.3.2" 2>&1` + JSON_C_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "json" 2>&1` else - LIBEE_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libee >= 0.3.2" 2>&1` + JSON_C_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "json" 2>&1` fi # Put the nasty error message in config.log where it belongs - echo "$LIBEE_PKG_ERRORS" >&5 + echo "$JSON_C_PKG_ERRORS" >&5 - as_fn_error $? "Package requirements (libee >= 0.3.2) were not met: + as_fn_error $? "Package requirements (json) were not met: -$LIBEE_PKG_ERRORS +$JSON_C_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. -Alternatively, you may set the environment variables LIBEE_CFLAGS -and LIBEE_LIBS to avoid the need to call pkg-config. +Alternatively, you may set the environment variables JSON_C_CFLAGS +and JSON_C_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 @@ -12271,15 +12680,22 @@ is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. -Alternatively, you may set the environment variables LIBEE_CFLAGS -and LIBEE_LIBS to avoid the need to call pkg-config. +Alternatively, you may set the environment variables JSON_C_CFLAGS +and JSON_C_LIBS to avoid the need to call pkg-config. See the pkg-config man page for more details. To get pkg-config, see . 
See \`config.log' for more details" "$LINENO" 5; } else - LIBEE_CFLAGS=$pkg_cv_LIBEE_CFLAGS - LIBEE_LIBS=$pkg_cv_LIBEE_LIBS + JSON_C_CFLAGS=$pkg_cv_JSON_C_CFLAGS + JSON_C_LIBS=$pkg_cv_JSON_C_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + +fi +else + JSON_C_CFLAGS=$pkg_cv_JSON_C_CFLAGS + JSON_C_LIBS=$pkg_cv_JSON_C_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } @@ -12309,9 +12725,72 @@ fi +# docs (html) build settings +# Check whether --enable-docs was given. +if test "${enable_docs+set}" = set; then : + enableval=$enable_docs; enable_docs="no" +else + enable_docs="yes" + +fi + +for ac_prog in sphinx-build sphinx-build3 sphinx-build2 +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_SPHINXBUILD+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$SPHINXBUILD"; then + ac_cv_prog_SPHINXBUILD="$SPHINXBUILD" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_SPHINXBUILD="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +SPHINXBUILD=$ac_cv_prog_SPHINXBUILD +if test -n "$SPHINXBUILD"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $SPHINXBUILD" >&5 +$as_echo "$SPHINXBUILD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$SPHINXBUILD" && break +done +test -n "$SPHINXBUILD" || SPHINXBUILD="no" + +if test "$enable_docs" = "yes" -a "x$SPHINXBUILD" = xno; then : + as_fn_error install it or try --disable-docs "sphinx-build is required to build documentation" "$LINENO" 5 + +fi + if test "$enable_docs" = yes; then + ENABLE_DOCS_TRUE= + ENABLE_DOCS_FALSE='#' +else + ENABLE_DOCS_TRUE='#' + ENABLE_DOCS_FALSE= +fi -ac_config_files="$ac_config_files Makefile lognorm.pc src/Makefile" +ac_config_files="$ac_config_files Makefile lognorm.pc doc/Makefile src/Makefile" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure @@ -12422,6 +12901,14 @@ LTLIBOBJS=$ac_ltlibobjs +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5 +$as_echo_n "checking that generated files are newer than configure... " >&6; } + if test -n "$am_sleep_pid"; then + # Hide warnings about reused PIDs. + wait $am_sleep_pid 2>/dev/null + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5 +$as_echo "done" >&6; } if test -n "$EXEEXT"; then am__EXEEXT_TRUE= am__EXEEXT_FALSE='#' @@ -12438,6 +12925,10 @@ as_fn_error $? "conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi +if test -z "${ENABLE_DOCS_TRUE}" && test -z "${ENABLE_DOCS_FALSE}"; then + as_fn_error $? "conditional \"ENABLE_DOCS\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi : "${CONFIG_STATUS=./config.status}" ac_write_fail=0 @@ -12736,16 +13227,16 @@ # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. 
# 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. + # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -p' + as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null @@ -12805,28 +13296,16 @@ as_mkdir_p=false fi -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -12847,8 +13326,8 @@ # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" -This file was extended by liblognorm $as_me 0.3.7, which was -generated by GNU Autoconf 2.68. Invocation command line was +This file was extended by liblognorm $as_me 1.0.1, which was +generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS @@ -12913,11 +13392,11 @@ cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -liblognorm config.status 0.3.7 -configured by $0, generated by GNU Autoconf 2.68, +liblognorm config.status 1.0.1 +configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" -Copyright (C) 2010 Free Software Foundation, Inc. +Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." @@ -13008,7 +13487,7 @@ _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then - set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' @@ -13324,6 +13803,7 @@ "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "lognorm.pc") CONFIG_FILES="$CONFIG_FILES lognorm.pc" ;; + "doc/Makefile") CONFIG_FILES="$CONFIG_FILES doc/Makefile" ;; "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; @@ -13921,7 +14401,7 @@ case $ac_file$ac_mode in "depfiles":C) test x"$AMDEP_TRUE" != x"" || { - # Autoconf 2.62 quotes --file arguments for eval, but not when files + # Older Autoconf quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. 
case $CONFIG_FILES in @@ -13934,7 +14414,7 @@ # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. - # We used to match only the files named `Makefile.in', but + # We used to match only the files named 'Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. @@ -13968,21 +14448,19 @@ continue fi # Extract the definition of DEPDIR, am__include, and am__quote - # from the Makefile without running `make'. + # from the Makefile without running 'make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` - test -z "am__include" && continue + test -z "$am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` - # When using ansi2knr, U may be empty or an underscore; expand it - U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ - sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do + sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`$as_dirname -- "$file" || diff -Nru liblognorm-0.3.7/configure.ac liblognorm-1.0.1/configure.ac --- liblognorm-0.3.7/configure.ac 2013-07-18 07:37:42.000000000 +0000 +++ liblognorm-1.0.1/configure.ac 2014-04-11 04:24:50.000000000 +0000 @@ -2,7 +2,7 @@ # Process this file with autoconf to produce a configure script. AC_PREREQ(2.61) -AC_INIT([liblognorm], [0.3.7], [rgerhards@adiscon.com]) +AC_INIT([liblognorm], [1.0.1], [rgerhards@adiscon.com]) AM_INIT_AUTOMAKE m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([yes])]) AC_CONFIG_SRCDIR([src/lognorm.c]) @@ -11,10 +11,10 @@ # Checks for programs. AC_PROG_CC AM_PROG_CC_C_O +AC_PROG_CC_C99 if test "$GCC" = "yes" then CFLAGS="$CFLAGS -W -Wall -Wformat-security -Wshadow -Wcast-align -Wpointer-arith -Wmissing-format-attribute -g" fi - AC_PROG_LIBTOOL # Checks for libraries. 
@@ -46,7 +46,8 @@ # modules we require PKG_CHECK_MODULES(LIBESTR, libestr >= 0.0.0) -PKG_CHECK_MODULES(LIBEE, libee >= 0.3.2) +PKG_CHECK_MODULES(JSON_C, json-c,, [ + PKG_CHECK_MODULES(JSON_C, json)]) # debug mode settings AC_ARG_ENABLE(debug, @@ -65,10 +66,21 @@ AC_DEFINE(NDEBUG, 1, [Defined if debug mode is disabled.]) fi - +# docs (html) build settings +AC_ARG_ENABLE(docs, + [AS_HELP_STRING([--disable-docs],[Disable building HTML docs (requires Sphinx)])], + [enable_docs="no"], + [enable_docs="yes"] +) +AC_CHECK_PROGS([SPHINXBUILD], [sphinx-build sphinx-build3 sphinx-build2], [no]) +AS_IF([test "$enable_docs" = "yes" -a "x$SPHINXBUILD" = xno], + AC_MSG_ERROR(sphinx-build is required to build documentation, install it or try --disable-docs) + ) +AM_CONDITIONAL([ENABLE_DOCS], [test "$enable_docs" = yes]) AC_CONFIG_FILES([Makefile \ lognorm.pc \ + doc/Makefile \ src/Makefile]) AC_OUTPUT AC_CONFIG_MACRO_DIR([m4]) diff -Nru liblognorm-0.3.7/COPYING liblognorm-1.0.1/COPYING --- liblognorm-0.3.7/COPYING 2013-07-18 07:37:17.000000000 +0000 +++ liblognorm-1.0.1/COPYING 2014-01-22 15:52:06.000000000 +0000 @@ -1,6 +1,4 @@ -liblognorm is available under the terms of the GNU LGPL v2.1 or above (full -text below). ------------------------------------------------------------------------ + GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 diff -Nru liblognorm-0.3.7/debian/changelog liblognorm-1.0.1/debian/changelog --- liblognorm-0.3.7/debian/changelog 2014-01-31 17:33:12.000000000 +0000 +++ liblognorm-1.0.1/debian/changelog 2014-04-28 11:06:03.000000000 +0000 @@ -1,51 +1,80 @@ -liblognorm (0.3.7-1ubuntu1) trusty; urgency=medium +liblognorm (1.0.1-0adiscon1trusty1) trusty; urgency=low - * Use dh-autoreconf to get new libtool macros for ppc64el. + * Initial build for Ubuntu trusty - -- Logan Rosen Fri, 31 Jan 2014 12:33:01 -0500 + -- Andre Lorbach Mon, 28 Apr 2014 13:05:52 +0200 -liblognorm (0.3.7-1) unstable; urgency=low +liblognorm (1.0.1-0adiscon1saucy1) saucy; urgency=low - * Imported Upstream version 0.3.7 - * Update symbols file - * Bump Standards Version to 3.9.4 + * New debs for liblognorm 1.0.1 - -- Pierre Chifflier Tue, 20 Aug 2013 17:21:03 +0200 + -- Andre Lorbach Fri, 11 Apr 2014 15:37:51 +0200 -liblognorm (0.3.6-2) unstable; urgency=low +liblognorm (1.0.0-0adiscon7) saucy; urgency=low - * Upload to unstable + * Fixed typo - -- Pierre Chifflier Thu, 09 May 2013 21:45:39 +0200 + -- Andre Lorbach Fri, 29 Nov 2013 08:42:44 +0100 -liblognorm (0.3.6-1) experimental; urgency=low +liblognorm (1.0.0-0adiscon6) saucy; urgency=low - * Upload to experimental - * Imported Upstream version 0.3.6 + * Fix for conflicting files with liblognorm0 package + + -- Andre Lorbach Fri, 29 Nov 2013 07:58:49 +0100 + +liblognorm (1.0.0-0adiscon5) saucy; urgency=low + + * Final Final build for liblognorm 1.0.0 + + -- Andre Lorbach Thu, 28 Nov 2013 14:54:24 +0100 + +liblognorm (1.0.0-0adiscon4) saucy; urgency=low + + * Final build for liblognorm 1.0.0 + + -- Andre Lorbach Thu, 28 Nov 2013 11:53:32 +0100 + +liblognorm (1.0.0-0adiscon3) saucy; urgency=low + + * Build new major version for liblognorm - -- Pierre Chifflier Fri, 22 Mar 2013 11:34:36 +0100 + -- Andre Lorbach Wed, 27 Nov 2013 11:28:43 +0100 -liblognorm (0.3.5-1) unstable; urgency=low +liblognorm (0.3.7-0adiscon1) saucy; urgency=low - * Imported Upstream version 0.3.5 + * Latest version from old liblognorm branch - -- Pierre Chifflier Sat, 10 Nov 2012 16:39:29 +0100 + -- Andre Lorbach Wed, 27 Nov 2013 10:37:47 +0100 -liblognorm (0.3.4-1) unstable; 
urgency=low +liblognorm (1.0.0-0adiscon2) saucy; urgency=low - * Imported Upstream version 0.3.4 + * rebuild - -- Pierre Chifflier Mon, 16 Apr 2012 22:26:37 +0200 + -- Andre Lorbach Mon, 25 Nov 2013 16:15:26 +0100 -liblognorm (0.3.3-1) unstable; urgency=low +liblognorm (1.0.0-0adiscon1) saucy; urgency=low - * Imported Upstream version 0.3.3 - * Bump DH version to 9 - - drop build-dep on hardening-wrapper, replace by dpkg-buildflags - - enable multiarch support - * Bump Standards Version to 3.9.3 + * Imported Upstream version 1.0.0 + + -- Andre Lorbach Mon, 25 Nov 2013 13:56:41 +0100 + +liblognorm (0.3.6-0adiscon3) saucy; urgency=low + + * Initial Packages for saucy + + -- Andre Lorbach Fri, 18 Oct 2013 17:45:57 +0200 + +liblognorm (0.3.6-0adiscon2) precise; urgency=low + + * repacked + + -- Andre Lorbach Wed, 12 Jun 2013 14:04:01 +0200 + +liblognorm (0.3.6-0adiscon1) precise; urgency=low + + * Imported Upstream version 0.3.6 - -- Pierre Chifflier Sat, 25 Feb 2012 15:11:24 +0100 + -- Andre Lorbach Thu, 21 Mar 2013 06:31:03 -0700 liblognorm (0.3.2-1) unstable; urgency=low diff -Nru liblognorm-0.3.7/debian/compat liblognorm-1.0.1/debian/compat --- liblognorm-0.3.7/debian/compat 2012-04-08 13:32:42.000000000 +0000 +++ liblognorm-1.0.1/debian/compat 2014-04-28 11:05:51.000000000 +0000 @@ -1 +1 @@ -9 +8 diff -Nru liblognorm-0.3.7/debian/control liblognorm-1.0.1/debian/control --- liblognorm-0.3.7/debian/control 2014-01-31 17:33:21.000000000 +0000 +++ liblognorm-1.0.1/debian/control 2014-04-28 11:05:51.000000000 +0000 @@ -1,24 +1,28 @@ Source: liblognorm Priority: extra -Maintainer: Ubuntu Developers -XSBC-Original-Maintainer: Pierre Chifflier -Build-Depends: debhelper (>= 9), - dh-autoreconf, +Maintainer: Andre Lorbach +Build-Depends: debhelper (>= 8), + autotools-dev, pkg-config, + python-sphinx, libxml2-dev, libestr-dev, - libee-dev (>= 0.3.2) -Standards-Version: 3.9.4 + libee-dev (>= 0.3.2), + libjson0-dev, + hardening-wrapper +Standards-Version: 3.9.2 Section: libs Homepage: http://www.liblognorm.com/ #Vcs-Git: git://git.debian.org/collab-maint/liblognorm.git #Vcs-Browser: http://git.debian.org/?p=collab-maint/liblognorm.git;a=summary -Package: liblognorm-dev +Package: liblognorm1-dev Section: libdevel Architecture: any -Depends: liblognorm0 (= ${binary:Version}), ${misc:Depends}, +Depends: liblognorm1 (= ${binary:Version}), ${misc:Depends}, libee-dev, libestr-dev +Replaces: liblognorm-dev +Breaks: liblognorm-dev Description: Log normalizing library Liblognorm is a event and log normalization library that is capable of real-time processing. It provides the capability to normalize events to @@ -26,11 +30,12 @@ . This package contains the development files. -Package: liblognorm0 +Package: liblognorm1 Section: libs Architecture: any Depends: ${shlibs:Depends}, ${misc:Depends} -Pre-Depends: multiarch-support +Replaces: liblognorm0 +Breaks: liblognorm0 Description: Log normalizing library Liblognorm is a event and log normalization library that is capable of real-time processing. 
It provides the capability to normalize events to diff -Nru liblognorm-0.3.7/debian/liblognorm0.install liblognorm-1.0.1/debian/liblognorm0.install --- liblognorm-0.3.7/debian/liblognorm0.install 2012-04-08 13:33:10.000000000 +0000 +++ liblognorm-1.0.1/debian/liblognorm0.install 2014-04-28 11:05:51.000000000 +0000 @@ -1,2 +1,2 @@ -usr/lib/*/lib*.so.* +usr/lib/lib*.so.* usr/bin/* usr/lib/lognorm/ diff -Nru liblognorm-0.3.7/debian/liblognorm0.symbols liblognorm-1.0.1/debian/liblognorm0.symbols --- liblognorm-0.3.7/debian/liblognorm0.symbols 2013-08-20 15:18:57.000000000 +0000 +++ liblognorm-1.0.1/debian/liblognorm0.symbols 2014-04-28 11:05:51.000000000 +0000 @@ -16,13 +16,11 @@ ln_findAnnot@Base 0.3.2 ln_genDotPTreeGraph@Base 0.3.0 ln_initCtx@Base 0.3.0 - ln_loadSample@Base 0.3.7 ln_loadSamples@Base 0.3.0 ln_newAnnot@Base 0.3.2 ln_newAnnotSet@Base 0.3.2 ln_newPTree@Base 0.3.0 ln_normalize@Base 0.3.0 - ln_processSamp@Base 0.3.7 ln_sampClose@Base 0.3.0 ln_sampCreate@Base 0.3.0 ln_sampFree@Base 0.3.0 diff -Nru liblognorm-0.3.7/debian/liblognorm1-dev.dirs liblognorm-1.0.1/debian/liblognorm1-dev.dirs --- liblognorm-0.3.7/debian/liblognorm1-dev.dirs 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/debian/liblognorm1-dev.dirs 2014-04-28 11:05:51.000000000 +0000 @@ -0,0 +1,2 @@ +usr/lib +usr/include diff -Nru liblognorm-0.3.7/debian/liblognorm1-dev.install liblognorm-1.0.1/debian/liblognorm1-dev.install --- liblognorm-0.3.7/debian/liblognorm1-dev.install 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/debian/liblognorm1-dev.install 2014-04-28 11:05:51.000000000 +0000 @@ -0,0 +1,4 @@ +usr/include/* +usr/lib/lib*.a +usr/lib/lib*.so +usr/lib/pkgconfig/* diff -Nru liblognorm-0.3.7/debian/liblognorm1.dirs liblognorm-1.0.1/debian/liblognorm1.dirs --- liblognorm-0.3.7/debian/liblognorm1.dirs 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/debian/liblognorm1.dirs 2014-04-28 11:05:51.000000000 +0000 @@ -0,0 +1,2 @@ +usr/lib +usr/lib/lognorm diff -Nru liblognorm-0.3.7/debian/liblognorm1.install liblognorm-1.0.1/debian/liblognorm1.install --- liblognorm-0.3.7/debian/liblognorm1.install 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/debian/liblognorm1.install 2014-04-28 11:05:51.000000000 +0000 @@ -0,0 +1,2 @@ +usr/lib/lib*.so.* +usr/bin/* usr/lib/lognorm/ diff -Nru liblognorm-0.3.7/debian/liblognorm1.symbols liblognorm-1.0.1/debian/liblognorm1.symbols --- liblognorm-0.3.7/debian/liblognorm1.symbols 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/debian/liblognorm1.symbols 2014-04-28 11:05:51.000000000 +0000 @@ -0,0 +1,32 @@ +liblognorm.so.0 liblognorm0 #MINVER# + ln_addAnnotOp@Base 0.3.2 + ln_addAnnotToSet@Base 0.3.2 + ln_addFDescrToPTree@Base 0.3.0 + ln_addPTree@Base 0.3.0 + ln_annotateEvent@Base 0.3.2 + ln_buildPTree@Base 0.3.0 + ln_combineAnnot@Base 0.3.2 + ln_dbgprintf@Base 0.3.0 + ln_deleteAnnot@Base 0.3.2 + ln_deleteAnnotSet@Base 0.3.2 + ln_deletePTree@Base 0.3.0 + ln_displayPTree@Base 0.3.0 + ln_enableDebug@Base 0.3.0 + ln_exitCtx@Base 0.3.0 + ln_findAnnot@Base 0.3.2 + ln_genDotPTreeGraph@Base 0.3.0 + ln_initCtx@Base 0.3.0 + ln_loadSamples@Base 0.3.0 + ln_newAnnot@Base 0.3.2 + ln_newAnnotSet@Base 0.3.2 + ln_newPTree@Base 0.3.0 + ln_normalize@Base 0.3.0 + ln_sampClose@Base 0.3.0 + ln_sampCreate@Base 0.3.0 + ln_sampFree@Base 0.3.0 + ln_sampOpen@Base 0.3.0 + ln_sampRead@Base 0.3.0 + ln_setDebugCB@Base 0.3.0 + ln_setEECtx@Base 0.3.0 + ln_traversePTree@Base 0.3.0 + ln_version@Base 0.3.0 diff -Nru liblognorm-0.3.7/debian/liblognorm-dev.install 
liblognorm-1.0.1/debian/liblognorm-dev.install --- liblognorm-0.3.7/debian/liblognorm-dev.install 2012-04-08 13:33:08.000000000 +0000 +++ liblognorm-1.0.1/debian/liblognorm-dev.install 2014-04-28 11:05:51.000000000 +0000 @@ -1,3 +1,4 @@ usr/include/* -usr/lib/*/lib*.so -usr/lib/*/pkgconfig/* +usr/lib/lib*.a +usr/lib/lib*.so +usr/lib/pkgconfig/* diff -Nru liblognorm-0.3.7/debian/rules liblognorm-1.0.1/debian/rules --- liblognorm-0.3.7/debian/rules 2014-01-31 17:32:51.000000000 +0000 +++ liblognorm-1.0.1/debian/rules 2014-04-28 11:05:51.000000000 +0000 @@ -12,4 +12,4 @@ export DEB_BUILD_HARDENING=1 %: - dh $@ --with autoreconf + dh $@ diff -Nru liblognorm-0.3.7/depcomp liblognorm-1.0.1/depcomp --- liblognorm-0.3.7/depcomp 2012-04-04 09:16:38.000000000 +0000 +++ liblognorm-1.0.1/depcomp 2014-04-11 04:30:18.000000000 +0000 @@ -1,10 +1,9 @@ #! /bin/sh # depcomp - compile a program generating dependencies as side-effects -scriptversion=2011-12-04.11; # UTC +scriptversion=2013-05-30.07; # UTC -# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2006, 2007, 2009, 2010, -# 2011 Free Software Foundation, Inc. +# Copyright (C) 1999-2013 Free Software Foundation, Inc. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -28,9 +27,9 @@ case $1 in '') - echo "$0: No command. Try \`$0 --help' for more information." 1>&2 - exit 1; - ;; + echo "$0: No command. Try '$0 --help' for more information." 1>&2 + exit 1; + ;; -h | --h*) cat <<\EOF Usage: depcomp [--help] [--version] PROGRAM [ARGS] @@ -40,8 +39,8 @@ Environment variables: depmode Dependency tracking mode. - source Source file read by `PROGRAMS ARGS'. - object Object file output by `PROGRAMS ARGS'. + source Source file read by 'PROGRAMS ARGS'. + object Object file output by 'PROGRAMS ARGS'. DEPDIR directory where to store dependencies. depfile Dependency file to output. tmpdepfile Temporary file to use when outputting dependencies. @@ -57,6 +56,66 @@ ;; esac +# Get the directory component of the given path, and save it in the +# global variables '$dir'. Note that this directory component will +# be either empty or ending with a '/' character. This is deliberate. +set_dir_from () +{ + case $1 in + */*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;; + *) dir=;; + esac +} + +# Get the suffix-stripped basename of the given path, and save it the +# global variable '$base'. +set_base_from () +{ + base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'` +} + +# If no dependency file was actually created by the compiler invocation, +# we still have to create a dummy depfile, to avoid errors with the +# Makefile "include basename.Plo" scheme. +make_dummy_depfile () +{ + echo "#dummy" > "$depfile" +} + +# Factor out some common post-processing of the generated depfile. +# Requires the auxiliary global variable '$tmpdepfile' to be set. +aix_post_process_depfile () +{ + # If the compiler actually managed to produce a dependency file, + # post-process it. + if test -f "$tmpdepfile"; then + # Each line is of the form 'foo.o: dependency.h'. + # Do two passes, one to just change these to + # $object: dependency.h + # and one to simply output + # dependency.h: + # which is needed to avoid the deleted-header problem. + { sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile" + sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile" + } > "$depfile" + rm -f "$tmpdepfile" + else + make_dummy_depfile + fi +} + +# A tabulation character. +tab=' ' +# A newline character. 
+nl=' +' +# Character ranges might be problematic outside the C locale. +# These definitions help. +upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ +lower=abcdefghijklmnopqrstuvwxyz +digits=0123456789 +alpha=${upper}${lower} + if test -z "$depmode" || test -z "$source" || test -z "$object"; then echo "depcomp: Variables source, object and depmode must be set" 1>&2 exit 1 @@ -69,6 +128,9 @@ rm -f "$tmpdepfile" +# Avoid interferences from the environment. +gccflag= dashmflag= + # Some modes work just like other modes, but use different flags. We # parameterize here, but still list the modes in the big case below, # to make depend.m4 easier to write. Note that we *cannot* use a case @@ -80,26 +142,32 @@ fi if test "$depmode" = dashXmstdout; then - # This is just like dashmstdout with a different argument. - dashmflag=-xM - depmode=dashmstdout + # This is just like dashmstdout with a different argument. + dashmflag=-xM + depmode=dashmstdout fi cygpath_u="cygpath -u -f -" if test "$depmode" = msvcmsys; then - # This is just like msvisualcpp but w/o cygpath translation. - # Just convert the backslash-escaped backslashes to single forward - # slashes to satisfy depend.m4 - cygpath_u='sed s,\\\\,/,g' - depmode=msvisualcpp + # This is just like msvisualcpp but w/o cygpath translation. + # Just convert the backslash-escaped backslashes to single forward + # slashes to satisfy depend.m4 + cygpath_u='sed s,\\\\,/,g' + depmode=msvisualcpp fi if test "$depmode" = msvc7msys; then - # This is just like msvc7 but w/o cygpath translation. - # Just convert the backslash-escaped backslashes to single forward - # slashes to satisfy depend.m4 - cygpath_u='sed s,\\\\,/,g' - depmode=msvc7 + # This is just like msvc7 but w/o cygpath translation. + # Just convert the backslash-escaped backslashes to single forward + # slashes to satisfy depend.m4 + cygpath_u='sed s,\\\\,/,g' + depmode=msvc7 +fi + +if test "$depmode" = xlc; then + # IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information. + gccflag=-qmakedep=gcc,-MF + depmode=gcc fi case "$depmode" in @@ -122,8 +190,7 @@ done "$@" stat=$? - if test $stat -eq 0; then : - else + if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi @@ -131,13 +198,17 @@ ;; gcc) +## Note that this doesn't just cater to obsosete pre-3.x GCC compilers. +## but also to in-use compilers like IMB xlc/xlC and the HP C compiler. +## (see the conditional assignment to $gccflag above). ## There are various ways to get dependency output from gcc. Here's ## why we pick this rather obscure method: ## - Don't want to use -MD because we'd like the dependencies to end ## up in a subdir. Having to rename by hand is ugly. ## (We might end up doing this anyway to support other compilers.) ## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like -## -MM, not -M (despite what the docs say). +## -MM, not -M (despite what the docs say). Also, it might not be +## supported by the other compilers which use the 'gcc' depmode. ## - Using -M directly means running the compiler twice (even worse ## than renaming). if test -z "$gccflag"; then @@ -145,33 +216,31 @@ fi "$@" -Wp,"$gccflag$tmpdepfile" stat=$? - if test $stat -eq 0; then : - else + if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" echo "$object : \\" > "$depfile" - alpha=ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz -## The second -e expression handles DOS-style file names with drive letters. + # The second -e expression handles DOS-style file names with drive + # letters. 
sed -e 's/^[^:]*: / /' \ -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile" -## This next piece of magic avoids the `deleted header file' problem. +## This next piece of magic avoids the "deleted header file" problem. ## The problem is that when a header file which appears in a .P file ## is deleted, the dependency causes make to die (because there is ## typically no way to rebuild the header). We avoid this by adding ## dummy dependencies for each header file. Too bad gcc doesn't do ## this for us directly. - tr ' ' ' -' < "$tmpdepfile" | -## Some versions of gcc put a space before the `:'. On the theory +## Some versions of gcc put a space before the ':'. On the theory ## that the space means something, we add a space to the output as ## well. hp depmode also adds that space, but also prefixes the VPATH ## to the object. Take care to not repeat it in the output. ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. - sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \ - | sed -e 's/$/ :/' >> "$depfile" + tr ' ' "$nl" < "$tmpdepfile" \ + | sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \ + | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; @@ -189,8 +258,7 @@ "$@" -MDupdate "$tmpdepfile" fi stat=$? - if test $stat -eq 0; then : - else + if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi @@ -198,43 +266,41 @@ if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files echo "$object : \\" > "$depfile" - # Clip off the initial element (the dependent). Don't try to be # clever and replace this with sed code, as IRIX sed won't handle # lines with more than a fixed number of characters (4096 in # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines; - # the IRIX cc adds comments like `#:fec' to the end of the + # the IRIX cc adds comments like '#:fec' to the end of the # dependency line. - tr ' ' ' -' < "$tmpdepfile" \ - | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \ - tr ' -' ' ' >> "$depfile" + tr ' ' "$nl" < "$tmpdepfile" \ + | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \ + | tr "$nl" ' ' >> "$depfile" echo >> "$depfile" - # The second pass generates a dummy entry for each header file. - tr ' ' ' -' < "$tmpdepfile" \ - | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ - >> "$depfile" + tr ' ' "$nl" < "$tmpdepfile" \ + | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ + >> "$depfile" else - # The sourcefile does not contain any dependencies, so just - # store a dummy comment line, to avoid errors with the Makefile - # "include basename.Plo" scheme. - echo "#dummy" > "$depfile" + make_dummy_depfile fi rm -f "$tmpdepfile" ;; +xlc) + # This case exists only to let depend.m4 do its work. It works by + # looking at the text of this script. This case will never be run, + # since it is checked for above. + exit 1 + ;; + aix) # The C for AIX Compiler uses -M and outputs the dependencies # in a .u file. In older versions, this file always lives in the - # current directory. Also, the AIX compiler puts `$object:' at the + # current directory. Also, the AIX compiler puts '$object:' at the # start of each line; $object doesn't have directory information. # Version 6 uses the directory in both cases. 
- dir=`echo "$object" | sed -e 's|/[^/]*$|/|'` - test "x$dir" = "x$object" && dir= - base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'` + set_dir_from "$object" + set_base_from "$object" if test "$libtool" = yes; then tmpdepfile1=$dir$base.u tmpdepfile2=$base.u @@ -247,9 +313,7 @@ "$@" -M fi stat=$? - - if test $stat -eq 0; then : - else + if test $stat -ne 0; then rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" exit $stat fi @@ -258,44 +322,100 @@ do test -f "$tmpdepfile" && break done - if test -f "$tmpdepfile"; then - # Each line is of the form `foo.o: dependent.h'. - # Do two passes, one to just change these to - # `$object: dependent.h' and one to simply `dependent.h:'. - sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile" - # That's a tab and a space in the []. - sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile" - else - # The sourcefile does not contain any dependencies, so just - # store a dummy comment line, to avoid errors with the Makefile - # "include basename.Plo" scheme. - echo "#dummy" > "$depfile" + aix_post_process_depfile + ;; + +tcc) + # tcc (Tiny C Compiler) understand '-MD -MF file' since version 0.9.26 + # FIXME: That version still under development at the moment of writing. + # Make that this statement remains true also for stable, released + # versions. + # It will wrap lines (doesn't matter whether long or short) with a + # trailing '\', as in: + # + # foo.o : \ + # foo.c \ + # foo.h \ + # + # It will put a trailing '\' even on the last line, and will use leading + # spaces rather than leading tabs (at least since its commit 0394caf7 + # "Emit spaces for -MD"). + "$@" -MD -MF "$tmpdepfile" + stat=$? + if test $stat -ne 0; then + rm -f "$tmpdepfile" + exit $stat fi + rm -f "$depfile" + # Each non-empty line is of the form 'foo.o : \' or ' dep.h \'. + # We have to change lines of the first kind to '$object: \'. + sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile" + # And for each line of the second kind, we have to emit a 'dep.h:' + # dummy dependency, to avoid the deleted-header problem. + sed -n -e 's|^ *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile" rm -f "$tmpdepfile" ;; -icc) - # Intel's C compiler understands `-MD -MF file'. However on - # icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c - # ICC 7.0 will fill foo.d with something like - # foo.o: sub/foo.c - # foo.o: sub/foo.h - # which is wrong. We want: - # sub/foo.o: sub/foo.c - # sub/foo.o: sub/foo.h - # sub/foo.c: - # sub/foo.h: - # ICC 7.1 will output +## The order of this option in the case statement is important, since the +## shell code in configure will try each of these formats in the order +## listed in this file. A plain '-MD' option would be understood by many +## compilers, so we must ensure this comes after the gcc and icc options. +pgcc) + # Portland's C compiler understands '-MD'. + # Will always output deps to 'file.d' where file is the root name of the + # source file under compilation, even if file resides in a subdirectory. + # The object file name does not affect the name of the '.d' file. + # pgcc 10.2 will output # foo.o: sub/foo.c sub/foo.h - # and will wrap long lines using \ : + # and will wrap long lines using '\' : # foo.o: sub/foo.c ... \ # sub/foo.h ... \ # ... + set_dir_from "$object" + # Use the source, not the object, to determine the base name, since + # that's sadly what pgcc will do too. 
+ set_base_from "$source" + tmpdepfile=$base.d + + # For projects that build the same source file twice into different object + # files, the pgcc approach of using the *source* file root name can cause + # problems in parallel builds. Use a locking strategy to avoid stomping on + # the same $tmpdepfile. + lockdir=$base.d-lock + trap " + echo '$0: caught signal, cleaning up...' >&2 + rmdir '$lockdir' + exit 1 + " 1 2 13 15 + numtries=100 + i=$numtries + while test $i -gt 0; do + # mkdir is a portable test-and-set. + if mkdir "$lockdir" 2>/dev/null; then + # This process acquired the lock. + "$@" -MD + stat=$? + # Release the lock. + rmdir "$lockdir" + break + else + # If the lock is being held by a different process, wait + # until the winning process is done or we timeout. + while test -d "$lockdir" && test $i -gt 0; do + sleep 1 + i=`expr $i - 1` + done + fi + i=`expr $i - 1` + done + trap - 1 2 13 15 + if test $i -le 0; then + echo "$0: failed to acquire lock after $numtries attempts" >&2 + echo "$0: check lockdir '$lockdir'" >&2 + exit 1 + fi - "$@" -MD -MF "$tmpdepfile" - stat=$? - if test $stat -eq 0; then : - else + if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi @@ -307,8 +427,8 @@ sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process this invocation # correctly. Breaking it into two sed invocations is a workaround. - sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" | - sed -e 's/$/ :/' >> "$depfile" + sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \ + | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; @@ -319,9 +439,8 @@ # 'foo.d', which lands next to the object file, wherever that # happens to be. # Much of this is similar to the tru64 case; see comments there. - dir=`echo "$object" | sed -e 's|/[^/]*$|/|'` - test "x$dir" = "x$object" && dir= - base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'` + set_dir_from "$object" + set_base_from "$object" if test "$libtool" = yes; then tmpdepfile1=$dir$base.d tmpdepfile2=$dir.libs/$base.d @@ -332,8 +451,7 @@ "$@" +Maked fi stat=$? - if test $stat -eq 0; then : - else + if test $stat -ne 0; then rm -f "$tmpdepfile1" "$tmpdepfile2" exit $stat fi @@ -343,77 +461,61 @@ test -f "$tmpdepfile" && break done if test -f "$tmpdepfile"; then - sed -e "s,^.*\.[a-z]*:,$object:," "$tmpdepfile" > "$depfile" - # Add `dependent.h:' lines. + sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile" + # Add 'dependent.h:' lines. sed -ne '2,${ - s/^ *// - s/ \\*$// - s/$/:/ - p - }' "$tmpdepfile" >> "$depfile" + s/^ *// + s/ \\*$// + s/$/:/ + p + }' "$tmpdepfile" >> "$depfile" else - echo "#dummy" > "$depfile" + make_dummy_depfile fi rm -f "$tmpdepfile" "$tmpdepfile2" ;; tru64) - # The Tru64 compiler uses -MD to generate dependencies as a side - # effect. `cc -MD -o foo.o ...' puts the dependencies into `foo.o.d'. - # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put - # dependencies in `foo.d' instead, so we check for that too. - # Subdirectories are respected. - dir=`echo "$object" | sed -e 's|/[^/]*$|/|'` - test "x$dir" = "x$object" && dir= - base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'` - - if test "$libtool" = yes; then - # With Tru64 cc, shared objects can also be used to make a - # static library. This mechanism is used in libtool 1.4 series to - # handle both shared and static libraries in a single compilation. - # With libtool 1.4, dependencies were output in $dir.libs/$base.lo.d. 
- # - # With libtool 1.5 this exception was removed, and libtool now - # generates 2 separate objects for the 2 libraries. These two - # compilations output dependencies in $dir.libs/$base.o.d and - # in $dir$base.o.d. We have to check for both files, because - # one of the two compilations can be disabled. We should prefer - # $dir$base.o.d over $dir.libs/$base.o.d because the latter is - # automatically cleaned when .libs/ is deleted, while ignoring - # the former would cause a distcleancheck panic. - tmpdepfile1=$dir.libs/$base.lo.d # libtool 1.4 - tmpdepfile2=$dir$base.o.d # libtool 1.5 - tmpdepfile3=$dir.libs/$base.o.d # libtool 1.5 - tmpdepfile4=$dir.libs/$base.d # Compaq CCC V6.2-504 - "$@" -Wc,-MD - else - tmpdepfile1=$dir$base.o.d - tmpdepfile2=$dir$base.d - tmpdepfile3=$dir$base.d - tmpdepfile4=$dir$base.d - "$@" -MD - fi + # The Tru64 compiler uses -MD to generate dependencies as a side + # effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'. + # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put + # dependencies in 'foo.d' instead, so we check for that too. + # Subdirectories are respected. + set_dir_from "$object" + set_base_from "$object" - stat=$? - if test $stat -eq 0; then : - else - rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4" - exit $stat - fi - - for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" "$tmpdepfile4" - do - test -f "$tmpdepfile" && break - done - if test -f "$tmpdepfile"; then - sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile" - # That's a tab and a space in the []. - sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile" - else - echo "#dummy" > "$depfile" - fi - rm -f "$tmpdepfile" - ;; + if test "$libtool" = yes; then + # Libtool generates 2 separate objects for the 2 libraries. These + # two compilations output dependencies in $dir.libs/$base.o.d and + # in $dir$base.o.d. We have to check for both files, because + # one of the two compilations can be disabled. We should prefer + # $dir$base.o.d over $dir.libs/$base.o.d because the latter is + # automatically cleaned when .libs/ is deleted, while ignoring + # the former would cause a distcleancheck panic. + tmpdepfile1=$dir$base.o.d # libtool 1.5 + tmpdepfile2=$dir.libs/$base.o.d # Likewise. + tmpdepfile3=$dir.libs/$base.d # Compaq CCC V6.2-504 + "$@" -Wc,-MD + else + tmpdepfile1=$dir$base.d + tmpdepfile2=$dir$base.d + tmpdepfile3=$dir$base.d + "$@" -MD + fi + + stat=$? + if test $stat -ne 0; then + rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" + exit $stat + fi + + for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" + do + test -f "$tmpdepfile" && break + done + # Same post-processing that is required for AIX mode. + aix_post_process_depfile + ;; msvc7) if test "$libtool" = yes; then @@ -424,8 +526,7 @@ "$@" $showIncludes > "$tmpdepfile" stat=$? grep -v '^Note: including file: ' "$tmpdepfile" - if test "$stat" = 0; then : - else + if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi @@ -443,14 +544,15 @@ p }' | $cygpath_u | sort -u | sed -n ' s/ /\\ /g -s/\(.*\)/ \1 \\/p +s/\(.*\)/'"$tab"'\1 \\/p s/.\(.*\) \\/\1:/ H $ { - s/.*/ / + s/.*/'"$tab"'/ G p }' >> "$depfile" + echo >> "$depfile" # make sure the fragment doesn't end with a backslash rm -f "$tmpdepfile" ;; @@ -478,7 +580,7 @@ shift fi - # Remove `-o $object'. + # Remove '-o $object'. 
IFS=" " for arg do @@ -498,18 +600,18 @@ done test -z "$dashmflag" && dashmflag=-M - # Require at least two characters before searching for `:' + # Require at least two characters before searching for ':' # in the target name. This is to cope with DOS-style filenames: - # a dependency such as `c:/foo/bar' could be seen as target `c' otherwise. + # a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise. "$@" $dashmflag | - sed 's:^[ ]*[^: ][^:][^:]*\:[ ]*:'"$object"'\: :' > "$tmpdepfile" + sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile" rm -f "$depfile" cat < "$tmpdepfile" > "$depfile" - tr ' ' ' -' < "$tmpdepfile" | \ -## Some versions of the HPUX 10.20 sed can't process this invocation -## correctly. Breaking it into two sed invocations is a workaround. - sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" + # Some versions of the HPUX 10.20 sed can't process this sed invocation + # correctly. Breaking it into two sed invocations is a workaround. + tr ' ' "$nl" < "$tmpdepfile" \ + | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \ + | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; @@ -562,11 +664,12 @@ # makedepend may prepend the VPATH from the source file name to the object. # No need to regex-escape $object, excess matching of '.' is harmless. sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile" - sed '1,2d' "$tmpdepfile" | tr ' ' ' -' | \ -## Some versions of the HPUX 10.20 sed can't process this invocation -## correctly. Breaking it into two sed invocations is a workaround. - sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" + # Some versions of the HPUX 10.20 sed can't process the last invocation + # correctly. Breaking it into two sed invocations is a workaround. + sed '1,2d' "$tmpdepfile" \ + | tr ' ' "$nl" \ + | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \ + | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" "$tmpdepfile".bak ;; @@ -583,7 +686,7 @@ shift fi - # Remove `-o $object'. + # Remove '-o $object'. IFS=" " for arg do @@ -602,10 +705,10 @@ esac done - "$@" -E | - sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ - -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' | - sed '$ s: \\$::' > "$tmpdepfile" + "$@" -E \ + | sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ + -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ + | sed '$ s: \\$::' > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" cat < "$tmpdepfile" >> "$depfile" @@ -637,23 +740,23 @@ shift ;; "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI") - set fnord "$@" - shift - shift - ;; + set fnord "$@" + shift + shift + ;; *) - set fnord "$@" "$arg" - shift - shift - ;; + set fnord "$@" "$arg" + shift + shift + ;; esac done "$@" -E 2>/dev/null | sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" - sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s:: \1 \\:p' >> "$depfile" - echo " " >> "$depfile" + sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile" + echo "$tab" >> "$depfile" sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile" rm -f "$tmpdepfile" ;; diff -Nru liblognorm-0.3.7/doc/changes.rst liblognorm-1.0.1/doc/changes.rst --- liblognorm-0.3.7/doc/changes.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/changes.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,7 @@ +ChangeLog +========= + +See below for a list of changes. + +.. 
literalinclude:: ../ChangeLog + diff -Nru liblognorm-0.3.7/doc/configuration.rst liblognorm-1.0.1/doc/configuration.rst --- liblognorm-0.3.7/doc/configuration.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/configuration.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,328 @@ +How to configure +================ + +To use liblognorm, you need 3 things. + +1. An installed and working copy of liblognorm. The installation process + has been discussed in the chapter :doc:`installation`. +2. Log files. +3. A rulebase, which is heart of liblognorm configuration. + +Log files +--------- + +A log file is a text file, which typically holds many lines. Each line is +a log message. These are usually a bit strange to read, thus to analyze. +This mostly happens, if you have a lot of different devices, that are all +creating log messages in a different format. + +Rulebase +-------- + +The rulebase holds all the schemes for your logs. It basically consists of +many lines that reflect the structure of your log messages. When the +normalization process is started, a parse-tree will be generated from +the rulebase and put into the memory. This will then be used to parse the +log messages. + +Each line in rulebase file is evaluated separately. + +Commentaries +------------ + +To keep your rulebase tidy, you can use commentaries. Start a commentary +with "#" like in many other configurations. It should look like this:: + + # The following prefix and rules are for firewall logs + +Empty lines are just skipped, they can be inserted for readability. + +Rules +----- + +If the line starts with 'rule=', then it contains a rule. This line has +following format:: + + rule=[[,...]]: + +Everything before a colon is treated as comma-separated list of tags, which +will be attached to a match. After the colon, match description should be +given. It consists of string literals and field selectors. String literals +should match exactly, whereas field selectors may match variable parts +of a message. + +A rule could look like this:: + + rule=:%date:date-rfc3164% %host:word% %tag:char-to:\x3a%: no longer listening on %ip:ipv4%#%port:number%' + +This excerpt is a common rule. A rule always contains several different +"parts"/properties and reflects the structure of the message you want to +normalize (e.g. Host, IP, Source, Syslogtag...). + +Literals +-------- + +Literal is just a sequence of characters, which must match exactly. +Percent sign characters must be escaped to prevent them from starting a +field accidentally. Replace each "%" with "\\x25" or "%%", when it occurs +in a string literal. + +Fields +------ + +The structure of a field selector is as follows:: + + %:[:]% + +field name -> that name can be selected freely. It should be a description +of what kind of information the field is holding, e.g. SRC is the field +contains the source IP address of the message. These names should also be +chosen carefully, since the field name can be used in every rule and +therefore should fit for the same kind of information in different rules. + +If field name is "-", then this field is matched but not saved. + +field type -> selects the accordant parser, which are described below. + +Special characters that need to be escaped when used inside a field +description are "%" and ":". For example, this will match anything up to +(but not including) a colon:: + + %variable:char-to:\x3a% + +Additional information is dependent on the field type; only some field +types need additional information. 
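+
+For illustration only (the message text and field names below are invented
+for this example), here is how a selector ties a variable part of a message
+to a name in the normalized result. Take this rule::
+
+  rule=:no longer listening on %ip:ipv4%#%port:number%
+
+Matched against the log line "no longer listening on 10.0.0.1#514", the
+normalization result is, roughly, a JSON hash like::
+
+  { "ip": "10.0.0.1", "port": "514" }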
+ +Field types +----------- + +number +###### + +One or more decimal digits. + +:: + + %port:number% + +word +#### + +One or more characters, up to the next space (\\x20), or +up to end of line. + +:: + + %host:word% + +char-to +####### + +One or more characters, up to the next character given in +extra data. Additional data must contain exactly one character, which +can be escaped. + +:: + + %field_name:char-to:,% + %field_name:char-to:\x25% + +char-sep +######## + +Zero or more characters, up to the next character given in extra data, or +up to end of line. Additional data must contain exactly one character, +which can be escaped. + +:: + + %field_name:char-sep:,% + %field_name:char-sep:\x25% + +rest +#### + +Zero or more characters till end of line. Should be always at end of the +rule. + +:: + + %field_name:rest% + +quoted-string +############# + +Zero or more characters, surrounded by double quote marks. +Quote marks are stripped from the match. + +:: + + %field_name:quoted-string% + +date-iso +######## + +Date in ISO format ('YYYY-MM-DD'). + +:: + + %field-name:date-iso% + +time-24hr +######### + +Time of format 'HH:MM:SS', where HH is 00..23. + +:: + + %time:time-24hr% + +time-12hr +######### + +Time of format 'HH:MM:SS', where HH is 00..12. + +:: + + %time:time-12hr% + +date-rfc3164 +############ + +Valid date/time in RFC3164 format, i.e.: 'Oct 29 09:47:08'. +This parser implements several quirks to match malformed +timestamps from some devices. + +:: + + %date:date-rfc3164% + +date-rfc5424 +############ + +Valid date/time in RFC5424 format, i.e.: +'1985-04-12T19:20:50.52-04:00'. +Slightly different formats are allowed. + +:: + + %date:date-rfc5424% + +ipv4 +#### + +IPv4 address, in dot-decimal notation (AAA.BBB.CCC.DDD). + +:: + + %ip-src:ipv4% + +iptables +######## + +Name=value pairs, separated by spaces, as in Netfilter log messages. +Name of the selector is not used; names from the line are +used instead. This selector always matches everything till +end of the line. Cannot match zero characters. + +:: + + %-:iptables% + +Prefixes +-------- + +Several rules can have a common prefix. You can set it once with this +syntax:: + + prefix= + +Prefix match description syntax is the same as rule match description. +Every following rule will be treated as an addition to this prefix. + +Prefix can be reset to default (empty value) by the line:: + + prefix= + +You can define a prefix for devices that produce the same header in each +message. We assume, that you have your rules sorted by device. In such a +case you can take the header of the rules and use it with the prefix +variable. Here is a example of a rule for IPTables:: + + prefix=%date:date-rfc3164% %host:word% %tag:char-to:-\x3a%: + rule=:INBOUND%INBOUND:char-to:-\x3a%: IN=%IN:word% PHYSIN=%PHYSIN:word% OUT=%OUT:word% PHYSOUT=%PHYSOUT:word% SRC=%source:ipv4% DST=%destination:ipv4% LEN=%LEN:number% TOS=%TOS:char-to: % PREC=%PREC:word% TTL=%TTL:number% ID=%ID:number% DF PROTO=%PROTO:word% SPT=%SPT:number% DPT=%DPT:number% WINDOW=%WINDOW:number% RES=0x00 ACK SYN URGP=%URGP:number% + +Usually, every rule would hold what is defined in the prefix at its +beginning. But since we can define the prefix, we can save that work in +every line and just make the rules for the log lines. This saves us a lot +of work and even saves space. + +In a rulebase you can use multiple prefixes obviously. The prefix will be +used for the following rules. 
If then another prefix is set, the first one +will be erased, and new one will be used for the following rules. + +Rule tags +--------- + +Rule tagging capability permits very easy classification of syslog +messages and log records in general. So you can not only extract data from +your various log source, you can also classify events, for example, as +being a "login", a "logout" or a firewall "denied access". This makes it +very easy to look at specific subsets of messages and process them in ways +specific to the information being conveyed. + +To see how it works, let’s first define what a tag is: + +A tag is a simple alphanumeric string that identifies a specific type of +object, action, status, etc. For example, we can have object tags for +firewalls and servers. For simplicity, let’s call them "firewall" and +"server". Then, we can have action tags like "login", "logout" and +"connectionOpen". Status tags could include "success" or "fail", among +others. Tags form a flat space, there is no inherent relationship between +them (but this may be added later on top of the current implementation). +Think of tags like the tag cloud in a blogging system. Tags can be defined +for any reason and need. A single event can be associated with as many +tags as required. + +Assigning tags to messages is simple. A rule contains both the sample of +the message (including the extracted fields) as well as the tags. +Have a look at this sample:: + + rule=:sshd[%pid:number%]: Invalid user %user:word% from %src-ip:ipv4% + +Here, we have a rule that shows an invalid ssh login request. The various +fields are used to extract information into a well-defined structure. Have +you ever wondered why every rule starts with a colon? Now, here is the +answer: the colon separates the tag part from the actual sample part. +Now, you can create a rule like this:: + + rule=ssh,user,login,fail:sshd[%pid:number%]: Invalid user %user:word% from %src-ip:ipv4% + +Note the "ssh,user,login,fail" part in front of the colon. These are the +four tags the user has decided to assign to this event. What now happens +is that the normalizer does not only extract the information from the +message if it finds a match, but it also adds the tags as metadata. Once +normalization is done, one can not only query the individual fields, but +also query if a specific tag is associated with this event. For example, +to find all ssh-related events (provided the rules are built that way), +you can normalize a large log and select only that subset of the +normalized log that contains the tag "ssh". + +Log annotations +--------------- + +In short, annotations allow to add arbitrary attributes to a parsed +message, depending on rule tags. Values of these attributes are fixed, +they cannot be derived from variable fields. Syntax is as following:: + + annotate=:+="" + +Field value should always be enclosed in double quote marks. + +There can be multiple annotations for the same tag. + +Examples +-------- + +Look at :doc:`sample rulebase ` for configuration +examples and matching log lines. diff -Nru liblognorm-0.3.7/doc/conf.py liblognorm-1.0.1/doc/conf.py --- liblognorm-0.3.7/doc/conf.py 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/conf.py 2014-04-11 04:29:00.000000000 +0000 @@ -0,0 +1,244 @@ +# -*- coding: utf-8 -*- +# +# Liblognorm documentation build configuration file, created by +# sphinx-quickstart on Mon Dec 16 13:12:44 2013. +# +# This file is execfile()d with the current directory set to its containing dir. 
+# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys, os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Liblognorm' +copyright = u'Adiscon' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '1.0' +# The full version, including alpha/beta/rc tags. +release = '1.0.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'haiku' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None +html_title = "A fast log normalization library" + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None +html_short_title = project + " " + release + " documentation" + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +html_use_index = False + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +html_show_copyright = False + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Liblognormdoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ('index', 'Liblognorm.tex', u'Liblognorm Documentation', + u'Pavel Levshin', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ('index', 'liblognorm', u'Liblognorm Documentation', + [u'Pavel Levshin'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------------ + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'Liblognorm', u'Liblognorm Documentation', + u'Pavel Levshin', 'Liblognorm', 'Fast log normalization library.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' diff -Nru liblognorm-0.3.7/doc/contacts.rst liblognorm-1.0.1/doc/contacts.rst --- liblognorm-0.3.7/doc/contacts.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/contacts.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,28 @@ +Contacts +======== + +Mailing list +------------ + +If you have any questions about the library, you may write to the project +mailing list lognorm@lists.adiscon.com. + +To subscribe: http://lists.adiscon.net/mailman/listinfo/lognorm + +Web site +-------- + +http://www.liblognorm.com/ + +Git repositories +---------------- + +- https://github.com/rsyslog/liblognorm.git +- git://git.adiscon.com/git/liblognorm.git + +Authors +------- + +Rainer Gerhards , Adiscon GmbH + + His blog: http://blog.gerhards.net/ Binary files /tmp/fsmu0K9RTf/liblognorm-0.3.7/doc/graph.png and /tmp/t8WaGWJqAO/liblognorm-1.0.1/doc/graph.png differ diff -Nru liblognorm-0.3.7/doc/index.rst liblognorm-1.0.1/doc/index.rst --- liblognorm-0.3.7/doc/index.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/index.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,30 @@ +.. Liblognorm documentation master file, created by + sphinx-quickstart on Mon Dec 16 13:12:44 2013. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to Liblognorm! +====================== + +Search +------ + +* :ref:`search` + +Contents: +--------- + +.. toctree:: + :maxdepth: 3 + + introduction + installation + configuration + sample_rulebase + lognormalizer + libraryapi + internals + license + contacts + changes + diff -Nru liblognorm-0.3.7/doc/installation.rst liblognorm-1.0.1/doc/installation.rst --- liblognorm-0.3.7/doc/installation.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/installation.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,76 @@ +How to install +============== + +Here you can find the first steps to install and try liblognorm. + +Getting liblognorm +------------------ + +There are several ways to install liblognorm. You can install it +from your distribution, if it provides a package. You can get binary packages from +Rsyslog repositories: + +- `RedHat Enterprise Linux or CentOS `_ +- `Ubuntu `_ +- `Debian `_ + +Or you can build your own binaries from sources. You can fetch all +sources from git (below you can find all commands you need) or you can +download them as tarballs at: + +- `libestr `_ +- `liblognorm `_ + +Please note that if you compile from tarballs, you have to perform the same +steps as mentioned below, apart from:: + + $ git clone ... + $ autoreconf -vfi
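For illustration, a tarball-based build then boils down to the usual autotools sequence. This is a minimal sketch assuming the standard flow; the archive name below is only an example, use the version you actually downloaded::

    $ tar xzf liblognorm-1.0.1.tar.gz
    $ cd liblognorm-1.0.1
    $ ./configure
    $ make
    $ sudo make install

The prerequisites libestr and json-c still have to be installed first, as described in the next section.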
+Building from git +----------------- + +To build liblognorm from sources, you need to have +`json-c `_ installed. + +Open a terminal and switch to the folder where you want to build +liblognorm. Below you will find the necessary commands. First, build +and install the prerequisite library **libestr**:: + + $ git clone git://git.adiscon.com/git/libestr.git + $ cd libestr + $ autoreconf -vfi + $ ./configure + $ make + $ sudo make install + +Leave that folder and repeat these steps for liblognorm:: + + $ cd .. + $ git clone git://git.adiscon.com/git/liblognorm.git + $ cd liblognorm + $ autoreconf -vfi + $ ./configure + $ make + $ sudo make install + +That’s all you have to do. + +Testing +------- + +For a first test we need two further things: a test log and a rulebase. +Both can be downloaded `here +`_. + +After downloading these examples you can use liblognorm. Go to +liblognorm/src and use the command below:: + + $ ./lognormalize -r messages.sampdb -o json ` tool to +debug. diff -Nru liblognorm-0.3.7/doc/introduction.rst liblognorm-1.0.1/doc/introduction.rst --- liblognorm-0.3.7/doc/introduction.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/introduction.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,34 @@ +Introduction +============ + +Briefly described, liblognorm is a tool to normalize log data. + +People who need to take a look at logs often have a common problem. Logs +from different machines (from different vendors) usually have different +formats. Even if it is the same type of log (e.g. from firewalls), the log +entries differ so much that it is pretty hard to read them. This is +where liblognorm comes into play. With this tool you can normalize all +your logs. All you need is liblognorm, its dependencies and a sample +database that fits the logs you want to normalize. + +So, for example, if you have traffic logs from three different firewalls, +liblognorm will be able to "normalize" the events into generic ones. Among +others, it will extract source and destination IP addresses and ports and +make them available via well-defined fields. As the end result, a common log +analysis application will be able to work on that common set, and so this +backend will be independent of the actual firewalls feeding it. Even +better, once we have a well-understood interim format, it is also easy to +convert that into any other vendor-specific format, so that you can use that +vendor's analysis tool. + +By design, liblognorm is constructed as a library. Thus, it can be used by +other tools. + +In short, liblognorm works by: + + 1. Matching a line to a rule from a predefined configuration; + 2. Picking out variable fields from the line; + 3. Returning them as a JSON hash object. + +Then, a consumer of this object can construct a new, normalized log line +on its own. diff -Nru liblognorm-0.3.7/doc/libraryapi.rst liblognorm-1.0.1/doc/libraryapi.rst --- liblognorm-0.3.7/doc/libraryapi.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/libraryapi.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,10 @@ +Library API +=========== + +To use the library, include liblognorm.h (which is quoted below) in your code. +The API is fairly simple and hardly needs further explanation; a short usage sketch follows below. + +.. literalinclude:: ../src/liblognorm.h + :start-after: #define LIBLOGNORM_H_INCLUDED + :end-before: #endif + :language: c
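For illustration, a minimal normalization program could look roughly like the sketch below. It assumes the 1.x interface declared in liblognorm.h (ln_initCtx(), ln_loadSamples(), ln_normalize(), ln_exitCtx()) together with json-c for the result object; check the quoted header for the exact signatures and return codes. The rulebase file name and log line are made up for this example::

    #include <stdio.h>
    #include <string.h>
    #include <json.h>        /* json-c; the header path may differ (e.g. <json-c/json.h>) */
    #include <liblognorm.h>

    int main(void)
    {
        char line[] = "2012-10-11 src=127.0.0.1 dst=88.111.222.19";
        struct json_object *json = NULL;
        ln_ctx ctx = ln_initCtx();                /* create a normalization context */

        if (ctx == NULL)
            return 1;
        if (ln_loadSamples(ctx, "sample.rulebase") != 0) {   /* example rulebase name */
            ln_exitCtx(ctx);
            return 1;
        }
        if (ln_normalize(ctx, line, strlen(line), &json) == 0 && json != NULL) {
            puts(json_object_to_json_string(json));   /* print the normalized event */
            json_object_put(json);                    /* release the json-c object */
        }
        ln_exitCtx(ctx);                          /* free the context */
        return 0;
    }

Such a program would be compiled and linked against liblognorm and json-c, typically via the lognorm pkg-config module shipped by this package (see lognorm.pc.in further below).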
diff -Nru liblognorm-0.3.7/doc/license.rst liblognorm-1.0.1/doc/license.rst --- liblognorm-0.3.7/doc/license.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/license.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,9 @@ +Licensing +========= + +Liblognorm is available under the terms of the GNU LGPL v2.1 or above (full +text below). + +.. literalinclude:: ../COPYING + + diff -Nru liblognorm-0.3.7/doc/lognormalizer.rst liblognorm-1.0.1/doc/lognormalizer.rst --- liblognorm-0.3.7/doc/lognormalizer.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/lognormalizer.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,148 @@ +Lognormalizer +============= + +Lognormalizer is a sample tool which is often used to test and debug +rulebases before real use. Nevertheless, it can be used in production as +a simple command line interface to liblognorm. + +This tool reads log lines from its standard input and prints results +to standard output. You need to use redirections if you want to read +or write files. + +An example of the command:: + + $ lognormalizer -r messages.sampdb -o json + +:: + + -r + +Specifies the name of the file containing the rulebase. + +:: + + -v + +Increases the verbosity level. Can be used several times. + +:: + + -p + +Prints only successfully parsed messages. + +:: + + -t + +Prints only those messages which have this tag. + +:: + + -e + +Output format. By default, output is in Mitre CEE format. With this option, you can change it to JSON, XML or CSV. + +:: + + -T + +Includes the 'event.tags' attribute when output is in JSON format. This attribute contains the list of tags of the matched +rule. + +:: + + -E + +Encoder-specific data. For CSV, it is the list of fields to be output, +separated by comma or space. It is currently unused for other formats. + +:: + + -d + +Generates a DOT file describing the parse tree. It can be used to plot the parse graph +with GraphViz. + +Examples +-------- + +These examples were created using the sample rulebase from the source package. + +Default (CEE) output:: + + $ lognormalizer -r rulebases/sample.rulebase + Weight: 42kg + [cee@115 event.tags="tag2" unit="kg" N="42" fat="free"] + Snow White and the Seven Dwarfs + [cee@115 event.tags="tale" company="the Seven Dwarfs"] + 2012-10-11 src=127.0.0.1 dst=88.111.222.19 + [cee@115 dst="88.111.222.19" src="127.0.0.1" date="2012-10-11"] + +JSON output, flat tags enabled:: + + $ lognormalizer -r rulebases/sample.rulebase -e json -T + %% + { "event.tags": [ "tag3", "percent" ], "percent": "100", "part": "wha", "whole": "whale" } + Weight: 42kg + { "unit": "kg", "N": "42", "event.tags": [ "tag2" ], "fat": "free" } + +CSV output with fixed field list:: + + $ lognormalizer -r rulebases/sample.rulebase -e csv -E'N unit' + Weight: 42kg + "42","kg" + Weight: 115lbs + "115","lbs" + Anything not matching the rule + , + +Creating a graph of the rulebase +-------------------------------- + +To get a better overview of a rulebase you can create a graph that shows you +the chain of normalization (parse-tree). + +First, you have to install an additional package called graphviz. Graphviz +is a tool that creates such a graph with the help of a control file (created +from the rulebase). `Here `_ you will find more +information about graphviz. + +To install it you can use the package manager.
For example, on RedHat +systems it is the yum command:: + + $ sudo yum install graphviz + +The next step is creating the control file for graphviz. To do so, we +use the normalizer command with the options -d "preferred filename for the +control file" and -r "rulebase":: + + $ lognormalize -d control.dot -r messages.rb + +Please note that there is no need for an input or output file. +If you have a look at the control file now, you will see that the content is +a little bit confusing, but it includes all the information, like the nodes, +fields and parsers, that graphviz needs to create the graph. Of course you +can edit that file, but please note that it is a lot of work. + +Now we can create the graph by typing:: + + $ dot control.dot -Tpng >graph.png + +Here, dot is the graphviz tool, control.dot is the name of the control file, +the -T option selects the output file format and graph.png is the output file. + +That is just one example of using graphviz; of course you can do many +other great things with it. But this "simple" graph can be very +helpful when working with the normalizer. + +Below you see a sample of such a graph, but please note that it is +not a pretty one. Such a graph can grow very quickly as you edit your +rulebase. + +.. figure:: graph.png + :width: 90 % + :alt: graph sample + diff -Nru liblognorm-0.3.7/doc/Makefile.am liblognorm-1.0.1/doc/Makefile.am --- liblognorm-0.3.7/doc/Makefile.am 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/Makefile.am 2014-04-11 04:29:00.000000000 +0000 @@ -0,0 +1,52 @@ +EXTRA_DIST = _static _templates conf.py \ + index.rst introduction.rst installation.rst \ + configuration.rst sample_rulebase.rst internals.rst \ + contacts.rst changes.rst libraryapi.rst \ + lognormalizer.rst license.rst graph.png + +htmldir = $(docdir) +built_html = _build/html + +#html_DATA = $(built_html)/index.html + +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = -n -W -c $(srcdir) +#SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(SPHINXOPTS) $(srcdir) + +.PHONY: clean-local html-local man-local all-local dist-hook install-data-hook + +dist-hook: + find $(distdir)/ -name .gitignore | xargs rm -f + +clean-local: + -rm -rf $(BUILDDIR)/* + +html-local: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +man-local: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +all-local: html-local + +install-data-hook: + find $(built_html) -type f -printf "%P\n" | \ + while read file; do \ + echo " $(INSTALL_DATA) -D $(built_html)/$$file '$(DESTDIR)$(htmldir)/$$file'"; \ + $(INSTALL_DATA) -D $(built_html)/$$file "$(DESTDIR)$(htmldir)/$$file" || exit $$?; \ + done + +uninstall-local: + -rm -rf "$(DESTDIR)$(htmldir)" diff -Nru liblognorm-0.3.7/doc/Makefile.in liblognorm-1.0.1/doc/Makefile.in --- liblognorm-0.3.7/doc/Makefile.in 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/Makefile.in 2014-04-11 04:30:18.000000000 +0000 @@ -0,0 +1,487 @@ +# Makefile.in generated by automake 1.13.4 from Makefile.am. +# @configure_input@ + +# Copyright (C) 1994-2013 Free Software Foundation, Inc. + +# This Makefile.in is free software; the Free Software Foundation +# gives unlimited permission to copy and/or distribute it, +# with or without modifications, as long as this notice is preserved.
+ +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY, to the extent permitted by law; without +# even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. + +@SET_MAKE@ +VPATH = @srcdir@ +am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' +am__make_running_with_option = \ + case $${target_option-} in \ + ?) ;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) +pkgdatadir = $(datadir)/@PACKAGE@ +pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ +am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd +install_sh_DATA = $(install_sh) -c -m 644 +install_sh_PROGRAM = $(install_sh) -c +install_sh_SCRIPT = $(install_sh) -c +INSTALL_HEADER = $(INSTALL_DATA) +transform = $(program_transform_name) +NORMAL_INSTALL = : +PRE_INSTALL = : +POST_INSTALL = : +NORMAL_UNINSTALL = : +PRE_UNINSTALL = : +POST_UNINSTALL = : +build_triplet = @build@ +host_triplet = @host@ +subdir = doc +DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am +ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ + $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ + $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ + $(top_srcdir)/configure.ac +am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ + $(ACLOCAL_M4) +mkinstalldirs = $(install_sh) -d +CONFIG_HEADER = $(top_builddir)/config.h +CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = +SOURCES = +DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +ACLOCAL = @ACLOCAL@ +AMTAR = @AMTAR@ +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ +AR = @AR@ +AUTOCONF = @AUTOCONF@ +AUTOHEADER = @AUTOHEADER@ +AUTOMAKE = @AUTOMAKE@ +AWK = @AWK@ +CC = @CC@ +CCDEPMODE = 
@CCDEPMODE@ +CFLAGS = @CFLAGS@ +CPP = @CPP@ +CPPFLAGS = @CPPFLAGS@ +CYGPATH_W = @CYGPATH_W@ +DEFS = @DEFS@ +DEPDIR = @DEPDIR@ +DLLTOOL = @DLLTOOL@ +DSYMUTIL = @DSYMUTIL@ +DUMPBIN = @DUMPBIN@ +ECHO_C = @ECHO_C@ +ECHO_N = @ECHO_N@ +ECHO_T = @ECHO_T@ +EGREP = @EGREP@ +EXEEXT = @EXEEXT@ +FGREP = @FGREP@ +GREP = @GREP@ +INSTALL = @INSTALL@ +INSTALL_DATA = @INSTALL_DATA@ +INSTALL_PROGRAM = @INSTALL_PROGRAM@ +INSTALL_SCRIPT = @INSTALL_SCRIPT@ +INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +JSON_C_CFLAGS = @JSON_C_CFLAGS@ +JSON_C_LIBS = @JSON_C_LIBS@ +LD = @LD@ +LDFLAGS = @LDFLAGS@ +LIBESTR_CFLAGS = @LIBESTR_CFLAGS@ +LIBESTR_LIBS = @LIBESTR_LIBS@ +LIBLOGNORM_CFLAGS = @LIBLOGNORM_CFLAGS@ +LIBLOGNORM_LIBS = @LIBLOGNORM_LIBS@ +LIBOBJS = @LIBOBJS@ +LIBS = @LIBS@ +LIBTOOL = @LIBTOOL@ +LIPO = @LIPO@ +LN_S = @LN_S@ +LTLIBOBJS = @LTLIBOBJS@ +MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ +MKDIR_P = @MKDIR_P@ +NM = @NM@ +NMEDIT = @NMEDIT@ +OBJDUMP = @OBJDUMP@ +OBJEXT = @OBJEXT@ +OTOOL = @OTOOL@ +OTOOL64 = @OTOOL64@ +PACKAGE = @PACKAGE@ +PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ +PACKAGE_NAME = @PACKAGE_NAME@ +PACKAGE_STRING = @PACKAGE_STRING@ +PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PATH_SEPARATOR = @PATH_SEPARATOR@ +PKG_CONFIG = @PKG_CONFIG@ +PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ +PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ +RANLIB = @RANLIB@ +SED = @SED@ +SET_MAKE = @SET_MAKE@ +SHELL = @SHELL@ +SPHINXBUILD = @SPHINXBUILD@ +STRIP = @STRIP@ +VERSION = @VERSION@ +abs_builddir = @abs_builddir@ +abs_srcdir = @abs_srcdir@ +abs_top_builddir = @abs_top_builddir@ +abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ +ac_ct_CC = @ac_ct_CC@ +ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ +am__include = @am__include@ +am__leading_dot = @am__leading_dot@ +am__quote = @am__quote@ +am__tar = @am__tar@ +am__untar = @am__untar@ +bindir = @bindir@ +build = @build@ +build_alias = @build_alias@ +build_cpu = @build_cpu@ +build_os = @build_os@ +build_vendor = @build_vendor@ +builddir = @builddir@ +datadir = @datadir@ +datarootdir = @datarootdir@ +docdir = @docdir@ +dvidir = @dvidir@ +exec_prefix = @exec_prefix@ +host = @host@ +host_alias = @host_alias@ +host_cpu = @host_cpu@ +host_os = @host_os@ +host_vendor = @host_vendor@ +htmldir = $(docdir) +includedir = @includedir@ +infodir = @infodir@ +install_sh = @install_sh@ +libdir = @libdir@ +libexecdir = @libexecdir@ +localedir = @localedir@ +localstatedir = @localstatedir@ +mandir = @mandir@ +mkdir_p = @mkdir_p@ +oldincludedir = @oldincludedir@ +pdfdir = @pdfdir@ +prefix = @prefix@ +program_transform_name = @program_transform_name@ +psdir = @psdir@ +sbindir = @sbindir@ +sharedstatedir = @sharedstatedir@ +srcdir = @srcdir@ +sysconfdir = @sysconfdir@ +target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ +top_builddir = @top_builddir@ +top_srcdir = @top_srcdir@ +EXTRA_DIST = _static _templates conf.py \ + index.rst introduction.rst installation.rst \ + configuration.rst sample_rulebase.rst internals.rst \ + contacts.rst changes.rst libraryapi.rst \ + lognormalizer.rst license.rst graph.png + +built_html = _build/html + +#html_DATA = $(built_html)/index.html + +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = -n -W -c $(srcdir) +#SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. 
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(SPHINXOPTS) $(srcdir) +all: all-am + +.SUFFIXES: +$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) + @for dep in $?; do \ + case '$(am__configure_deps)' in \ + *$$dep*) \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ + exit 1;; \ + esac; \ + done; \ + echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --gnu doc/Makefile +.PRECIOUS: Makefile +Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status + @case '$?' in \ + *config.status*) \ + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ + *) \ + echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ + cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ + esac; + +$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh + +$(top_srcdir)/configure: $(am__configure_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(ACLOCAL_M4): $(am__aclocal_m4_deps) + cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): + +mostlyclean-libtool: + -rm -f *.lo + +clean-libtool: + -rm -rf .libs _libs +tags TAGS: + +ctags CTAGS: + +cscope cscopelist: + + +distdir: $(DISTFILES) + @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ + list='$(DISTFILES)'; \ + dist_files=`for file in $$list; do echo $$file; done | \ + sed -e "s|^$$srcdirstrip/||;t" \ + -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ + case $$dist_files in \ + */*) $(MKDIR_P) `echo "$$dist_files" | \ + sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ + sort -u` ;; \ + esac; \ + for file in $$dist_files; do \ + if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ + if test -d $$d/$$file; then \ + dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ + else \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ + || exit 1; \ + fi; \ + done + $(MAKE) $(AM_MAKEFLAGS) \ + top_distdir="$(top_distdir)" distdir="$(distdir)" \ + dist-hook +check-am: all-am +check: check-am +all-am: Makefile all-local +installdirs: +install: install-am +install-exec: install-exec-am +install-data: install-data-am +uninstall: uninstall-am + +install-am: all-am + @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am + +installcheck: installcheck-am +install-strip: + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi +mostlyclean-generic: + +clean-generic: + +distclean-generic: + -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) + +maintainer-clean-generic: + @echo "This command is intended for maintainers to use" + @echo "it deletes files that may require special tools to rebuild." +clean: clean-am + +clean-am: clean-generic clean-libtool clean-local mostlyclean-am + +distclean: distclean-am + -rm -f Makefile +distclean-am: clean-am distclean-generic + +dvi: dvi-am + +dvi-am: + +html: html-am + +html-am: html-local + +info: info-am + +info-am: + +install-data-am: + @$(NORMAL_INSTALL) + $(MAKE) $(AM_MAKEFLAGS) install-data-hook +install-dvi: install-dvi-am + +install-dvi-am: + +install-exec-am: + +install-html: install-html-am + +install-html-am: + +install-info: install-info-am + +install-info-am: + +install-man: + +install-pdf: install-pdf-am + +install-pdf-am: + +install-ps: install-ps-am + +install-ps-am: + +installcheck-am: + +maintainer-clean: maintainer-clean-am + -rm -f Makefile +maintainer-clean-am: distclean-am maintainer-clean-generic + +mostlyclean: mostlyclean-am + +mostlyclean-am: mostlyclean-generic mostlyclean-libtool + +pdf: pdf-am + +pdf-am: + +ps: ps-am + +ps-am: + +uninstall-am: uninstall-local + +.MAKE: install-am install-data-am install-strip + +.PHONY: all all-am all-local check check-am clean clean-generic \ + clean-libtool clean-local cscopelist-am ctags-am dist-hook \ + distclean distclean-generic distclean-libtool distdir dvi \ + dvi-am html html-am html-local info info-am install install-am \ + install-data install-data-am install-data-hook install-dvi \ + install-dvi-am install-exec install-exec-am install-html \ + install-html-am install-info install-info-am install-man \ + install-pdf install-pdf-am install-ps install-ps-am \ + install-strip installcheck installcheck-am installdirs \ + maintainer-clean maintainer-clean-generic mostlyclean \ + mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ + tags-am uninstall uninstall-am uninstall-local + + +.PHONY: clean-local html-local man-local all-local dist-hook install-data-hook + +dist-hook: + find $(distdir)/ -name .gitignore | xargs rm -f + +clean-local: + -rm -rf $(BUILDDIR)/* + +html-local: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +man-local: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +all-local: html-local + +install-data-hook: + find $(built_html) -type f -printf "%P\n" | \ + while read file; do \ + echo " $(INSTALL_DATA) -D $(built_html)/$$file '$(DESTDIR)$(htmldir)/$$file'"; \ + $(INSTALL_DATA) -D $(built_html)/$$file "$(DESTDIR)$(htmldir)/$$file" || exit $$?; \ + done + +uninstall-local: + -rm -rf "$(DESTDIR)$(htmldir)" + +# Tell versions [3.59,3.63) of GNU make to not export all variables. +# Otherwise a system limit (for SysV at least) may be exceeded. +.NOEXPORT: diff -Nru liblognorm-0.3.7/doc/sample_rulebase.rst liblognorm-1.0.1/doc/sample_rulebase.rst --- liblognorm-0.3.7/doc/sample_rulebase.rst 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/doc/sample_rulebase.rst 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,6 @@ +Sample rulebase +=============== + +.. 
literalinclude:: ../rulebases/sample.rulebase + :linenos: + diff -Nru liblognorm-0.3.7/INSTALL liblognorm-1.0.1/INSTALL --- liblognorm-0.3.7/INSTALL 2012-04-04 09:16:38.000000000 +0000 +++ liblognorm-1.0.1/INSTALL 2014-04-11 04:30:18.000000000 +0000 @@ -1,7 +1,7 @@ Installation Instructions ************************* -Copyright (C) 1994-1996, 1999-2002, 2004-2011 Free Software Foundation, +Copyright (C) 1994-1996, 1999-2002, 2004-2013 Free Software Foundation, Inc. Copying and distribution of this file, with or without modification, @@ -309,9 +309,10 @@ overridden in the site shell script). Unfortunately, this technique does not work for `CONFIG_SHELL' due to -an Autoconf bug. Until the bug is fixed you can use this workaround: +an Autoconf limitation. Until the limitation is lifted, you can use +this workaround: - CONFIG_SHELL=/bin/bash /bin/bash ./configure CONFIG_SHELL=/bin/bash + CONFIG_SHELL=/bin/bash ./configure CONFIG_SHELL=/bin/bash `configure' Invocation ====================== @@ -367,4 +368,3 @@ `configure' also accepts some other, not widely useful, options. Run `configure --help' for more details. - diff -Nru liblognorm-0.3.7/install-sh liblognorm-1.0.1/install-sh --- liblognorm-0.3.7/install-sh 2012-04-04 09:16:38.000000000 +0000 +++ liblognorm-1.0.1/install-sh 2014-04-11 04:30:18.000000000 +0000 @@ -1,7 +1,7 @@ #!/bin/sh # install - install a program, script, or datafile -scriptversion=2011-01-19.21; # UTC +scriptversion=2011-11-20.07; # UTC # This originates from X11R5 (mit/util/scripts/install.sh), which was # later released in X11R6 (xc/config/util/install.sh) with the @@ -35,7 +35,7 @@ # FSF changes to this file are in the public domain. # # Calling this script install-sh is preferred over install.sh, to prevent -# `make' implicit rules from creating a file called install from it +# 'make' implicit rules from creating a file called install from it # when there is no Makefile. # # This script is compatible with the BSD install script, but was written @@ -156,7 +156,7 @@ -s) stripcmd=$stripprog;; -t) dst_arg=$2 - # Protect names problematic for `test' and other utilities. + # Protect names problematic for 'test' and other utilities. case $dst_arg in -* | [=\(\)!]) dst_arg=./$dst_arg;; esac @@ -190,7 +190,7 @@ fi shift # arg dst_arg=$arg - # Protect names problematic for `test' and other utilities. + # Protect names problematic for 'test' and other utilities. case $dst_arg in -* | [=\(\)!]) dst_arg=./$dst_arg;; esac @@ -202,7 +202,7 @@ echo "$0: no input file specified." >&2 exit 1 fi - # It's OK to call `install-sh -d' without argument. + # It's OK to call 'install-sh -d' without argument. # This can happen when creating conditional directories. exit 0 fi @@ -240,7 +240,7 @@ for src do - # Protect names problematic for `test' and other utilities. + # Protect names problematic for 'test' and other utilities. case $src in -* | [=\(\)!]) src=./$src;; esac @@ -354,7 +354,7 @@ if test -z "$dir_arg" || { # Check for POSIX incompatibilities with -m. # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or - # other-writeable bit of parent directory when it shouldn't. + # other-writable bit of parent directory when it shouldn't. # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. 
ls_ld_tmpdir=`ls -ld "$tmpdir"` case $ls_ld_tmpdir in diff -Nru liblognorm-0.3.7/lognorm.pc.in liblognorm-1.0.1/lognorm.pc.in --- liblognorm-0.3.7/lognorm.pc.in 2012-04-04 09:11:54.000000000 +0000 +++ liblognorm-1.0.1/lognorm.pc.in 2014-01-22 15:52:06.000000000 +0000 @@ -6,5 +6,5 @@ Name: lognorm Description: fast samples-based log normalization library Version: @VERSION@ -Libs: -L${libdir} -llognorm -lee -lestr +Libs: -L${libdir} -llognorm -lestr Cflags: -I${includedir} diff -Nru liblognorm-0.3.7/ltmain.sh liblognorm-1.0.1/ltmain.sh --- liblognorm-0.3.7/ltmain.sh 2012-04-04 09:16:29.000000000 +0000 +++ liblognorm-1.0.1/ltmain.sh 2014-04-11 04:30:07.000000000 +0000 @@ -70,7 +70,7 @@ # compiler: $LTCC # compiler flags: $LTCFLAGS # linker: $LD (gnu? $with_gnu_ld) -# $progname: (GNU libtool) 2.4.2 Debian-2.4.2-1ubuntu1 +# $progname: (GNU libtool) 2.4.2 # automake: $automake_version # autoconf: $autoconf_version # @@ -80,7 +80,7 @@ PROGRAM=libtool PACKAGE=libtool -VERSION="2.4.2 Debian-2.4.2-1ubuntu1" +VERSION=2.4.2 TIMESTAMP="" package_revision=1.3337 @@ -6124,10 +6124,7 @@ case $pass in dlopen) libs="$dlfiles" ;; dlpreopen) libs="$dlprefiles" ;; - link) - libs="$deplibs %DEPLIBS%" - test "X$link_all_deplibs" != Xno && libs="$libs $dependency_libs" - ;; + link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; esac fi if test "$linkmode,$pass" = "lib,dlpreopen"; then @@ -6447,19 +6444,19 @@ # It is a libtool convenience library, so add in its objects. func_append convenience " $ladir/$objdir/$old_library" func_append old_convenience " $ladir/$objdir/$old_library" - tmp_libs= - for deplib in $dependency_libs; do - deplibs="$deplib $deplibs" - if $opt_preserve_dup_deps ; then - case "$tmp_libs " in - *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac - fi - func_append tmp_libs " $deplib" - done elif test "$linkmode" != prog && test "$linkmode" != lib; then func_fatal_error "\`$lib' is not a convenience library" fi + tmp_libs= + for deplib in $dependency_libs; do + deplibs="$deplib $deplibs" + if $opt_preserve_dup_deps ; then + case "$tmp_libs " in + *" $deplib "*) func_append specialdeplibs " $deplib" ;; + esac + fi + func_append tmp_libs " $deplib" + done continue fi # $pass = conv @@ -7352,9 +7349,6 @@ revision="$number_minor" lt_irix_increment=no ;; - *) - func_fatal_configuration "$modename: unknown library version type \`$version_type'" - ;; esac ;; no) diff -Nru liblognorm-0.3.7/m4/libtool.m4 liblognorm-1.0.1/m4/libtool.m4 --- liblognorm-0.3.7/m4/libtool.m4 2012-04-04 09:16:29.000000000 +0000 +++ liblognorm-1.0.1/m4/libtool.m4 2014-04-11 04:30:07.000000000 +0000 @@ -1312,7 +1312,7 @@ rm -rf conftest* ;; -x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ +x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext @@ -1326,7 +1326,10 @@ x86_64-*linux*) LD="${LD-ld} -m elf_i386" ;; - ppc64-*linux*|powerpc64-*linux*) + powerpc64le-*linux*) + LD="${LD-ld} -m elf32lppclinux" + ;; + powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) @@ -1345,7 +1348,10 @@ x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; - ppc*-*linux*|powerpc*-*linux*) + powerpcle-*linux*) + LD="${LD-ld} -m elf64lppc" + ;; + powerpc-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) @@ -2669,10 +2675,14 @@ # before this can be enabled. hardcode_into_libs=yes + # Add ABI-specific directories to the system library path. 
+ sys_lib_dlsearch_path_spec="/lib64 /usr/lib64 /lib /usr/lib" + # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` - sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" + sys_lib_dlsearch_path_spec="$sys_lib_dlsearch_path_spec $lt_ld_extra" + fi # We used to test for /lib/ld.so.1 and disable shared libraries on @@ -2684,18 +2694,6 @@ dynamic_linker='GNU/Linux ld.so' ;; -netbsdelf*-gnu) - version_type=linux - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='NetBSD ld.elf_so' - ;; - netbsd*) version_type=sunos need_lib_prefix=no @@ -3301,7 +3299,7 @@ lt_cv_deplibs_check_method=pass_all ;; -netbsd* | netbsdelf*-gnu) +netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' else @@ -4113,7 +4111,7 @@ ;; esac ;; - netbsd* | netbsdelf*-gnu) + netbsd*) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise @@ -4590,9 +4588,6 @@ ;; esac ;; - linux* | k*bsd*-gnu | gnu*) - _LT_TAGVAR(link_all_deplibs, $1)=no - ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; @@ -4655,9 +4650,6 @@ openbsd*) with_gnu_ld=no ;; - linux* | k*bsd*-gnu | gnu*) - _LT_TAGVAR(link_all_deplibs, $1)=no - ;; esac _LT_TAGVAR(ld_shlibs, $1)=yes @@ -4879,7 +4871,7 @@ fi ;; - netbsd* | netbsdelf*-gnu) + netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= @@ -5056,7 +5048,6 @@ if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi - _LT_TAGVAR(link_all_deplibs, $1)=no else # not using gcc if test "$host_cpu" = ia64; then @@ -5361,7 +5352,7 @@ _LT_TAGVAR(link_all_deplibs, $1)=yes ;; - netbsd* | netbsdelf*-gnu) + netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else diff -Nru liblognorm-0.3.7/Makefile.am liblognorm-1.0.1/Makefile.am --- liblognorm-0.3.7/Makefile.am 2012-04-04 09:11:54.000000000 +0000 +++ liblognorm-1.0.1/Makefile.am 2014-01-22 15:52:06.000000000 +0000 @@ -1,5 +1,9 @@ SUBDIRS = src +if ENABLE_DOCS + SUBDIRS += doc +endif +EXTRA_DIST = rulebases pkgconfigdir = $(libdir)/pkgconfig pkgconfig_DATA = lognorm.pc diff -Nru liblognorm-0.3.7/Makefile.in liblognorm-1.0.1/Makefile.in --- liblognorm-0.3.7/Makefile.in 2013-07-18 07:37:56.000000000 +0000 +++ liblognorm-1.0.1/Makefile.in 2014-04-11 04:30:18.000000000 +0000 @@ -1,9 +1,8 @@ -# Makefile.in generated by automake 1.11.3 from Makefile.am. +# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ -# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software -# Foundation, Inc. 
+# Copyright (C) 1994-2013 Free Software Foundation, Inc. + # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. @@ -16,6 +15,51 @@ @SET_MAKE@ VPATH = @srcdir@ +am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' +am__make_running_with_option = \ + case $${target_option-} in \ + ?) ;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ @@ -34,12 +78,13 @@ POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ +@ENABLE_DOCS_TRUE@am__append_1 = doc subdir = . 
-DIST_COMMON = README $(am__configure_deps) $(srcdir)/Makefile.am \ - $(srcdir)/Makefile.in $(srcdir)/config.h.in \ - $(srcdir)/lognorm.pc.in $(top_srcdir)/configure AUTHORS \ - COPYING ChangeLog INSTALL NEWS compile config.guess config.sub \ - depcomp install-sh ltmain.sh missing +DIST_COMMON = INSTALL NEWS README AUTHORS ChangeLog \ + $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ + $(top_srcdir)/configure $(am__configure_deps) \ + $(srcdir)/config.h.in $(srcdir)/lognorm.pc.in COPYING compile \ + config.guess config.sub depcomp install-sh missing ltmain.sh ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ @@ -53,21 +98,33 @@ CONFIG_HEADER = config.h CONFIG_CLEAN_FILES = lognorm.pc CONFIG_CLEAN_VPATH_FILES = +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) -am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ +am__v_at_1 = SOURCES = DIST_SOURCES = -RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ - html-recursive info-recursive install-data-recursive \ - install-dvi-recursive install-exec-recursive \ - install-html-recursive install-info-recursive \ - install-pdf-recursive install-ps-recursive install-recursive \ - installcheck-recursive installdirs-recursive pdf-recursive \ - ps-recursive uninstall-recursive +RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \ + ctags-recursive dvi-recursive html-recursive info-recursive \ + install-data-recursive install-dvi-recursive \ + install-exec-recursive install-html-recursive \ + install-info-recursive install-pdf-recursive \ + install-ps-recursive install-recursive installcheck-recursive \ + installdirs-recursive pdf-recursive ps-recursive \ + tags-recursive uninstall-recursive +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ @@ -99,12 +156,34 @@ DATA = $(pkgconfig_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive -AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \ - $(RECURSIVE_CLEAN_TARGETS:-recursive=) tags TAGS ctags CTAGS \ - distdir dist dist-all distcheck +am__recursive_targets = \ + $(RECURSIVE_TARGETS) \ + $(RECURSIVE_CLEAN_TARGETS) \ + $(am__extra_recursive_targets) +AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \ + cscope distdir dist dist-all distcheck +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) \ + $(LISP)config.h.in +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. 
+am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags -DIST_SUBDIRS = $(SUBDIRS) +CSCOPE = cscope +DIST_SUBDIRS = src doc DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) @@ -114,6 +193,7 @@ && rm -rf "$(distdir)" \ || { sleep 5 && rm -rf "$(distdir)"; }; \ else :; fi +am__post_remove_distdir = $(am__remove_distdir) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ @@ -141,6 +221,7 @@ reldir="$$dir2" DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best +DIST_TARGETS = dist-gzip distuninstallcheck_listfiles = find . -type f -print am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' @@ -176,10 +257,10 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +JSON_C_CFLAGS = @JSON_C_CFLAGS@ +JSON_C_LIBS = @JSON_C_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ -LIBEE_CFLAGS = @LIBEE_CFLAGS@ -LIBEE_LIBS = @LIBEE_LIBS@ LIBESTR_CFLAGS = @LIBESTR_CFLAGS@ LIBESTR_LIBS = @LIBESTR_LIBS@ LIBLOGNORM_CFLAGS = @LIBLOGNORM_CFLAGS@ @@ -214,6 +295,7 @@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ +SPHINXBUILD = @SPHINXBUILD@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ @@ -268,7 +350,8 @@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ -SUBDIRS = src +SUBDIRS = src $(am__append_1) +EXTRA_DIST = rulebases pkgconfigdir = $(libdir)/pkgconfig pkgconfig_DATA = lognorm.pc ACLOCAL_AMFLAGS = -I m4 @@ -338,8 +421,11 @@ -rm -f libtool config.lt install-pkgconfigDATA: $(pkgconfig_DATA) @$(NORMAL_INSTALL) - test -z "$(pkgconfigdir)" || $(MKDIR_P) "$(DESTDIR)$(pkgconfigdir)" @list='$(pkgconfig_DATA)'; test -n "$(pkgconfigdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(pkgconfigdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(pkgconfigdir)" || exit 1; \ + fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ @@ -356,22 +442,25 @@ dir='$(DESTDIR)$(pkgconfigdir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd -# into them and run `make' without going through this Makefile. -# To change the values of `make' variables: instead of editing Makefiles, -# (1) if the variable is set in `config.status', edit `config.status' -# (which will cause the Makefiles to be regenerated when you run `make'); -# (2) otherwise, pass the desired values on the `make' command line. -$(RECURSIVE_TARGETS): - @fail= failcom='exit 1'; \ - for f in x $$MAKEFLAGS; do \ - case $$f in \ - *=* | --[!k]*);; \ - *k*) failcom='fail=yes';; \ - esac; \ - done; \ +# into them and run 'make' without going through this Makefile. +# To change the values of 'make' variables: instead of editing Makefiles, +# (1) if the variable is set in 'config.status', edit 'config.status' +# (which will cause the Makefiles to be regenerated when you run 'make'); +# (2) otherwise, pass the desired values on the 'make' command line. 
+$(am__recursive_targets): + @fail=; \ + if $(am__make_keepgoing); then \ + failcom='fail=yes'; \ + else \ + failcom='exit 1'; \ + fi; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ - list='$(SUBDIRS)'; for subdir in $$list; do \ + case "$@" in \ + distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ + *) list='$(SUBDIRS)' ;; \ + esac; \ + for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ @@ -386,57 +475,12 @@ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" -$(RECURSIVE_CLEAN_TARGETS): - @fail= failcom='exit 1'; \ - for f in x $$MAKEFLAGS; do \ - case $$f in \ - *=* | --[!k]*);; \ - *k*) failcom='fail=yes';; \ - esac; \ - done; \ - dot_seen=no; \ - case "$@" in \ - distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ - *) list='$(SUBDIRS)' ;; \ - esac; \ - rev=''; for subdir in $$list; do \ - if test "$$subdir" = "."; then :; else \ - rev="$$subdir $$rev"; \ - fi; \ - done; \ - rev="$$rev ."; \ - target=`echo $@ | sed s/-recursive//`; \ - for subdir in $$rev; do \ - echo "Making $$target in $$subdir"; \ - if test "$$subdir" = "."; then \ - local_target="$$target-am"; \ - else \ - local_target="$$target"; \ - fi; \ - ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ - || eval $$failcom; \ - done && test -z "$$fail" -tags-recursive: - list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ - done -ctags-recursive: - list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ - done - -ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - mkid -fID $$unique -tags: TAGS +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-recursive +TAGS: tags -TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ @@ -452,12 +496,7 @@ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ - list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ + $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ @@ -469,15 +508,11 @@ $$unique; \ fi; \ fi -ctags: CTAGS -CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ +ctags: ctags-recursive + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) 
$(CTAGS_ARGS) \ $$unique @@ -486,9 +521,31 @@ here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" +cscope: cscope.files + test ! -s cscope.files \ + || $(CSCOPE) -b -q $(AM_CSCOPEFLAGS) $(CSCOPEFLAGS) -i cscope.files $(CSCOPE_ARGS) +clean-cscope: + -rm -f cscope.files +cscope.files: clean-cscope cscopelist +cscopelist: cscopelist-recursive + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags + -rm -f cscope.out cscope.in.out cscope.po.out cscope.files distdir: $(DISTFILES) $(am__remove_distdir) @@ -524,13 +581,10 @@ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ - test -d "$(distdir)/$$subdir" \ - || $(MKDIR_P) "$(distdir)/$$subdir" \ - || exit 1; \ - fi; \ - done - @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ + $(am__make_dryrun) \ + || test -d "$(distdir)/$$subdir" \ + || $(MKDIR_P) "$(distdir)/$$subdir" \ + || exit 1; \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ @@ -559,40 +613,36 @@ || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz - $(am__remove_distdir) + $(am__post_remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 - $(am__remove_distdir) + $(am__post_remove_distdir) dist-lzip: distdir tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz - $(am__remove_distdir) - -dist-lzma: distdir - tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma - $(am__remove_distdir) + $(am__post_remove_distdir) dist-xz: distdir tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz - $(am__remove_distdir) + $(am__post_remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z - $(am__remove_distdir) + $(am__post_remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz - $(am__remove_distdir) + $(am__post_remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) - $(am__remove_distdir) + $(am__post_remove_distdir) -dist dist-all: distdir - tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz - $(am__remove_distdir) +dist dist-all: + $(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:' + $(am__post_remove_distdir) # This target untars the dist file and tries a VPATH configuration. 
Then # it guarantees that the distribution is self-contained by making another @@ -603,8 +653,6 @@ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ - *.tar.lzma*) \ - lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\ *.tar.lz*) \ lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ *.tar.xz*) \ @@ -616,9 +664,9 @@ *.zip*) \ unzip $(distdir).zip ;;\ esac - chmod -R a-w $(distdir); chmod a+w $(distdir) - mkdir $(distdir)/_build - mkdir $(distdir)/_inst + chmod -R a-w $(distdir) + chmod u+w $(distdir) + mkdir $(distdir)/_build $(distdir)/_inst chmod a-w $(distdir) test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ @@ -650,7 +698,7 @@ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ && cd "$$am__cwd" \ || exit 1 - $(am__remove_distdir) + $(am__post_remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' @@ -788,25 +836,24 @@ uninstall-am: uninstall-pkgconfigDATA -.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) all \ - ctags-recursive install-am install-strip tags-recursive +.MAKE: $(am__recursive_targets) all install-am install-strip -.PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ - all all-am am--refresh check check-am clean clean-generic \ - clean-libtool ctags ctags-recursive dist dist-all dist-bzip2 \ - dist-gzip dist-lzip dist-lzma dist-shar dist-tarZ dist-xz \ - dist-zip distcheck distclean distclean-generic distclean-hdr \ - distclean-libtool distclean-tags distcleancheck distdir \ - distuninstallcheck dvi dvi-am html html-am info info-am \ - install install-am install-data install-data-am install-dvi \ - install-dvi-am install-exec install-exec-am install-html \ - install-html-am install-info install-info-am install-man \ - install-pdf install-pdf-am install-pkgconfigDATA install-ps \ - install-ps-am install-strip installcheck installcheck-am \ - installdirs installdirs-am maintainer-clean \ +.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \ + am--refresh check check-am clean clean-cscope clean-generic \ + clean-libtool cscope cscopelist-am ctags ctags-am dist \ + dist-all dist-bzip2 dist-gzip dist-lzip dist-shar dist-tarZ \ + dist-xz dist-zip distcheck distclean distclean-generic \ + distclean-hdr distclean-libtool distclean-tags distcleancheck \ + distdir distuninstallcheck dvi dvi-am html html-am info \ + info-am install install-am install-data install-data-am \ + install-dvi install-dvi-am install-exec install-exec-am \ + install-html install-html-am install-info install-info-am \ + install-man install-pdf install-pdf-am install-pkgconfigDATA \ + install-ps install-ps-am install-strip installcheck \ + installcheck-am installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ - mostlyclean-libtool pdf pdf-am ps ps-am tags tags-recursive \ - uninstall uninstall-am uninstall-pkgconfigDATA + mostlyclean-libtool pdf pdf-am ps ps-am tags tags-am uninstall \ + uninstall-am uninstall-pkgconfigDATA # Tell versions [3.59,3.63) of GNU make to not export all variables. diff -Nru liblognorm-0.3.7/missing liblognorm-1.0.1/missing --- liblognorm-0.3.7/missing 2012-04-04 09:16:38.000000000 +0000 +++ liblognorm-1.0.1/missing 2014-04-11 04:30:18.000000000 +0000 @@ -1,11 +1,10 @@ #! 
/bin/sh -# Common stub for a few missing GNU programs while installing. +# Common wrapper for a few potentially missing GNU programs. -scriptversion=2012-01-06.13; # UTC +scriptversion=2012-06-26.16; # UTC -# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006, -# 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc. -# Originally by Fran,cois Pinard , 1996. +# Copyright (C) 1996-2013 Free Software Foundation, Inc. +# Originally written by Fran,cois Pinard , 1996. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -26,68 +25,40 @@ # the same distribution terms that you use for the rest of that program. if test $# -eq 0; then - echo 1>&2 "Try \`$0 --help' for more information" + echo 1>&2 "Try '$0 --help' for more information" exit 1 fi -run=: -sed_output='s/.* --output[ =]\([^ ]*\).*/\1/p' -sed_minuso='s/.* -o \([^ ]*\).*/\1/p' - -# In the cases where this matters, `missing' is being run in the -# srcdir already. -if test -f configure.ac; then - configure_ac=configure.ac -else - configure_ac=configure.in -fi +case $1 in -msg="missing on your system" + --is-lightweight) + # Used by our autoconf macros to check whether the available missing + # script is modern enough. + exit 0 + ;; -case $1 in ---run) - # Try to run requested program, and just exit if it succeeds. - run= - shift - "$@" && exit 0 - # Exit code 63 means version mismatch. This often happens - # when the user try to use an ancient version of a tool on - # a file that requires a minimum version. In this case we - # we should proceed has if the program had been absent, or - # if --run hadn't been passed. - if test $? = 63; then - run=: - msg="probably too old" - fi - ;; + --run) + # Back-compat with the calling convention used by older automake. + shift + ;; -h|--h|--he|--hel|--help) echo "\ $0 [OPTION]... PROGRAM [ARGUMENT]... -Handle \`PROGRAM [ARGUMENT]...' for when PROGRAM is missing, or return an -error status if there is no known handling for PROGRAM. +Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due +to PROGRAM being missing or too old. Options: -h, --help display this help and exit -v, --version output version information and exit - --run try to run the given command, and emulate it if it fails Supported PROGRAM values: - aclocal touch file \`aclocal.m4' - autoconf touch file \`configure' - autoheader touch file \`config.h.in' - autom4te touch the output file, or create a stub one - automake touch all \`Makefile.in' files - bison create \`y.tab.[ch]', if possible, from existing .[ch] - flex create \`lex.yy.c', if possible, from existing .c - help2man touch the output file - lex create \`lex.yy.c', if possible, from existing .c - makeinfo touch the output file - yacc create \`y.tab.[ch]', if possible, from existing .[ch] + aclocal autoconf autoheader autom4te automake makeinfo + bison yacc flex lex help2man -Version suffixes to PROGRAM as well as the prefixes \`gnu-', \`gnu', and -\`g' are ignored when checking the name. +Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and +'g' are ignored when checking the name. Send bug reports to ." exit $? @@ -99,228 +70,141 @@ ;; -*) - echo 1>&2 "$0: Unknown \`$1' option" - echo 1>&2 "Try \`$0 --help' for more information" + echo 1>&2 "$0: unknown '$1' option" + echo 1>&2 "Try '$0 --help' for more information" exit 1 ;; esac -# normalize program name to check for. 
-program=`echo "$1" | sed ' - s/^gnu-//; t - s/^gnu//; t - s/^g//; t'` - -# Now exit if we have it, but it failed. Also exit now if we -# don't have it and --version was passed (most likely to detect -# the program). This is about non-GNU programs, so use $1 not -# $program. -case $1 in - lex*|yacc*) - # Not GNU programs, they don't have --version. - ;; +# Run the given program, remember its exit status. +"$@"; st=$? - *) - if test -z "$run" && ($1 --version) > /dev/null 2>&1; then - # We have it, but it failed. - exit 1 - elif test "x$2" = "x--version" || test "x$2" = "x--help"; then - # Could not run --version or --help. This is probably someone - # running `$TOOL --version' or `$TOOL --help' to check whether - # $TOOL exists and not knowing $TOOL uses missing. - exit 1 - fi - ;; -esac - -# If it does not exist, or fails to run (possibly an outdated version), -# try to emulate it. -case $program in - aclocal*) - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified \`acinclude.m4' or \`${configure_ac}'. You might want - to install the \`Automake' and \`Perl' packages. Grab them from - any GNU archive site." - touch aclocal.m4 - ;; - - autoconf*) - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified \`${configure_ac}'. You might want to install the - \`Autoconf' and \`GNU m4' packages. Grab them from any GNU - archive site." - touch configure - ;; - - autoheader*) - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified \`acconfig.h' or \`${configure_ac}'. You might want - to install the \`Autoconf' and \`GNU m4' packages. Grab them - from any GNU archive site." - files=`sed -n 's/^[ ]*A[CM]_CONFIG_HEADER(\([^)]*\)).*/\1/p' ${configure_ac}` - test -z "$files" && files="config.h" - touch_files= - for f in $files; do - case $f in - *:*) touch_files="$touch_files "`echo "$f" | - sed -e 's/^[^:]*://' -e 's/:.*//'`;; - *) touch_files="$touch_files $f.in";; - esac - done - touch $touch_files - ;; +# If it succeeded, we are done. +test $st -eq 0 && exit 0 - automake*) - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified \`Makefile.am', \`acinclude.m4' or \`${configure_ac}'. - You might want to install the \`Automake' and \`Perl' packages. - Grab them from any GNU archive site." - find . -type f -name Makefile.am -print | - sed 's/\.am$/.in/' | - while read f; do touch "$f"; done - ;; - - autom4te*) - echo 1>&2 "\ -WARNING: \`$1' is needed, but is $msg. - You might have modified some files without having the - proper tools for further handling them. - You can get \`$1' as part of \`Autoconf' from any GNU - archive site." - - file=`echo "$*" | sed -n "$sed_output"` - test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"` - if test -f "$file"; then - touch $file - else - test -z "$file" || exec >$file - echo "#! /bin/sh" - echo "# Created by GNU Automake missing as a replacement of" - echo "# $ $@" - echo "exit 0" - chmod +x $file - exit 1 - fi - ;; - - bison*|yacc*) - echo 1>&2 "\ -WARNING: \`$1' $msg. You should only need it if - you modified a \`.y' file. You may need the \`Bison' package - in order for those modifications to take effect. You can get - \`Bison' from any GNU archive site." 
- rm -f y.tab.c y.tab.h - if test $# -ne 1; then - eval LASTARG=\${$#} - case $LASTARG in - *.y) - SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'` - if test -f "$SRCFILE"; then - cp "$SRCFILE" y.tab.c - fi - SRCFILE=`echo "$LASTARG" | sed 's/y$/h/'` - if test -f "$SRCFILE"; then - cp "$SRCFILE" y.tab.h - fi - ;; - esac - fi - if test ! -f y.tab.h; then - echo >y.tab.h - fi - if test ! -f y.tab.c; then - echo 'main() { return 0; }' >y.tab.c - fi - ;; - - lex*|flex*) - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified a \`.l' file. You may need the \`Flex' package - in order for those modifications to take effect. You can get - \`Flex' from any GNU archive site." - rm -f lex.yy.c - if test $# -ne 1; then - eval LASTARG=\${$#} - case $LASTARG in - *.l) - SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'` - if test -f "$SRCFILE"; then - cp "$SRCFILE" lex.yy.c - fi - ;; - esac - fi - if test ! -f lex.yy.c; then - echo 'main() { return 0; }' >lex.yy.c - fi - ;; - - help2man*) - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified a dependency of a manual page. You may need the - \`Help2man' package in order for those modifications to take - effect. You can get \`Help2man' from any GNU archive site." - - file=`echo "$*" | sed -n "$sed_output"` - test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"` - if test -f "$file"; then - touch $file - else - test -z "$file" || exec >$file - echo ".ab help2man is required to generate this page" - exit $? - fi - ;; - - makeinfo*) - echo 1>&2 "\ -WARNING: \`$1' is $msg. You should only need it if - you modified a \`.texi' or \`.texinfo' file, or any other file - indirectly affecting the aspect of the manual. The spurious - call might also be the consequence of using a buggy \`make' (AIX, - DU, IRIX). You might want to install the \`Texinfo' package or - the \`GNU make' package. Grab either from any GNU archive site." - # The file to touch is that specified with -o ... - file=`echo "$*" | sed -n "$sed_output"` - test -z "$file" && file=`echo "$*" | sed -n "$sed_minuso"` - if test -z "$file"; then - # ... or it is the one specified with @setfilename ... - infile=`echo "$*" | sed 's/.* \([^ ]*\) *$/\1/'` - file=`sed -n ' - /^@setfilename/{ - s/.* \([^ ]*\) *$/\1/ - p - q - }' $infile` - # ... or it is derived from the source name (dir/f.texi becomes f.info) - test -z "$file" && file=`echo "$infile" | sed 's,.*/,,;s,.[^.]*$,,'`.info - fi - # If the file does not exist, the user really needs makeinfo; - # let's fail without touching anything. - test -f $file || exit 1 - touch $file - ;; - - *) - echo 1>&2 "\ -WARNING: \`$1' is needed, and is $msg. - You might have modified some files without having the - proper tools for further handling them. Check the \`README' file, - it often tells you about the needed prerequisites for installing - this package. You may also peek at any GNU archive site, in case - some other package would contain this missing \`$1' program." - exit 1 - ;; -esac +# Also exit now if we it failed (or wasn't found), and '--version' was +# passed; such an option is passed most likely to detect whether the +# program is present and works. +case $2 in --version|--help) exit $st;; esac + +# Exit code 63 means version mismatch. This often happens when the user +# tries to use an ancient version of a tool on a file that requires a +# minimum version. +if test $st -eq 63; then + msg="probably too old" +elif test $st -eq 127; then + # Program was missing. 
+ msg="missing on your system" +else + # Program was found and executed, but failed. Give up. + exit $st +fi -exit 0 +perl_URL=http://www.perl.org/ +flex_URL=http://flex.sourceforge.net/ +gnu_software_URL=http://www.gnu.org/software + +program_details () +{ + case $1 in + aclocal|automake) + echo "The '$1' program is part of the GNU Automake package:" + echo "<$gnu_software_URL/automake>" + echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:" + echo "<$gnu_software_URL/autoconf>" + echo "<$gnu_software_URL/m4/>" + echo "<$perl_URL>" + ;; + autoconf|autom4te|autoheader) + echo "The '$1' program is part of the GNU Autoconf package:" + echo "<$gnu_software_URL/autoconf/>" + echo "It also requires GNU m4 and Perl in order to run:" + echo "<$gnu_software_URL/m4/>" + echo "<$perl_URL>" + ;; + esac +} + +give_advice () +{ + # Normalize program name to check for. + normalized_program=`echo "$1" | sed ' + s/^gnu-//; t + s/^gnu//; t + s/^g//; t'` + + printf '%s\n' "'$1' is $msg." + + configure_deps="'configure.ac' or m4 files included by 'configure.ac'" + case $normalized_program in + autoconf*) + echo "You should only need it if you modified 'configure.ac'," + echo "or m4 files included by it." + program_details 'autoconf' + ;; + autoheader*) + echo "You should only need it if you modified 'acconfig.h' or" + echo "$configure_deps." + program_details 'autoheader' + ;; + automake*) + echo "You should only need it if you modified 'Makefile.am' or" + echo "$configure_deps." + program_details 'automake' + ;; + aclocal*) + echo "You should only need it if you modified 'acinclude.m4' or" + echo "$configure_deps." + program_details 'aclocal' + ;; + autom4te*) + echo "You might have modified some maintainer files that require" + echo "the 'automa4te' program to be rebuilt." + program_details 'autom4te' + ;; + bison*|yacc*) + echo "You should only need it if you modified a '.y' file." + echo "You may want to install the GNU Bison package:" + echo "<$gnu_software_URL/bison/>" + ;; + lex*|flex*) + echo "You should only need it if you modified a '.l' file." + echo "You may want to install the Fast Lexical Analyzer package:" + echo "<$flex_URL>" + ;; + help2man*) + echo "You should only need it if you modified a dependency" \ + "of a man page." + echo "You may want to install the GNU Help2man package:" + echo "<$gnu_software_URL/help2man/>" + ;; + makeinfo*) + echo "You should only need it if you modified a '.texi' file, or" + echo "any other file indirectly affecting the aspect of the manual." + echo "You might want to install the Texinfo package:" + echo "<$gnu_software_URL/texinfo/>" + echo "The spurious makeinfo call might also be the consequence of" + echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might" + echo "want to install GNU make:" + echo "<$gnu_software_URL/make/>" + ;; + *) + echo "You might have modified some files without having the proper" + echo "tools for further handling them. Check the 'README' file, it" + echo "often tells you about the needed prerequisites for installing" + echo "this package. You may also peek at any GNU archive site, in" + echo "case some other package contains this missing '$1' program." + ;; + esac +} + +give_advice "$1" | sed -e '1s/^/WARNING: /' \ + -e '2,$s/^/ /' >&2 + +# Propagate the correct exit status (expected to be 127 for a program +# not found, 63 for a program that failed due to version mismatch). 
+exit $st # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) diff -Nru liblognorm-0.3.7/README liblognorm-1.0.1/README --- liblognorm-0.3.7/README 2012-04-04 09:11:54.000000000 +0000 +++ liblognorm-1.0.1/README 2014-01-22 15:52:06.000000000 +0000 @@ -1 +1,33 @@ -nothing yet +Liblognorm is a fast-samples based normalization library. + +More information on liblognorm can be found at + http://www.liblognorm.com + +Liblognorm evolves since several years and was intially meant to be used primarily with +the Mitre CEE effort. Consequently, the initial version of liblognorm (0.x) +uses the libee CEE support library in its API. + +As time evolved, the initial CEE schema underwent considerable change. Even +worse, Mitre lost funding for CEE. While the CEE ideas survived as part +of Red Hat-driven "Project Lumberjack", the data structures became greatly +simplified and JSON based. That effectively made libee obsolete (and also +in parts libestr, which was specifically written to support CEE's +initial requirement of embedded NUL chars in strings). + +In 2013, Pavel Levshin converted liblognorm to native JSON, which helped +improve performance and simplicity for many client applications. +Unfortunately, this change broke interface compatibility (and there was +no way to avoid that, obviously...). + +The current library is the result of that effort. Application developers +are encouraged to switch to this version, as it provides the benefit of +a simpler API. This version is now being tracked by the git master branch. + +However, if you need to stick to the old API, there is a git branch +liblognorm0, which contains the previous version of the library. This +branch is also maintained for important bug fixes, so it is safe to use. + +We recommend that packagers create packages both for liblognorm0 and +liblognorm1. Note that liblognorm's development packages cannot +coexist on the same system as the PKGCONFIG system would get into +trouble. Adiscon's own packages follow this schema. diff -Nru liblognorm-0.3.7/rulebases/cisco.rulebase liblognorm-1.0.1/rulebases/cisco.rulebase --- liblognorm-0.3.7/rulebases/cisco.rulebase 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/rulebases/cisco.rulebase 2013-02-04 14:36:52.000000000 +0000 @@ -0,0 +1,10 @@ +prefix=%date:date-rfc3164% %host:word% %seqnum:number%: %othseq:char-to:\x3a%: %%%tag:char-to:\x3a%: +rule=: Configured from console by %tty:word:% (%ip:ipv4%) +rule=: Authentication failure for %proto:word% req from host %ip:ipv4% +rule=: Interface %interface:char-to:,%, changed state to %state:word% +rule=: Line protocol on Interface %interface:char-to:,%, changed state to %state:word% +rule=: Attempted to connect to %servname:word% from %ip:ipv4% +# too-generic syntaces (like %port:word% below) cause problems. +# Best is to have very specific syntaxes, but as an +# interim solution we may need to backtrack if there is no other way to handle it. +#: %port:word% transmit error diff -Nru liblognorm-0.3.7/rulebases/messages.rulebase liblognorm-1.0.1/rulebases/messages.rulebase --- liblognorm-0.3.7/rulebases/messages.rulebase 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/rulebases/messages.rulebase 2013-02-04 14:36:52.000000000 +0000 @@ -0,0 +1,9 @@ +prefix=%date:date-rfc3164% %host:word% %tag:char-to:\x3a%: +rule=: restart. 
+rule=: Bad line received from identity server at %ip:ipv4%: %port:number% +rule=: FTP session closed +rule=: wu-ftpd - TLS settings: control %wuftp-control:char-to:,%, client_cert %wuftp-clcert:char-to:,%, data %wuftp-allow:word% +rule=: User %user:word% timed out after %timeout:number% seconds at %otherdatesyntax:word% %otherdate:date-rfc3164% %otheryear:word% +rule=: getpeername (in.ftpd): Transport endpoint is not connected +# the one below is problematic (and needs some backtracking) +#: %disk:char-to:\x3a%: timeout waiting for DMA diff -Nru liblognorm-0.3.7/rulebases/sample.rulebase liblognorm-1.0.1/rulebases/sample.rulebase --- liblognorm-0.3.7/rulebases/sample.rulebase 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/rulebases/sample.rulebase 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,57 @@ +# Some sample rules and strings matching them + +# Prefix sample: +# myhostname: code=23 +prefix=%host:char-to:\x3a%: +rule=prefixed_code:code=%code:number% +# myhostname: name=somename +rule=prefixed_name:name=%name:word% +# Reset prefix to default (empty value): +prefix= + +# Quantity: 555 +rule=tag1:Quantity: %N:number% + +# Weight: 42kg +rule=tag2:Weight: %N:number%%unit:word% +annotate=tag2:+fat="free" + +# %% +rule=tag3,percent:\x25%% +annotate=percent:+percent="100" +annotate=tag3:+whole="whale" +annotate=tag3:+part="wha" + +# literal +rule=tag4,tag5,tag6,tag4:literal +annotate=tag4:+this="that" + +# first field,second field,third field,fourth field +rule=csv:%r1:char-to:,%,%r2:char-to:,%,%r3:char-to:,%,%r4:rest% + +# CSV: field1,,field3 +rule=better-csv:CSV: %f1:char-sep:,%,%f2:char-sep:,%,%f3:char-sep:,% + +# Snow White and the Seven Dwarfs +rule=tale:Snow White and %company:rest% + +# iptables: SRC=192.168.1.134 DST=46.252.161.13 LEN=48 TOS=0x00 PREC=0x00 +rule=ipt:iptables: %dummy:iptables% + +# 2012-10-11 src=127.0.0.1 dst=88.111.222.19 +rule=:%date:date-iso% src=%src:ipv4% dst=%dst:ipv4% + +# Oct 29 09:47:08 server rsyslogd: rsyslogd's groupid changed to 103 +rule=syslog:%date1:date-rfc3164% %host:word% %tag:char-to:\x3a%: %text:rest% + +# Oct 29 09:47:08 +rule=rfc3164:%date1:date-rfc3164% + +# 1985-04-12T19:20:50.52-04:00 +rule=rfc5424:%date1:date-rfc5424% + +# 1985-04-12T19:20:50.52-04:00 testing 123 +rule=rfc5424:%date1:date-rfc5424% %test:word% %test2:number% + +# quoted_string="Contents of a quoted string cannot include quote marks" +rule=quote:quoted_string=%quote:quoted-string% diff -Nru liblognorm-0.3.7/rulebases/syntax.txt liblognorm-1.0.1/rulebases/syntax.txt --- liblognorm-0.3.7/rulebases/syntax.txt 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/rulebases/syntax.txt 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,132 @@ +Basic syntax +============ + +Each line in rulebase file is evaluated separately. +Lines starting with '#' are commentaries. +Empty lines are just skipped, they can be inserted for readability. +If the line starts with 'rule=', then it contains a rule. This line has +following format: + + rule=[[,...]]: + +Everything before a colon is treated as comma-separated list of tags, which +will be attached to a match. After the colon, match description should be +given. It consists of string literals and field selectors. String literals +should match exactly. Field selector has this format: + + %:[:]% + +Percent sign is used to enclose field selector. If you need to match literal +'%', it can be written as '%%' or '\x25'. + +Behaviour of field selector depends on its type, which is decribed below. 
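As a quick illustration of the selector syntax described above, consider the 'prefixed_code' entries from sample.rulebase shown earlier; this is only a sketch of the expected outcome, and the exact JSON value types and key order depend on the parser implementation:

    # rulebase
    prefix=%host:char-to:\x3a%:
    rule=prefixed_code:code=%code:number%

    # input message
    myhostname: code=23

    # normalized event (approximate)
    { "host": "myhostname", "code": "23", "event.tags": [ "prefixed_code" ] }

    # a message matched by no rule instead carries an "unparsed-data" field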
+ +If field name is set to '-', this field is matched but not saved. + +Several rules can have a common prefix. You can set it once with this syntax: + + prefix= + +Every following rule will be treated as an addition to this prefix. + +Prefix can be reset to default (empty value) by the line: + + prefix= + +Tags of the matched rule are attached to the message and can be used to +annotate it. Annotation allows to add fixed fields to the message. +Syntax is as following: + + annotate=:+="" + +Field value should always be enclosed in double quote marks. + +There can be multiple annotations for the same tag. + +Field types +=========== + +Field type: 'number' +Matches: One or more decimal digits. +Extra data: Not used +Example: %field_name:number% + +Field type: 'word' +Matches: One or more characters, up to the next space (\x20), or + up to end of line. +Extra data: Not used +Example: %field_name:word% + +Field type: 'char-to' +Matches: One or more characters, up to the next character given in + extra data. +Extra data: One character (can be escaped) +Example: %field_name:char-to:,% + %field_name:char-to:\x25% + +Field type: 'char-sep' +Matches: Zero or more characters, up to the next character given in + extra data, or up to end of line. +Extra data: One character (can be escaped) +Example: %field_name:char-sep:,% + %field_name:char-sep:\x25% + +Field type: 'rest' +Matches: Zero or more characters till end of line. +Extra data: Not used +Example: %field_name:rest% +Notes: Should be always at end of the rule. + +Field type: 'quoted-string' +Matches: Zero or more characters, surrounded by double quote marks. +Extra data: Not used +Example: %field_name:quoted-string% +Notes: Quote marks are stripped from the match. + +Field type: 'date-iso' +Matches: Date of format 'YYYY-MM-DD'. +Extra data: Not used +Example: %field-name:date-iso% + +Field type: 'time-24hr' +Matches: Time of format 'HH:MM:SS', where HH is 00..23. +Extra data: Not used +Example: %field_name:time-24hr% + +Field type: 'time-12hr' +Matches: Time of format 'HH:MM:SS', where HH is 00..12. +Extra data: Not used +Example: %field_name:time-12hr% + +Field type: 'ipv4' +Matches: IPv4 address, in dot-decimal notation (AAA.BBB.CCC.DDD). +Extra data: Not used +Example: %field_name:ipv4% + +Field type: 'date-rfc3164' +Matches: Valid date/time in RFC3164 format, i.e.: 'Oct 29 09:47:08' +Extra data: Not used +Example: %field_name:date-rfc3164% +Notes: This parser implements several quirks to match malformed + timestamps from some devices. + +Field type: 'date-rfc5424' +Matches: Valid date/time in RFC5424 format, i.e.: + '1985-04-12T19:20:50.52-04:00' +Extra data: Not used +Example: %field_name:date-rfc5424% +Notes: Slightly different formats are allowed. + +Field type: 'iptables' +Matches: Name=value pairs, separated by spaces, as in Netfilter log + messages. +Extra data: Not used +Example: %-:iptables% +Notes: Name of the selector is not used; names from the line are + used instead. This selector always matches everything till + end of the line. Cannot match zero characters. + +Examples +======== + +Look at sample.rulebase for example rules and matching lines. diff -Nru liblognorm-0.3.7/src/annot.c liblognorm-1.0.1/src/annot.c --- liblognorm-0.3.7/src/annot.c 2012-04-04 09:11:54.000000000 +0000 +++ liblognorm-1.0.1/src/annot.c 2014-01-22 15:52:06.000000000 +0000 @@ -5,6 +5,8 @@ *//* * Copyright 2011 by Rainer Gerhards and Adiscon GmbH. * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * * This file is part of liblognorm. 
* * This library is free software; you can redistribute it and/or @@ -31,10 +33,7 @@ #include #include #include -#include -#include -#include "liblognorm.h" #include "lognorm.h" #include "samp.h" #include "annot.h" @@ -55,14 +54,13 @@ void ln_deleteAnnotSet(ln_annotSet *as) { - ln_annot *node, *nodeDel; + ln_annot *node, *nextnode; if(as == NULL) goto done; - for(node = as->aroot ; node != NULL ; ) { - nodeDel = node; - node = node->next; - ln_deleteAnnot(nodeDel); + for(node = as->aroot; node != NULL; node = nextnode) { + nextnode = node->next; + ln_deleteAnnot(node); } free(as); done: return; @@ -94,18 +92,20 @@ * as part of the process. * @returns 0 if ok, something else otherwise */ -inline int +static int ln_combineAnnot(ln_annot *annot, ln_annot *add) { int r = 0; - ln_annot_op *op, *opdel; + ln_annot_op *op, *nextop; - for(op = add->oproot ; op != NULL ; ) { + for(op = add->oproot; op != NULL; op = nextop) { CHKR(ln_addAnnotOp(annot, op->opc, op->name, op->value)); - opdel = op; - op = op->next; - free(opdel); + nextop = op->next; + free(op); } + es_deleteStr(add->tag); + free(add); + done: return r; } @@ -143,17 +143,17 @@ void ln_deleteAnnot(ln_annot *annot) { - ln_annot_op *node, *nodeDel; + ln_annot_op *op, *nextop; if(annot == NULL) goto done; - for(node = annot->oproot ; node != NULL ; ) { - nodeDel = node; - es_deleteStr(node->name); - if(node->value != NULL) - es_deleteStr(node->value); - node = node->next; - free(nodeDel); + es_deleteStr(annot->tag); + for(op = annot->oproot; op != NULL; op = nextop) { + es_deleteStr(op->name); + if(op->value != NULL) + es_deleteStr(op->value); + nextop = op->next; + free(op); } free(annot); done: return; @@ -186,20 +186,22 @@ * small and easy to follow. */ static inline int -ln_annotateEventWithTag(ln_ctx ctx, struct ee_event *event, es_str_t *tag) +ln_annotateEventWithTag(ln_ctx ctx, struct json_object *json, es_str_t *tag) { int r=0; ln_annot *annot; ln_annot_op *op; - struct ee_field *field; + struct json_object *field; + char *cstr; - annot = ln_findAnnot(ctx->pas, tag); + if (NULL == (annot = ln_findAnnot(ctx->pas, tag))) + goto done; for(op = annot->oproot ; op != NULL ; op = op->next) { if(op->opc == ln_annot_ADD) { - CHKN(field = ee_newField(ctx->eectx)); - CHKR(ee_nameField(field, op->name)); - CHKR(ee_addStrValueToField(field, op->value)); - CHKR(ee_addFieldToEvent(event, field)); + CHKN(cstr = ln_es_str2cstr(&op->value)); + CHKN(field = json_object_new_string(cstr)); + CHKN(cstr = ln_es_str2cstr(&op->name)); + json_object_object_add(json, cstr, field); } else { // TODO: implement } @@ -210,25 +212,27 @@ int -ln_annotateEvent(ln_ctx ctx, struct ee_event *event) +ln_annotate(ln_ctx ctx, struct json_object *json, struct json_object *tagbucket) { int r = 0; - void *cookie; - struct ee_tagbucket *tagbucket; es_str_t *tag; + struct json_object *tagObj; + const char *tagCstr; + int i; + ln_dbgprintf(ctx, "ln_annotate called"); /* shortcut: terminate immediately if nothing to do... 
*/ if(ctx->pas->aroot == NULL) goto done; /* iterate over tagbucket */ - ee_EventGetTagbucket(event, &tagbucket); - cookie = NULL; - while(1) { - CHKR(ee_TagbucketGetNextTag(tagbucket, &cookie, &tag)); - if(cookie == NULL) - break; /* end iteration */ - CHKR(ln_annotateEventWithTag(ctx, event, tag)); + for (i = json_object_array_length(tagbucket) - 1; i >= 0; i--) { + CHKN(tagObj = json_object_array_get_idx(tagbucket, i)); + CHKN(tagCstr = json_object_get_string(tagObj)); + ln_dbgprintf(ctx, "ln_annotate, current tag %d, cstr %s", i, tagCstr); + CHKN(tag = es_newStrFromCStr(tagCstr, strlen(tagCstr))); + CHKR(ln_annotateEventWithTag(ctx, json, tag)); + es_deleteStr(tag); } done: return r; diff -Nru liblognorm-0.3.7/src/annot.h liblognorm-1.0.1/src/annot.h --- liblognorm-0.3.7/src/annot.h 2012-04-04 09:11:54.000000000 +0000 +++ liblognorm-1.0.1/src/annot.h 2014-01-22 15:52:06.000000000 +0000 @@ -5,6 +5,8 @@ *//* * Copyright 2011 by Rainer Gerhards and Adiscon GmbH. * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * * This file is meant to be included by applications using liblognorm. * For lognorm library files themselves, include "lognorm.h". * @@ -29,7 +31,6 @@ #ifndef LIBLOGNORM_ANNOT_H_INCLUDED #define LIBLOGNORM_ANNOT_H_INCLUDED #include -#include typedef struct ln_annotSet_s ln_annotSet; typedef struct ln_annot_s ln_annot; @@ -159,13 +160,13 @@ /** * Annotate an event. - * This adds anotations based on the event's tagbucket. + * This adds annotations based on the event's tagbucket. * @memberof ln_annot * * @param[in] ctx current context * @param[in] event event to annotate (updated with anotations on exit) * @returns 0 on success, something else otherwise */ -int ln_annotateEvent(ln_ctx ctx, struct ee_event *event); +int ln_annotate(ln_ctx ctx, struct json_object *json, struct json_object *tags); #endif /* #ifndef LOGNORM_ANNOT_H_INCLUDED */ diff -Nru liblognorm-0.3.7/src/enc_csv.c liblognorm-1.0.1/src/enc_csv.c --- liblognorm-0.3.7/src/enc_csv.c 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/src/enc_csv.c 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,217 @@ +/** + * @file enc_csv.c + * Encoder for CSV format. Note: CEE currently think about what a + * CEE-compliant CSV format may look like. As such, the format of + * this output will most probably change once the final decision + * has been made. At this time (2010-12), I do NOT even try to + * stay inline with the discussion. + * + * This file contains code from all related objects that is required in + * order to encode this format. The core idea of putting all of this into + * a single file is that this makes it very straightforward to write + * encoders for different encodings, as all is in one place. + * + */ +/* + * liblognorm - a fast samples-based log normalization library + * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. + * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * + * This file is part of liblognorm. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + * A copy of the LGPL v2.1 can be found in the file "COPYING" in this distribution. + */ +#include "config.h" +#include +#include +#include +#include +#include + +#include +#include + +#include "lognorm.h" +#include "internal.h" + +static char hexdigit[16] = + {'0', '1', '2', '3', '4', '5', '6', '7', '8', + '9', 'A', 'B', 'C', 'D', 'E', 'F' }; + +/* TODO: CSV encoding for Unicode characters is as of RFC4627 not fully + * supported. The algorithm is that we must build the wide character from + * UTF-8 (if char > 127) and build the full 4-octet Unicode character out + * of it. Then, this needs to be encoded. Currently, we work on a + * byte-by-byte basis, which simply is incorrect. + * rgerhards, 2010-11-09 + */ +int +ln_addValue_CSV(const char *buf, es_str_t **str) +{ + int r; + unsigned char c; + es_size_t i; + char numbuf[4]; + int j; + + assert(str != NULL); + assert(*str != NULL); + assert(buf != NULL); + + for(i = 0; i < strlen(buf); i++) { + c = buf[i]; + if((c >= 0x23 && c <= 0x5b) + || (c >= 0x5d /* && c <= 0x10FFFF*/) + || c == 0x20 || c == 0x21) { + /* no need to escape */ + es_addChar(str, c); + } else { + /* we must escape, try RFC4627-defined special sequences first */ + switch(c) { + case '\0': + es_addBuf(str, "\\u0000", 6); + break; + case '\"': + es_addBuf(str, "\\\"", 2); + break; + case '/': + es_addBuf(str, "\\/", 2); + break; + case '\\': + es_addBuf(str, "\\\\", 2); + break; + case '\010': + es_addBuf(str, "\\b", 2); + break; + case '\014': + es_addBuf(str, "\\f", 2); + break; + case '\n': + es_addBuf(str, "\\n", 2); + break; + case '\r': + es_addBuf(str, "\\r", 2); + break; + case '\t': + es_addBuf(str, "\\t", 2); + break; + default: + /* TODO : proper Unicode encoding (see header comment) */ + for(j = 0 ; j < 4 ; ++j) { + numbuf[3-j] = hexdigit[c % 16]; + c = c / 16; + } + es_addBuf(str, "\\u", 2); + es_addBuf(str, numbuf, 4); + break; + } + } + } + r = 0; + + return r; +} + + +int +ln_addField_CSV(struct json_object *field, es_str_t **str) +{ + int r, i; + struct json_object *obj; + int needComma; + const char *value; + + assert(field != NULL); + assert(str != NULL); + assert(*str != NULL); + + switch(json_object_get_type(field)) { + case json_type_array: + CHKR(es_addChar(str, '[')); + for (i = json_object_array_length(field) - 1; i >= 0; i--) { + if(needComma) + es_addChar(str, ','); + else + needComma = 1; + CHKN(obj = json_object_array_get_idx(field, i)); + CHKN(value = json_object_get_string(obj)); + CHKR(ln_addValue_CSV(value, str)); + } + CHKR(es_addChar(str, ']')); + break; + case json_type_string: + case json_type_int: + CHKN(value = json_object_get_string(field)); + CHKR(ln_addValue_CSV(value, str)); + break; + default: + CHKR(es_addBuf(str, "***OBJECT***", sizeof("***OBJECT***")-1)); + } + + r = 0; + +done: + return r; +} + + +int +ln_fmtEventToCSV(struct json_object *json, es_str_t **str, es_str_t *extraData) +{ + int r = -1; + int needComma = 0; + struct json_object *field; + char *namelist = NULL, *name, *nn; + + assert(json != NULL); + assert(json_object_is_type(json, json_type_object)); + + if((*str = es_newStr(256)) == NULL) + goto done; + if(extraData == NULL) + goto done; + + CHKN(namelist = es_str2cstr(extraData, NULL)); + + for (name = namelist; name != NULL; name = nn) { + for (nn = name; *nn != '\0' && *nn != ',' && 
*nn != ' '; nn++) + { /* do nothing */ } + if (*nn == '\0') { + nn = NULL; + } else { + *nn = '\0'; + nn++; + } + field = json_object_object_get(json, name); + if (needComma) { + CHKR(es_addChar(str, ',')); + } else { + needComma = 1; + } + if (field != NULL) { + CHKR(es_addChar(str, '"')); + ln_addField_CSV(field, str); + CHKR(es_addChar(str, '"')); + } + } + r = 0; +done: + if (namelist != NULL) + free(namelist); + return r; +} diff -Nru liblognorm-0.3.7/src/enc.h liblognorm-1.0.1/src/enc.h --- liblognorm-0.3.7/src/enc.h 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/src/enc.h 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,39 @@ +/** + * @file enc.h + * @brief Encoder functions + */ +/* + * liblognorm - a fast samples-based log normalization library + * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. + * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * + * This file is part of liblognorm. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + * A copy of the LGPL v2.1 can be found in the file "COPYING" in this distribution. + */ + +#ifndef LIBLOGNORM_ENC_H_INCLUDED +#define LIBLOGNORM_ENC_H_INCLUDED + +int ln_fmtEventToRFC5424(struct json_object *json, es_str_t **str); + +int ln_fmtEventToCSV(struct json_object *json, es_str_t **str, es_str_t *extraData); + +int ln_fmtEventToXML(struct json_object *json, es_str_t **str); + +#endif /* LIBLOGNORM_ENC_H_INCLUDED */ diff -Nru liblognorm-0.3.7/src/enc_syslog.c liblognorm-1.0.1/src/enc_syslog.c --- liblognorm-0.3.7/src/enc_syslog.c 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/src/enc_syslog.c 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,199 @@ +/** + * @file enc_syslog.c + * Encoder for syslog format. + * This file contains code from all related objects that is required in + * order to encode syslog format. The core idea of putting all of this into + * a single file is that this makes it very straightforward to write + * encoders for different encodings, as all is in one place. + */ +/* + * liblognorm - a fast samples-based log normalization library + * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. + * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * + * This file is part of liblognorm. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + * A copy of the LGPL v2.1 can be found in the file "COPYING" in this distribution. + */ +#include "config.h" +#include +#include +#include +#include +#include + +#include +#include + +#include "internal.h" +#include "liblognorm.h" + +int +ln_addValue_Syslog(const char *value, es_str_t **str) +{ + int r; + es_size_t i; + + assert(str != NULL); + assert(*str != NULL); + assert(value != NULL); + + for(i = 0; i < strlen(value); i++) { + switch(value[i]) { + case '\0': + es_addChar(str, '\\'); + es_addChar(str, '0'); + break; + case '\n': + es_addChar(str, '\\'); + es_addChar(str, 'n'); + break; + /* TODO : add rest of control characters here... */ + case ',': /* comma is CEE-reserved for lists */ + es_addChar(str, '\\'); + es_addChar(str, ','); + break; +#if 0 /* alternative encoding for discussion */ + case '^': /* CEE-reserved for lists */ + es_addChar(str, '\\'); + es_addChar(str, '^'); + break; +#endif + /* at this layer ... do we need to think about transport + * encoding at all? Or simply leave it to the transport agent? + */ + case '\\': /* RFC5424 reserved */ + es_addChar(str, '\\'); + es_addChar(str, '\\'); + break; + case ']': /* RFC5424 reserved */ + es_addChar(str, '\\'); + es_addChar(str, ']'); + break; + case '\"': /* RFC5424 reserved */ + es_addChar(str, '\\'); + es_addChar(str, '\"'); + break; + default: + es_addChar(str, value[i]); + break; + } + } + r = 0; + + return r; +} + + +int +ln_addField_Syslog(char *name, struct json_object *field, es_str_t **str) +{ + int r; + const char *value; + int needComma = 0; + struct json_object *obj; + int i; + + assert(field != NULL); + assert(str != NULL); + assert(*str != NULL); + + CHKR(es_addBuf(str, name, strlen(name))); + CHKR(es_addBuf(str, "=\"", 2)); + switch(json_object_get_type(field)) { + case json_type_array: + for (i = json_object_array_length(field) - 1; i >= 0; i--) { + if(needComma) + es_addChar(str, ','); + else + needComma = 1; + CHKN(obj = json_object_array_get_idx(field, i)); + CHKN(value = json_object_get_string(obj)); + CHKR(ln_addValue_Syslog(value, str)); + } + break; + case json_type_string: + case json_type_int: + CHKN(value = json_object_get_string(field)); + CHKR(ln_addValue_Syslog(value, str)); + break; + default: + CHKR(es_addBuf(str, "***OBJECT***", sizeof("***OBJECT***")-1)); + } + CHKR(es_addChar(str, '\"')); + r = 0; + +done: + return r; +} + + +static inline int +ln_addTags_Syslog(struct json_object *taglist, es_str_t **str) +{ + int r = 0; + struct json_object *tagObj; + int needComma = 0; + const char *tagCstr; + int i; + + assert(json_object_is_type(taglist, json_type_array)); + + CHKR(es_addBuf(str, " event.tags=\"", 13)); + for (i = json_object_array_length(taglist) - 1; i >= 0; i--) { + if(needComma) + es_addChar(str, ','); + else + needComma = 1; + CHKN(tagObj = json_object_array_get_idx(taglist, i)); + CHKN(tagCstr = json_object_get_string(tagObj)); + CHKR(es_addBuf(str, (char*)tagCstr, strlen(tagCstr))); + } + es_addChar(str, '"'); + +done: return r; +} + + +int +ln_fmtEventToRFC5424(struct json_object *json, es_str_t **str) +{ + int r = -1; + struct json_object *tags; + + assert(json != NULL); + assert(json_object_is_type(json, json_type_object)); + if((*str = es_newStr(256)) == NULL) + goto done; + + es_addBuf(str, "[cee@115", 8); + + if((tags = 
json_object_object_get(json, "event.tags")) != NULL) { + CHKR(ln_addTags_Syslog(tags, str)); + } + json_object_object_foreach(json, name, field) { + if (strcmp(name, "event.tags")) { + es_addChar(str, ' '); + ln_addField_Syslog(name, field, str); + } + } + es_addChar(str, ']'); + +done: + return r; +} diff -Nru liblognorm-0.3.7/src/enc_xml.c liblognorm-1.0.1/src/enc_xml.c --- liblognorm-0.3.7/src/enc_xml.c 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/src/enc_xml.c 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,221 @@ +/** + * @file enc-xml.c + * Encoder for XML format. + * + * This file contains code from all related objects that is required in + * order to encode this format. The core idea of putting all of this into + * a single file is that this makes it very straightforward to write + * encoders for different encodings, as all is in one place. + * + */ +/* + * liblognorm - a fast samples-based log normalization library + * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. + * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * + * This file is part of liblognorm. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + * A copy of the LGPL v2.1 can be found in the file "COPYING" in this distribution. + */ +#include "config.h" +#include +#include +#include +#include +#include + +#include + +#include "lognorm.h" +#include "internal.h" + +#if 0 +static char hexdigit[16] = + {'0', '1', '2', '3', '4', '5', '6', '7', '8', + '9', 'A', 'B', 'C', 'D', 'E', 'F' }; +#endif + +/* TODO: XML encoding for Unicode characters is as of RFC4627 not fully + * supported. The algorithm is that we must build the wide character from + * UTF-8 (if char > 127) and build the full 4-octet Unicode character out + * of it. Then, this needs to be encoded. Currently, we work on a + * byte-by-byte basis, which simply is incorrect. + * rgerhards, 2010-11-09 + */ +int +ln_addValue_XML(const char *value, es_str_t **str) +{ + int r; + unsigned char c; + es_size_t i; +#if 0 + char numbuf[4]; + int j; +#endif + + assert(str != NULL); + assert(*str != NULL); + assert(value != NULL); + // TODO: support other types! 
+ es_addBuf(str, "", 7); + + for(i = 0 ; i < strlen(value) ; ++i) { + c = value[i]; + switch(c) { + case '\0': + es_addBuf(str, "�", 5); + break; +#if 0 + case '\n': + es_addBuf(str, " ", 5); + break; + case '\r': + es_addBuf(str, " ", 5); + break; + case '\t': + es_addBuf(str, "&x08;", 5); + break; + case '\"': + es_addBuf(str, """, 6); + break; +#endif + case '<': + es_addBuf(str, "<", 4); + break; + case '&': + es_addBuf(str, "&", 5); + break; +#if 0 + case ',': + es_addBuf(str, "\\,", 2); + break; + case '\'': + es_addBuf(str, "'", 6); + break; +#endif + default: + es_addChar(str, c); +#if 0 + /* TODO : proper Unicode encoding (see header comment) */ + for(j = 0 ; j < 4 ; ++j) { + numbuf[3-j] = hexdigit[c % 16]; + c = c / 16; + } + es_addBuf(str, "\\u", 2); + es_addBuf(str, numbuf, 4); + break; +#endif + } + } + es_addBuf(str, "", 8); + r = 0; + + return r; +} + + +int +ln_addField_XML(char *name, struct json_object *field, es_str_t **str) +{ + int r; + int i; + const char *value; + struct json_object *obj; + + assert(field != NULL); + assert(str != NULL); + assert(*str != NULL); + + CHKR(es_addBuf(str, "", 2)); + + switch(json_object_get_type(field)) { + case json_type_array: + for (i = json_object_array_length(field) - 1; i >= 0; i--) { + CHKN(obj = json_object_array_get_idx(field, i)); + CHKN(value = json_object_get_string(obj)); + CHKR(ln_addValue_XML(value, str)); + } + break; + case json_type_string: + case json_type_int: + CHKN(value = json_object_get_string(field)); + CHKR(ln_addValue_XML(value, str)); + break; + default: + CHKR(es_addBuf(str, "***OBJECT***", sizeof("***OBJECT***")-1)); + } + + CHKR(es_addBuf(str, "", 8)); + r = 0; + +done: + return r; +} + + +static inline int +ln_addTags_XML(struct json_object *taglist, es_str_t **str) +{ + int r = 0; + struct json_object *tagObj; + const char *tagCstr; + int i; + + CHKR(es_addBuf(str, "", 12)); + for (i = json_object_array_length(taglist) - 1; i >= 0; i--) { + CHKR(es_addBuf(str, "", 5)); + CHKN(tagObj = json_object_array_get_idx(taglist, i)); + CHKN(tagCstr = json_object_get_string(tagObj)); + CHKR(es_addBuf(str, (char*)tagCstr, strlen(tagCstr))); + CHKR(es_addBuf(str, "", 6)); + } + CHKR(es_addBuf(str, "", 13)); + +done: return r; +} + + +int +ln_fmtEventToXML(struct json_object *json, es_str_t **str) +{ + int r = -1; + struct json_object *tags; + + assert(json != NULL); + assert(json_object_is_type(json_type_object)); + + if((*str = es_newStr(256)) == NULL) + goto done; + + es_addBuf(str, "", 7); + if((tags = json_object_object_get(json, "event.tags")) != NULL) { + CHKR(ln_addTags_XML(tags, str)); + } + json_object_object_foreach(json, name, field) { + if (strcmp(name, "event.tags")) { + ln_addField_XML(name, field, str); + } + } + + es_addBuf(str, "", 8); + +done: + return r; +} diff -Nru liblognorm-0.3.7/src/internal.h liblognorm-1.0.1/src/internal.h --- liblognorm-0.3.7/src/internal.h 2012-04-04 09:11:54.000000000 +0000 +++ liblognorm-1.0.1/src/internal.h 2014-01-22 15:52:06.000000000 +0000 @@ -33,6 +33,8 @@ * liblognorm - a fast samples-based log normalization library * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * * This file is part of liblognorm. 
* * This library is free software; you can redistribute it and/or @@ -54,6 +56,8 @@ #ifndef INTERNAL_H_INCLUDED #define INTERNAL_H_INCLUDED +#include "liblognorm.h" + /* support for simple error checking */ #define CHKR(x) \ @@ -67,4 +71,19 @@ #define FAIL(e) {r = (e); goto done;} -#endif /* #ifndef LOGNORM_H_INCLUDED */ +static inline char* ln_es_str2cstr(es_str_t **str) +{ + int r = -1; + char *buf; + + if (es_strlen(*str) == (*str)->lenBuf) { + CHKR(es_extendBuf(str, 1)); + } + CHKN(buf = (char*)es_getBufAddr(*str)); + buf[es_strlen(*str)] = '\0'; + return buf; +done: + return NULL; +} + +#endif /* #ifndef INTERNAL_H_INCLUDED */ diff -Nru liblognorm-0.3.7/src/liblognorm.c liblognorm-1.0.1/src/liblognorm.c --- liblognorm-0.3.7/src/liblognorm.c 2013-07-18 07:37:17.000000000 +0000 +++ liblognorm-1.0.1/src/liblognorm.c 2014-01-22 15:52:06.000000000 +0000 @@ -2,7 +2,9 @@ * See header file for descriptions. * * liblognorm - a fast samples-based log normalization library - * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. + * Copyright 2013 by Rainer Gerhards and Adiscon GmbH. + * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 * * This file is part of liblognorm. * @@ -23,6 +25,7 @@ * A copy of the LGPL v2.1 can be found in the file "COPYING" in this distribution. */ #include "config.h" +#include #include "liblognorm.h" #include "lognorm.h" @@ -87,6 +90,8 @@ ln_deletePTree(ctx->ptree); if(ctx->rulePrefix != NULL) es_deleteStr(ctx->rulePrefix); + if(ctx->pas != NULL) + ln_deleteAnnotSet(ctx->pas); free(ctx); done: return r; @@ -94,7 +99,7 @@ int -ln_setDebugCB(ln_ctx ctx, void (*cb)(void*, char*, size_t), void *cookie) +ln_setDebugCB(ln_ctx ctx, void (*cb)(void*, const char*, size_t), void *cookie) { int r = 0; @@ -107,7 +112,7 @@ int -ln_loadSample(ln_ctx ctx, char *buf) +ln_loadSample(ln_ctx ctx, const char *buf) { // Something bad happened - no new sample if (ln_processSamp(ctx, buf, strlen(buf)) == NULL) { @@ -118,7 +123,7 @@ int -ln_loadSamples(ln_ctx ctx, char *file) +ln_loadSamples(ln_ctx ctx, const char *file) { int r = 0; struct ln_sampRepos *repo; @@ -139,9 +144,3 @@ return r; } - -void -ln_setEECtx(ln_ctx ctx, ee_ctx eectx) -{ - ctx->eectx = eectx; -} diff -Nru liblognorm-0.3.7/src/liblognorm.h liblognorm-1.0.1/src/liblognorm.h --- liblognorm-0.3.7/src/liblognorm.h 2013-07-18 07:37:17.000000000 +0000 +++ liblognorm-1.0.1/src/liblognorm.h 2014-01-22 15:52:06.000000000 +0000 @@ -36,7 +36,9 @@ *//* * * liblognorm - a fast samples-based log normalization library - * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. + * Copyright 2010-2013 by Rainer Gerhards and Adiscon GmbH. + * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 * * This file is part of liblognorm. * @@ -57,22 +59,19 @@ * A copy of the LGPL v2.1 can be found in the file "COPYING" in this distribution. */ #ifndef LIBLOGNORM_H_INCLUDED -#define LIBLOGNORM_H_INCLUDED +#define LIBLOGNORM_H_INCLUDED #include /* we need size_t */ -#include +#include +#include /* error codes */ #define LN_NOMEM -1 #define LN_INVLDFDESCR -1 - -/* event_t needs to come from libcee, or whatever it will be called. We - * provide a dummy to be able to compile the initial skeletons. - */ -typedef void * event_t; +#define LN_WRONGPARSER -1000 /** * The library context descriptor. - * This is used to permit multiple independednt instances of the + * This is used to permit multiple independent instances of the * library to be called within a single program. This is most * useful for plugin-based architectures. 
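A minimal consumer of the new JSON-based API, using the prototypes declared in the hunks that follow, would look roughly like this (a sketch, not part of the patch; ln_initCtx() and the json-c include path are assumptions, as neither appears verbatim in this hunk):

#include <stdio.h>
#include <string.h>
#include <json.h>          /* json-c; may be <json-c/json.h> depending on the installation */
#include <liblognorm.h>

int main(void)
{
	ln_ctx ctx;
	struct json_object *json = NULL;
	const char *msg = "myhostname: code=23";

	/* ln_initCtx() is assumed as the counterpart of ln_exitCtx() shown below */
	if((ctx = ln_initCtx()) == NULL)
		return 1;
	/* load a rulebase; returns zero on success */
	if(ln_loadSamples(ctx, "sample.rulebase") != 0) {
		ln_exitCtx(ctx);
		return 1;
	}
	/* normalize one message into a json-c object */
	if(ln_normalize(ctx, msg, strlen(msg), &json) == 0 && json != NULL) {
		printf("%s\n", json_object_to_json_string(json));
		json_object_put(json);          /* release the event */
	}
	ln_exitCtx(ctx);
	return 0;
}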
*/ @@ -115,15 +114,6 @@ /** - * Set the libee context to be used by this liblognorm context. - * - * @param ctx context to be modified - * @param eectx libee context - */ -void ln_setEECtx(ln_ctx ctx, ee_ctx eectx); - - -/** * Set a debug message handler (callback). * * Liblognorm can provide helpful information for debugging @@ -155,7 +145,7 @@ * * @return Returns zero on success, something else otherwise. */ -int ln_setDebugCB(ln_ctx ctx, void (*cb)(void*, char*, size_t), void *cookie); +int ln_setDebugCB(ln_ctx ctx, void (*cb)(void*, const char*, size_t), void *cookie); /** @@ -179,7 +169,7 @@ * @return Returns zero on success, something else otherwise. */ int -ln_loadSample(ln_ctx ctx, char *buf); +ln_loadSample(ln_ctx ctx, const char *buf); /** * Load a (log) sample file. @@ -194,7 +184,7 @@ * * @return Returns zero on success, something else otherwise. */ -int ln_loadSamples(ln_ctx ctx, char *file); +int ln_loadSamples(ln_ctx ctx, const char *file); /** * Normalize a message. @@ -215,14 +205,13 @@ * must be provided. * * @param[in] ctx The library context to use. - * @param[in] msg The message string (see note above). - * @param[in] lenmsg The length of the message in bytes. - * @param[out] event A new event record or NULL if an error occured. Must be + * @param[in] str The message string (see note above). + * @param[in] strLen The length of the message in bytes. + * @param[out] json_p A new event record or NULL if an error occured. Must be * destructed if no longer needed. * * @return Returns zero on success, something else otherwise. */ -int ln_normalizeMsg(ln_ctx ctx, char *msg, size_t lenmsg, event_t *event); -int ln_normalize(ln_ctx ctx, es_str_t *str, struct ee_event **event); +int ln_normalize(ln_ctx ctx, const char *str, size_t strLen, struct json_object **json_p); #endif /* #ifndef LOGNORM_H_INCLUDED */ diff -Nru liblognorm-0.3.7/src/lognormalizer.c liblognorm-1.0.1/src/lognormalizer.c --- liblognorm-0.3.7/src/lognormalizer.c 2013-07-18 07:37:17.000000000 +0000 +++ liblognorm-1.0.1/src/lognormalizer.c 2014-01-22 15:52:06.000000000 +0000 @@ -12,7 +12,7 @@ * *//* * liblognorm - a fast samples-based log normalization library - * Copyright 2010-2011 by Rainer Gerhards and Adiscon GmbH. + * Copyright 2010-2013 by Rainer Gerhards and Adiscon GmbH. * * This file is part of liblognorm. * @@ -36,17 +36,17 @@ #include #include #include -#include +#include #include "liblognorm.h" #include "ptree.h" #include "lognorm.h" +#include "enc.h" static ln_ctx ctx; -static ee_ctx eectx; static int verbose = 0; static int parsedOnly = 0; /**< output unparsed messages? */ -static int flatTags = 0; /**< output unparsed messages? */ +static int flatTags = 0; /**< print event.tags in JSON? */ static FILE *fpDOT; static es_str_t *encFmt = NULL; /**< a format string for encoder use */ static es_str_t *mandatoryTag = NULL; /**< tag which must be given so that mesg will @@ -54,7 +54,7 @@ static enum { f_syslog, f_json, f_xml, f_csv } outfmt = f_syslog; void -dbgCallBack(void __attribute__((unused)) *cookie, char *msg, +dbgCallBack(void __attribute__((unused)) *cookie, const char *msg, size_t __attribute__((unused)) lenMsg) { printf("liblognorm: %s\n", msg); @@ -71,32 +71,61 @@ * of the string on every call. 
*/ static inline void -outputEvent(struct ee_event *event) +outputEvent(struct json_object *json) { char *cstr; es_str_t *str = NULL; switch(outfmt) { case f_json: - ee_fmtEventToJSON(event, &str); + if(!flatTags) { + json_object_object_del(json, "event.tags"); + } + cstr = (char*)json_object_to_json_string(json); break; case f_syslog: - ee_fmtEventToRFC5424(event, &str); + ln_fmtEventToRFC5424(json, &str); break; case f_xml: - ee_fmtEventToXML(event, &str); + ln_fmtEventToXML(json, &str); break; case f_csv: - ee_fmtEventToCSV(event, &str, encFmt); + ln_fmtEventToCSV(json, &str, encFmt); break; } - cstr = es_str2cstr(str, NULL); + if (str != NULL) + cstr = es_str2cstr(str, NULL); if(verbose > 0) printf("normalized: '%s'\n", cstr); printf("%s\n", cstr); - free(cstr); + if (str != NULL) + free(cstr); es_deleteStr(str); } +/* test if the tag exists */ +static int +eventHasTag(struct json_object *json, const char *tag) +{ + struct json_object *tagbucket, *tagObj; + int i; + const char *tagCstr; + + if (tag == NULL) + return 1; + if ((tagbucket = json_object_object_get(json, "event.tags")) != NULL) { + if (json_object_get_type(tagbucket) == json_type_array) { + for (i = json_object_array_length(tagbucket) - 1; i >= 0; i--) { + tagObj = json_object_array_get_idx(tagbucket, i); + tagCstr = json_object_get_string(tagObj); + if (!strcmp(tag, tagCstr)) + return 1; + } + } + } + if (verbose > 1) + printf("Mandatory tag '%s' has not been found\n", tag); + return 0; +} /* normalize input data */ @@ -105,44 +134,41 @@ { FILE *fp = stdin; char buf[10*1024]; - es_str_t *str; - struct ee_event *event = NULL; - es_str_t *constUnparsed; + struct json_object *json = NULL; long long unsigned numUnparsed = 0; long long unsigned numWrongTag = 0; - - constUnparsed = es_newStrFromBuf("unparsed-data", sizeof("unparsed-data") - 1); + char *mandatoryTagCstr = NULL; + + if (mandatoryTag != NULL) { + mandatoryTagCstr = es_str2cstr(mandatoryTag, NULL); + } while((fgets(buf, sizeof(buf), fp)) != NULL) { buf[strlen(buf)-1] = '\0'; if(strlen(buf) > 0 && buf[strlen(buf)-1] == '\r') buf[strlen(buf)-1] = '\0'; if(verbose > 0) printf("To normalize: '%s'\n", buf); - str = es_newStrFromCStr(buf, strlen(buf)); - ln_normalize(ctx, str, &event); - //printf("normalize result: %d\n", ln_normalizeRec(ctx, ctx->ptree, str, 0, &event)); - if(event != NULL) { - if( mandatoryTag == NULL - || (mandatoryTag != NULL && ee_EventHasTag(event, mandatoryTag))) { - if( parsedOnly == 1 - && ee_getEventField(event, constUnparsed) != NULL){ + ln_normalize(ctx, buf, strlen(buf), &json); + if(json != NULL) { + if(eventHasTag(json, mandatoryTagCstr)) { + if( parsedOnly == 1 + && json_object_object_get(json, "unparsed-data") != NULL) { numUnparsed++; } else { - outputEvent(event); + outputEvent(json); } } else { numWrongTag++; } - ee_deleteEvent(event); - event = NULL; + json_object_put(json); + json = NULL; } - es_deleteStr(str); } if(numUnparsed > 0) fprintf(stderr, "%llu unparsable entries\n", numUnparsed); if(numWrongTag > 0) fprintf(stderr, "%llu entries with wrong tag dropped\n", numWrongTag); - es_deleteStr(constUnparsed); + free(mandatoryTagCstr); } @@ -214,18 +240,10 @@ errout("Could not initialize liblognorm context"); } - if((eectx = ee_initCtx()) == NULL) { - errout("Could not initialize libee context"); - } - if(flatTags) { - ee_setFlags(eectx, EE_CTX_FLAG_INCLUDE_FLAT_TAGS); - } - if(verbose) { ln_setDebugCB(ctx, dbgCallBack, NULL); ln_enableDebug(ctx, 1); } - ln_setEECtx(ctx, eectx); ln_loadSamples(ctx, repository); @@ -242,5 +260,7 @@ 
normalize(); ln_exitCtx(ctx); + if (encFmt != NULL) + free(encFmt); return 0; } diff -Nru liblognorm-0.3.7/src/lognorm.h liblognorm-1.0.1/src/lognorm.h --- liblognorm-0.3.7/src/lognorm.h 2012-04-04 09:11:54.000000000 +0000 +++ liblognorm-1.0.1/src/lognorm.h 2014-01-22 15:52:06.000000000 +0000 @@ -27,6 +27,7 @@ #ifndef LIBLOGNORM_LOGNORM_HINCLUDED #define LIBLOGNORM_LOGNORM_HINCLUDED #include /* we need size_t */ +#include "liblognorm.h" #include "ptree.h" #include "annot.h" @@ -34,11 +35,10 @@ #define LN_ObjID_CTX 0xFEFE0001 struct ln_ctx_s { - unsigned objID; /**< a magic number to prevent some memory adressing errors */ - void (*dbgCB)(void *cookie, char *msg, size_t lenMsg); + unsigned objID; /**< a magic number to prevent some memory addressing errors */ + void (*dbgCB)(void *cookie, const char *msg, size_t lenMsg); /**< user-provided debug output callback */ void *dbgCookie; /**< cookie to be passed to debug callback */ - ee_ctx eectx; ln_ptree *ptree; /**< parse tree being used by this context */ ln_annotSet *pas; /**< associated set of annotations */ unsigned nNodes; /**< number of nodes in our parse tree */ diff -Nru liblognorm-0.3.7/src/Makefile.am liblognorm-1.0.1/src/Makefile.am --- liblognorm-0.3.7/src/Makefile.am 2013-07-18 07:37:17.000000000 +0000 +++ liblognorm-1.0.1/src/Makefile.am 2014-01-22 15:52:06.000000000 +0000 @@ -8,8 +8,8 @@ # milestone (latest at initial release!) bin_PROGRAMS = lognormalizer lognormalizer_SOURCES = lognormalizer.c -lognormalizer_CPPFLAGS = -I$(top_srcdir) $(LIBEE_CFLAGS) $(LIBESTR_CFLAGS) -lognormalizer_LDADD = $(LIBEE_LIBS) $(LIBLOGNORM_LIBS) $(LIBESTR_LIBS) +lognormalizer_CPPFLAGS = -I$(top_srcdir) $(JSON_C_CFLAGS) $(LIBESTR_CFLAGS) +lognormalizer_LDADD = $(JSON_C_LIBS) $(LIBLOGNORM_LIBS) $(LIBESTR_LIBS) lognormalizer_DEPENDENCIES = liblognorm.la lib_LTLIBRARIES = liblognorm.la @@ -19,11 +19,15 @@ ptree.c \ annot.c \ samp.c \ - lognorm.c + lognorm.c \ + parser.c \ + enc_syslog.c \ + enc_csv.c \ + enc_xml.c -liblognorm_la_CPPFLAGS = $(LIBEE_CFLAGS) $(LIBESTR_CFLAGS) -liblognorm_la_LIBADD = $(rt_libs) $(LIBEE_LIBS) $(LIBESTR_LIBS) -lestr -liblognorm_la_LDFLAGS = -version-info 0:0:0 +liblognorm_la_CPPFLAGS = $(JSON_C_CFLAGS) $(LIBESTR_CFLAGS) +liblognorm_la_LIBADD = $(rt_libs) $(JSON_C_LIBS) $(LIBESTR_LIBS) -lestr +liblognorm_la_LDFLAGS = -version-info 1:0:0 EXTRA_DIST = \ internal.h \ @@ -31,6 +35,8 @@ lognorm.h \ ptree.h \ annot.h \ - samp.h + samp.h \ + enc.h \ + parser.h -include_HEADERS = liblognorm.h samp.h lognorm.h ptree.h annot.h +include_HEADERS = liblognorm.h samp.h lognorm.h ptree.h annot.h enc.h parser.h diff -Nru liblognorm-0.3.7/src/Makefile.in liblognorm-1.0.1/src/Makefile.in --- liblognorm-0.3.7/src/Makefile.in 2013-07-18 07:37:56.000000000 +0000 +++ liblognorm-1.0.1/src/Makefile.in 2014-04-11 04:30:18.000000000 +0000 @@ -1,9 +1,8 @@ -# Makefile.in generated by automake 1.11.3 from Makefile.am. +# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ -# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software -# Foundation, Inc. +# Copyright (C) 1994-2013 Free Software Foundation, Inc. + # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
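One detail of the src/Makefile.am hunk above that is easy to miss: the libtool interface version is bumped along with the API. Under standard libtool semantics (not spelled out in the patch itself), -version-info current:revision:age maps on ELF platforms to a shared-object suffix of (current-age).(age).(revision), so:

    0:0:0  ->  liblognorm.so.0.0.0   (old libee-based interface)
    1:0:0  ->  liblognorm.so.1.0.0   (new json-c based interface)

which is what allows the liblognorm0 and liblognorm1 runtime packages mentioned in the README to coexist on one system.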
@@ -18,6 +17,51 @@ VPATH = @srcdir@ +am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' +am__make_running_with_option = \ + case $${target_option-} in \ + ?) ;; \ + *) echo "am__make_running_with_option: internal error: invalid" \ + "target option '$${target_option-}' specified" >&2; \ + exit 1;; \ + esac; \ + has_opt=no; \ + sane_makeflags=$$MAKEFLAGS; \ + if $(am__is_gnu_make); then \ + sane_makeflags=$$MFLAGS; \ + else \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + bs=\\; \ + sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ + | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ + esac; \ + fi; \ + skip_next=no; \ + strip_trailopt () \ + { \ + flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ + }; \ + for flg in $$sane_makeflags; do \ + test $$skip_next = yes && { skip_next=no; continue; }; \ + case $$flg in \ + *=*|--*) continue;; \ + -*I) strip_trailopt 'I'; skip_next=yes;; \ + -*I?*) strip_trailopt 'I';; \ + -*O) strip_trailopt 'O'; skip_next=yes;; \ + -*O?*) strip_trailopt 'O';; \ + -*l) strip_trailopt 'l'; skip_next=yes;; \ + -*l?*) strip_trailopt 'l';; \ + -[dEDm]) skip_next=yes;; \ + -[JT]) skip_next=yes;; \ + esac; \ + case $$flg in \ + *$$target_option*) has_opt=yes; break;; \ + esac; \ + done; \ + test $$has_opt = yes +am__make_dryrun = (target_option=n; $(am__make_running_with_option)) +am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ @@ -38,8 +82,8 @@ host_triplet = @host@ bin_PROGRAMS = lognormalizer$(EXEEXT) subdir = src -DIST_COMMON = $(include_HEADERS) $(srcdir)/Makefile.am \ - $(srcdir)/Makefile.in +DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ + $(top_srcdir)/depcomp $(include_HEADERS) ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ @@ -86,17 +130,32 @@ $(am__DEPENDENCIES_1) am_liblognorm_la_OBJECTS = liblognorm_la-liblognorm.lo \ liblognorm_la-ptree.lo liblognorm_la-annot.lo \ - liblognorm_la-samp.lo liblognorm_la-lognorm.lo + liblognorm_la-samp.lo liblognorm_la-lognorm.lo \ + liblognorm_la-parser.lo liblognorm_la-enc_syslog.lo \ + liblognorm_la-enc_csv.lo liblognorm_la-enc_xml.lo liblognorm_la_OBJECTS = $(am_liblognorm_la_OBJECTS) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent +am__v_lt_1 = liblognorm_la_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(liblognorm_la_LDFLAGS) $(LDFLAGS) -o $@ PROGRAMS = $(bin_PROGRAMS) am_lognormalizer_OBJECTS = lognormalizer-lognormalizer.$(OBJEXT) lognormalizer_OBJECTS = $(am_lognormalizer_OBJECTS) +AM_V_P = $(am__v_P_@AM_V@) +am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) +am__v_P_0 = false +am__v_P_1 = : +AM_V_GEN = $(am__v_GEN_@AM_V@) +am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) +am__v_GEN_0 = @echo " GEN " $@; +am__v_GEN_1 = +AM_V_at = $(am__v_at_@AM_V@) +am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) +am__v_at_0 = @ +am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles @@ -109,23 +168,41 @@ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) -am__v_CC_0 = @echo " CC " $@; -AM_V_at = $(am__v_at_@AM_V@) -am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) -am__v_at_0 = @ +am__v_CC_0 = @echo " CC " $@; +am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ 
$(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) -o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) -am__v_CCLD_0 = @echo " CCLD " $@; -AM_V_GEN = $(am__v_GEN_@AM_V@) -am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) -am__v_GEN_0 = @echo " GEN " $@; +am__v_CCLD_0 = @echo " CCLD " $@; +am__v_CCLD_1 = SOURCES = $(liblognorm_la_SOURCES) $(lognormalizer_SOURCES) DIST_SOURCES = $(liblognorm_la_SOURCES) $(lognormalizer_SOURCES) +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac HEADERS = $(include_HEADERS) +am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) +# Read a list of newline-separated strings from the standard input, +# and print each of them once, without duplicates. Input order is +# *not* preserved. +am__uniquify_input = $(AWK) '\ + BEGIN { nonempty = 0; } \ + { items[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in items) print i; }; } \ +' +# Make sure the list of sources is unique. This is necessary because, +# e.g., the same source file might be shared among _SOURCES variables +# for different programs/libraries. +am__define_uniq_tagged_files = \ + list='$(am__tagged_files)'; \ + unique=`for i in $$list; do \ + if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ + done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) @@ -160,10 +237,10 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ +JSON_C_CFLAGS = @JSON_C_CFLAGS@ +JSON_C_LIBS = @JSON_C_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ -LIBEE_CFLAGS = @LIBEE_CFLAGS@ -LIBEE_LIBS = @LIBEE_LIBS@ LIBESTR_CFLAGS = @LIBESTR_CFLAGS@ LIBESTR_LIBS = @LIBESTR_LIBS@ LIBLOGNORM_CFLAGS = @LIBLOGNORM_CFLAGS@ @@ -198,6 +275,7 @@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ +SPHINXBUILD = @SPHINXBUILD@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ @@ -257,8 +335,8 @@ DEBUG = -g PTHREADS_CFLAGS = -pthread lognormalizer_SOURCES = lognormalizer.c -lognormalizer_CPPFLAGS = -I$(top_srcdir) $(LIBEE_CFLAGS) $(LIBESTR_CFLAGS) -lognormalizer_LDADD = $(LIBEE_LIBS) $(LIBLOGNORM_LIBS) $(LIBESTR_LIBS) +lognormalizer_CPPFLAGS = -I$(top_srcdir) $(JSON_C_CFLAGS) $(LIBESTR_CFLAGS) +lognormalizer_LDADD = $(JSON_C_LIBS) $(LIBLOGNORM_LIBS) $(LIBESTR_LIBS) lognormalizer_DEPENDENCIES = liblognorm.la lib_LTLIBRARIES = liblognorm.la liblognorm_la_SOURCES = \ @@ -266,20 +344,26 @@ ptree.c \ annot.c \ samp.c \ - lognorm.c - -liblognorm_la_CPPFLAGS = $(LIBEE_CFLAGS) $(LIBESTR_CFLAGS) -liblognorm_la_LIBADD = $(rt_libs) $(LIBEE_LIBS) $(LIBESTR_LIBS) -lestr -liblognorm_la_LDFLAGS = -version-info 0:0:0 + lognorm.c \ + parser.c \ + enc_syslog.c \ + enc_csv.c \ + enc_xml.c + +liblognorm_la_CPPFLAGS = $(JSON_C_CFLAGS) $(LIBESTR_CFLAGS) +liblognorm_la_LIBADD = $(rt_libs) $(JSON_C_LIBS) $(LIBESTR_LIBS) -lestr +liblognorm_la_LDFLAGS = -version-info 1:0:0 EXTRA_DIST = \ internal.h \ liblognorm.h \ lognorm.h \ ptree.h \ annot.h \ - samp.h + samp.h \ + enc.h \ + parser.h -include_HEADERS = liblognorm.h samp.h lognorm.h ptree.h annot.h +include_HEADERS = liblognorm.h samp.h lognorm.h ptree.h annot.h enc.h parser.h all: all-am .SUFFIXES: @@ -314,9 +398,9 @@ $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): + install-libLTLIBRARIES: $(lib_LTLIBRARIES) @$(NORMAL_INSTALL) - test -z 
"$(libdir)" || $(MKDIR_P) "$(DESTDIR)$(libdir)" @list='$(lib_LTLIBRARIES)'; test -n "$(libdir)" || list=; \ list2=; for p in $$list; do \ if test -f $$p; then \ @@ -324,6 +408,8 @@ else :; fi; \ done; \ test -z "$$list2" || { \ + echo " $(MKDIR_P) '$(DESTDIR)$(libdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(libdir)" || exit 1; \ echo " $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 '$(DESTDIR)$(libdir)'"; \ $(LIBTOOL) $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=install $(INSTALL) $(INSTALL_STRIP_FLAG) $$list2 "$(DESTDIR)$(libdir)"; \ } @@ -339,24 +425,32 @@ clean-libLTLIBRARIES: -test -z "$(lib_LTLIBRARIES)" || rm -f $(lib_LTLIBRARIES) - @list='$(lib_LTLIBRARIES)'; for p in $$list; do \ - dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \ - test "$$dir" != "$$p" || dir=.; \ - echo "rm -f \"$${dir}/so_locations\""; \ - rm -f "$${dir}/so_locations"; \ - done + @list='$(lib_LTLIBRARIES)'; \ + locs=`for p in $$list; do echo $$p; done | \ + sed 's|^[^/]*$$|.|; s|/[^/]*$$||; s|$$|/so_locations|' | \ + sort -u`; \ + test -z "$$locs" || { \ + echo rm -f $${locs}; \ + rm -f $${locs}; \ + } + liblognorm.la: $(liblognorm_la_OBJECTS) $(liblognorm_la_DEPENDENCIES) $(EXTRA_liblognorm_la_DEPENDENCIES) $(AM_V_CCLD)$(liblognorm_la_LINK) -rpath $(libdir) $(liblognorm_la_OBJECTS) $(liblognorm_la_LIBADD) $(LIBS) install-binPROGRAMS: $(bin_PROGRAMS) @$(NORMAL_INSTALL) - test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)" @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ + fi; \ for p in $$list; do echo "$$p $$p"; done | \ sed 's/$(EXEEXT)$$//' | \ - while read p p1; do if test -f $$p || test -f $$p1; \ - then echo "$$p"; echo "$$p"; else :; fi; \ + while read p p1; do if test -f $$p \ + || test -f $$p1 \ + ; then echo "$$p"; echo "$$p"; else :; fi; \ done | \ - sed -e 'p;s,.*/,,;n;h' -e 's|.*|.|' \ + sed -e 'p;s,.*/,,;n;h' \ + -e 's|.*|.|' \ -e 'p;x;s,.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/' | \ sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1 } \ @@ -377,7 +471,8 @@ @list='$(bin_PROGRAMS)'; test -n "$(bindir)" || list=; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 'h;s,^.*/,,;s/$(EXEEXT)$$//;$(transform)' \ - -e 's/$$/$(EXEEXT)/' `; \ + -e 's/$$/$(EXEEXT)/' \ + `; \ test -n "$$list" || exit 0; \ echo " ( cd '$(DESTDIR)$(bindir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(bindir)" && rm -f $$files @@ -390,6 +485,7 @@ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list + lognormalizer$(EXEEXT): $(lognormalizer_OBJECTS) $(lognormalizer_DEPENDENCIES) $(EXTRA_lognormalizer_DEPENDENCIES) @rm -f lognormalizer$(EXEEXT) $(AM_V_CCLD)$(LINK) $(lognormalizer_OBJECTS) $(lognormalizer_LDADD) $(LIBS) @@ -401,8 +497,12 @@ -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-annot.Plo@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-enc_csv.Plo@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-enc_syslog.Plo@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-enc_xml.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-liblognorm.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-lognorm.Plo@am__quote@ +@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-parser.Plo@am__quote@ 
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-ptree.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/liblognorm_la-samp.Plo@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/lognormalizer-lognormalizer.Po@am__quote@ @@ -463,6 +563,34 @@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o liblognorm_la-lognorm.lo `test -f 'lognorm.c' || echo '$(srcdir)/'`lognorm.c +liblognorm_la-parser.lo: parser.c +@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT liblognorm_la-parser.lo -MD -MP -MF $(DEPDIR)/liblognorm_la-parser.Tpo -c -o liblognorm_la-parser.lo `test -f 'parser.c' || echo '$(srcdir)/'`parser.c +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/liblognorm_la-parser.Tpo $(DEPDIR)/liblognorm_la-parser.Plo +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='parser.c' object='liblognorm_la-parser.lo' libtool=yes @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o liblognorm_la-parser.lo `test -f 'parser.c' || echo '$(srcdir)/'`parser.c + +liblognorm_la-enc_syslog.lo: enc_syslog.c +@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT liblognorm_la-enc_syslog.lo -MD -MP -MF $(DEPDIR)/liblognorm_la-enc_syslog.Tpo -c -o liblognorm_la-enc_syslog.lo `test -f 'enc_syslog.c' || echo '$(srcdir)/'`enc_syslog.c +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/liblognorm_la-enc_syslog.Tpo $(DEPDIR)/liblognorm_la-enc_syslog.Plo +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='enc_syslog.c' object='liblognorm_la-enc_syslog.lo' libtool=yes @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o liblognorm_la-enc_syslog.lo `test -f 'enc_syslog.c' || echo '$(srcdir)/'`enc_syslog.c + +liblognorm_la-enc_csv.lo: enc_csv.c +@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT liblognorm_la-enc_csv.lo -MD -MP -MF $(DEPDIR)/liblognorm_la-enc_csv.Tpo -c -o liblognorm_la-enc_csv.lo `test -f 'enc_csv.c' || echo '$(srcdir)/'`enc_csv.c +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/liblognorm_la-enc_csv.Tpo $(DEPDIR)/liblognorm_la-enc_csv.Plo +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='enc_csv.c' object='liblognorm_la-enc_csv.lo' libtool=yes @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ 
DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o liblognorm_la-enc_csv.lo `test -f 'enc_csv.c' || echo '$(srcdir)/'`enc_csv.c + +liblognorm_la-enc_xml.lo: enc_xml.c +@am__fastdepCC_TRUE@ $(AM_V_CC)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT liblognorm_la-enc_xml.lo -MD -MP -MF $(DEPDIR)/liblognorm_la-enc_xml.Tpo -c -o liblognorm_la-enc_xml.lo `test -f 'enc_xml.c' || echo '$(srcdir)/'`enc_xml.c +@am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/liblognorm_la-enc_xml.Tpo $(DEPDIR)/liblognorm_la-enc_xml.Plo +@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='enc_xml.c' object='liblognorm_la-enc_xml.lo' libtool=yes @AMDEPBACKSLASH@ +@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ +@am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(liblognorm_la_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o liblognorm_la-enc_xml.lo `test -f 'enc_xml.c' || echo '$(srcdir)/'`enc_xml.c + lognormalizer-lognormalizer.o: lognormalizer.c @am__fastdepCC_TRUE@ $(AM_V_CC)$(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(lognormalizer_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT lognormalizer-lognormalizer.o -MD -MP -MF $(DEPDIR)/lognormalizer-lognormalizer.Tpo -c -o lognormalizer-lognormalizer.o `test -f 'lognormalizer.c' || echo '$(srcdir)/'`lognormalizer.c @am__fastdepCC_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/lognormalizer-lognormalizer.Tpo $(DEPDIR)/lognormalizer-lognormalizer.Po @@ -484,8 +612,11 @@ -rm -rf .libs _libs install-includeHEADERS: $(include_HEADERS) @$(NORMAL_INSTALL) - test -z "$(includedir)" || $(MKDIR_P) "$(DESTDIR)$(includedir)" @list='$(include_HEADERS)'; test -n "$(includedir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(includedir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(includedir)" || exit 1; \ + fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ @@ -501,26 +632,15 @@ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(includedir)'; $(am__uninstall_files_from_dir) -ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ - mkid -fID $$unique -tags: TAGS +ID: $(am__tagged_files) + $(am__define_uniq_tagged_files); mkid -fID $$unique +tags: tags-am +TAGS: tags -TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) +tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ + $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n 
"$$unique" || unique=$$empty_fix; \ @@ -532,15 +652,11 @@ $$unique; \ fi; \ fi -ctags: CTAGS -CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ - END { if (nonempty) { for (i in files) print i; }; }'`; \ +ctags: ctags-am + +CTAGS: ctags +ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) + $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique @@ -549,6 +665,21 @@ here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" +cscopelist: cscopelist-am + +cscopelist-am: $(am__tagged_files) + list='$(am__tagged_files)'; \ + case "$(srcdir)" in \ + [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ + *) sdir=$(subdir)/$(srcdir) ;; \ + esac; \ + for i in $$list; do \ + if test -f "$$i"; then \ + echo "$(subdir)/$$i"; \ + else \ + echo "$$sdir/$$i"; \ + fi; \ + done >> $(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags @@ -696,20 +827,21 @@ .MAKE: install-am install-strip -.PHONY: CTAGS GTAGS all all-am check check-am clean clean-binPROGRAMS \ - clean-generic clean-libLTLIBRARIES clean-libtool ctags \ - distclean distclean-compile distclean-generic \ - distclean-libtool distclean-tags distdir dvi dvi-am html \ - html-am info info-am install install-am install-binPROGRAMS \ - install-data install-data-am install-dvi install-dvi-am \ - install-exec install-exec-am install-html install-html-am \ +.PHONY: CTAGS GTAGS TAGS all all-am check check-am clean \ + clean-binPROGRAMS clean-generic clean-libLTLIBRARIES \ + clean-libtool cscopelist-am ctags ctags-am distclean \ + distclean-compile distclean-generic distclean-libtool \ + distclean-tags distdir dvi dvi-am html html-am info info-am \ + install install-am install-binPROGRAMS install-data \ + install-data-am install-dvi install-dvi-am install-exec \ + install-exec-am install-html install-html-am \ install-includeHEADERS install-info install-info-am \ install-libLTLIBRARIES install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ - tags uninstall uninstall-am uninstall-binPROGRAMS \ + tags tags-am uninstall uninstall-am uninstall-binPROGRAMS \ uninstall-includeHEADERS uninstall-libLTLIBRARIES diff -Nru liblognorm-0.3.7/src/parser.c liblognorm-1.0.1/src/parser.c --- liblognorm-0.3.7/src/parser.c 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/src/parser.c 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,794 @@ +/* + * liblognorm - a fast samples-based log normalization library + * Copyright 2010-2013 by Rainer Gerhards and Adiscon GmbH. + * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * + * This file is part of liblognorm. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + * A copy of the LGPL v2.1 can be found in the file "COPYING" in this distribution. + */ +#include "config.h" +#include +#include +#include +#include +#include +#include +#include + +#include "liblognorm.h" +#include "internal.h" +#include "parser.h" + +/* some helpers */ +static inline int +hParseInt(const unsigned char **buf, size_t *lenBuf) +{ + const unsigned char *p = *buf; + size_t len = *lenBuf; + int i = 0; + + while(len > 0 && isdigit(*p)) { + i = i * 10 + *p - '0'; + ++p; + --len; + } + + *buf = p; + *lenBuf = len; + return i; +} + +/* parsers for the primitive types + * + * All parsers receive + * + * @param[in] str the to-be-parsed string + * @param[in] strLen length of the to-be-parsed string + * @param[in] offs an offset into the string + * @param[in] ed string with extra data for parser use + * @param[out] parsed bytes + * @param[out] json object containing parsed data (can be unused) + * + * They will try to parse out "their" object from the string. If they + * succeed, they: + * + * return 0 on success and LN_WRONGPARSER if this parser could + * not successfully parse (but all went well otherwise) and something + * else in case of an error. + */ +#define BEGINParser(ParserName) \ +int ln_parse##ParserName(const char *str, size_t strLen, size_t *offs, \ + __attribute__((unused)) es_str_t *ed, size_t *parsed,\ + __attribute__((unused)) struct json_object **value) \ +{ \ + int r = LN_WRONGPARSER; \ + *parsed = 0; + +#define ENDParser \ + goto done; /* suppress warnings */ \ +done: \ + r = 0; \ + goto fail; /* suppress warnings */ \ +fail: \ + return r; \ +} + + +/** + * Parse a TIMESTAMP as specified in RFC5424 (subset of RFC3339). + */ +BEGINParser(RFC5424Date) + const unsigned char *pszTS; + /* variables to temporarily hold time information while we parse */ + __attribute__((unused)) int year; + int month; + int day; + int hour; /* 24 hour clock */ + int minute; + int second; + __attribute__((unused)) int secfrac; /* fractional seconds (must be 32 bit!) */ + __attribute__((unused)) int secfracPrecision; + __attribute__((unused)) char OffsetMode; /* UTC offset + or - */ + char OffsetHour; /* UTC offset in hours */ + int OffsetMinute; /* UTC offset in minutes */ + size_t len; + size_t orglen; + /* end variables to temporarily hold time information while we parse */ + + pszTS = (unsigned char*) str + *offs; + len = orglen = strLen - *offs; + + year = hParseInt(&pszTS, &len); + + /* We take the liberty to accept slightly malformed timestamps e.g. in + * the format of 2003-9-1T1:0:0. 
*/ + if(len == 0 || *pszTS++ != '-') goto fail; + --len; + month = hParseInt(&pszTS, &len); + if(month < 1 || month > 12) goto fail; + + if(len == 0 || *pszTS++ != '-') + goto fail; + --len; + day = hParseInt(&pszTS, &len); + if(day < 1 || day > 31) goto fail; + + if(len == 0 || *pszTS++ != 'T') goto fail; + --len; + + hour = hParseInt(&pszTS, &len); + if(hour < 0 || hour > 23) goto fail; + + if(len == 0 || *pszTS++ != ':') + goto fail; + --len; + minute = hParseInt(&pszTS, &len); + if(minute < 0 || minute > 59) goto fail; + + if(len == 0 || *pszTS++ != ':') goto fail; + --len; + second = hParseInt(&pszTS, &len); + if(second < 0 || second > 60) goto fail; + + /* Now let's see if we have secfrac */ + if(len > 0 && *pszTS == '.') { + --len; + const unsigned char *pszStart = ++pszTS; + secfrac = hParseInt(&pszTS, &len); + secfracPrecision = (int) (pszTS - pszStart); + } else { + secfracPrecision = 0; + secfrac = 0; + } + + /* check the timezone */ + if(len == 0) goto fail; + + if(*pszTS == 'Z') { + --len; + pszTS++; /* eat Z */ + OffsetMode = 'Z'; + OffsetHour = 0; + OffsetMinute = 0; + } else if((*pszTS == '+') || (*pszTS == '-')) { + OffsetMode = *pszTS; + --len; + pszTS++; + + OffsetHour = hParseInt(&pszTS, &len); + if(OffsetHour < 0 || OffsetHour > 23) + goto fail; + + if(len == 0 || *pszTS++ != ':') + goto fail; + --len; + OffsetMinute = hParseInt(&pszTS, &len); + if(OffsetMinute < 0 || OffsetMinute > 59) + goto fail; + } else { + /* there MUST be TZ information */ + goto fail; + } + + if(len > 0) { + if(*pszTS != ' ') /* if it is not a space, it can not be a "good" time */ + goto fail; + } + + /* we had success, so update parse pointer */ + *parsed = orglen - len; + +ENDParser + + +/** + * Parse a RFC3164 Date. + */ +BEGINParser(RFC3164Date) + const unsigned char *p; + size_t len, orglen; + /* variables to temporarily hold time information while we parse */ + __attribute__((unused)) int month; + int day; + //int year = 0; /* 0 means no year provided */ + int hour; /* 24 hour clock */ + int minute; + int second; + + p = (unsigned char*) str + *offs; + orglen = len = strLen - *offs; + /* If we look at the month (Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec), + * we may see the following character sequences occur: + * + * J(an/u(n/l)), Feb, Ma(r/y), A(pr/ug), Sep, Oct, Nov, Dec + * + * We will use this for parsing, as it probably is the + * fastest way to parse it. 
+ */ + if(len < 3) + goto fail; + + switch(*p++) + { + case 'j': + case 'J': + if(*p == 'a' || *p == 'A') { + ++p; + if(*p == 'n' || *p == 'N') { + ++p; + month = 1; + } else + goto fail; + } else if(*p == 'u' || *p == 'U') { + ++p; + if(*p == 'n' || *p == 'N') { + ++p; + month = 6; + } else if(*p == 'l' || *p == 'L') { + ++p; + month = 7; + } else + goto fail; + } else + goto fail; + break; + case 'f': + case 'F': + if(*p == 'e' || *p == 'E') { + ++p; + if(*p == 'b' || *p == 'B') { + ++p; + month = 2; + } else + goto fail; + } else + goto fail; + break; + case 'm': + case 'M': + if(*p == 'a' || *p == 'A') { + ++p; + if(*p == 'r' || *p == 'R') { + ++p; + month = 3; + } else if(*p == 'y' || *p == 'Y') { + ++p; + month = 5; + } else + goto fail; + } else + goto fail; + break; + case 'a': + case 'A': + if(*p == 'p' || *p == 'P') { + ++p; + if(*p == 'r' || *p == 'R') { + ++p; + month = 4; + } else + goto fail; + } else if(*p == 'u' || *p == 'U') { + ++p; + if(*p == 'g' || *p == 'G') { + ++p; + month = 8; + } else + goto fail; + } else + goto fail; + break; + case 's': + case 'S': + if(*p == 'e' || *p == 'E') { + ++p; + if(*p == 'p' || *p == 'P') { + ++p; + month = 9; + } else + goto fail; + } else + goto fail; + break; + case 'o': + case 'O': + if(*p == 'c' || *p == 'C') { + ++p; + if(*p == 't' || *p == 'T') { + ++p; + month = 10; + } else + goto fail; + } else + goto fail; + break; + case 'n': + case 'N': + if(*p == 'o' || *p == 'O') { + ++p; + if(*p == 'v' || *p == 'V') { + ++p; + month = 11; + } else + goto fail; + } else + goto fail; + break; + case 'd': + case 'D': + if(*p == 'e' || *p == 'E') { + ++p; + if(*p == 'c' || *p == 'C') { + ++p; + month = 12; + } else + goto fail; + } else + goto fail; + break; + default: + goto fail; + } + + len -= 3; + + /* done month */ + + if(len == 0 || *p++ != ' ') + goto fail; + --len; + + /* we accept a slightly malformed timestamp with one-digit days. */ + if(*p == ' ') { + --len; + ++p; + } + + day = hParseInt(&p, &len); + if(day < 1 || day > 31) + goto fail; + + if(len == 0 || *p++ != ' ') + goto fail; + --len; + + /* time part */ + hour = hParseInt(&p, &len); + if(hour > 1970 && hour < 2100) { + /* if so, we assume this actually is a year. This is a format found + * e.g. in Cisco devices. + * + year = hour; + */ + + /* re-query the hour, this time it must be valid */ + if(len == 0 || *p++ != ' ') + goto fail; + --len; + hour = hParseInt(&p, &len); + } + + if(hour < 0 || hour > 23) + goto fail; + + if(len == 0 || *p++ != ':') + goto fail; + --len; + minute = hParseInt(&p, &len); + if(minute < 0 || minute > 59) + goto fail; + + if(len == 0 || *p++ != ':') + goto fail; + --len; + second = hParseInt(&p, &len); + if(second < 0 || second > 60) + goto fail; + + /* we provide support for an extra ":" after the date. While this is an + * invalid format, it occurs frequently enough (e.g. with Cisco devices) + * to permit it as a valid case. -- rgerhards, 2008-09-12 + */ + if(len > 0 && *p == ':') { + ++p; /* just skip past it */ + --len; + } + + /* we had success, so update parse pointer */ + *parsed = orglen - len; + +ENDParser + + +/** + * Parse a Number. + * Note that a number is an abstracted concept. We always represent it + * as 64 bits (but may later change our mind if performance dictates so). 
+ */ +BEGINParser(Number) + const char *c; + size_t i; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + c = str; + + for (i = *offs; i < strLen && isdigit(c[i]); i++); + if (i == *offs) + goto fail; + + /* success, persist */ + *parsed = i - *offs; + +ENDParser + + +/** + * Parse a word. + * A word is a SP-delimited entity. The parser always works, except if + * the offset is position on a space upon entry. + */ +BEGINParser(Word) + const char *c; + size_t i; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + c = str; + i = *offs; + + /* search end of word */ + while(i < strLen && c[i] != ' ') + i++; + + if(i == *offs) { + goto fail; + } + + /* success, persist */ + *parsed = i - *offs; + +ENDParser + + +/** + * Parse everything up to a specific character. + * The character must be the only char inside extra data passed to the parser. + * It is a program error if strlen(ed) != 1. It is considered a format error if + * a) the to-be-parsed buffer is already positioned on the terminator character + * b) there is no terminator until the end of the buffer + * In those cases, the parsers declares itself as not being successful, in all + * other cases a string is extracted. + */ +BEGINParser(CharTo) + const char *c; + unsigned char cTerm; + size_t i; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + assert(es_strlen(ed) == 1); + cTerm = *(es_getBufAddr(ed)); + c = str; + i = *offs; + + /* search end of word */ + while(i < strLen && c[i] != cTerm) + i++; + + if(i == *offs || i == strLen || c[i] != cTerm) { + r = LN_WRONGPARSER; + goto fail; + } + + /* success, persist */ + *parsed = i - *offs; + +ENDParser + + +/** + * Parse everything up to a specific character, or up to the end of string. + * The character must be the only char inside extra data passed to the parser. + * It is a program error if strlen(ed) != 1. + * This parser always returns success. + * By nature of the parser, it is required that end of string or the separator + * follows this field in rule. + */ +BEGINParser(CharSeparated) + const char *c; + unsigned char cTerm; + size_t i; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + assert(es_strlen(ed) == 1); + cTerm = *(es_getBufAddr(ed)); + c = str; + i = *offs; + + /* search end of word */ + while(i < strLen && c[i] != cTerm) + i++; + + /* success, persist */ + *parsed = i - *offs; + +ENDParser + + +/** + * Just get everything till the end of string. + */ +BEGINParser(Rest) + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + + /* silence the warning about unused variable */ + (void)str; + /* success, persist */ + *parsed = strLen - *offs; + +ENDParser + + +/** + * Parse a quoted string. In this initial implementation, escaping of the quote + * char is not supported. A quoted string is one start starts with a double quote, + * has some text (not containing double quotes) and ends with the first double + * quote character seen. The extracted string does NOT include the quote characters. 
+ * rgerhards, 2011-01-14 + */ +BEGINParser(QuotedString) + const char *c; + size_t i; + char *cstr; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + c = str; + i = *offs; + if(i + 2 > strLen) + goto fail; /* needs at least 2 characters */ + + if(c[i] != '"') + goto fail; + ++i; + + /* search end of string */ + while(i < strLen && c[i] != '"') + i++; + + if(i == strLen || c[i] != '"') { + r = LN_WRONGPARSER; + goto fail; + } + + /* success, persist */ + *parsed = i + 1 - *offs; /* "eat" terminal double quote */ + /* create JSON value to save quoted string contents */ + CHKN(cstr = strndup((char*)c + *offs + 1, *parsed - 2)); + CHKN(*value = json_object_new_string(cstr)); + free(cstr); + +ENDParser + + +/** + * Parse an ISO date, that is YYYY-MM-DD (exactly this format). + * Note: we do manual loop unrolling -- this is fast AND efficient. + * rgerhards, 2011-01-14 + */ +BEGINParser(ISODate) + const char *c; + size_t i; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + c = str; + i = *offs; + + if(*offs+10 > strLen) + goto fail; /* if it is not 10 chars, it can't be an ISO date */ + + /* year */ + if(!isdigit(c[i])) goto fail; + if(!isdigit(c[i+1])) goto fail; + if(!isdigit(c[i+2])) goto fail; + if(!isdigit(c[i+3])) goto fail; + if(c[i+4] != '-') goto fail; + /* month */ + if(c[i+5] == '0') { + if(c[i+6] < '1' || c[i+6] > '9') goto fail; + } else if(c[i+5] == '1') { + if(c[i+6] < '0' || c[i+6] > '2') goto fail; + } else { + goto fail; + } + if(c[i+7] != '-') goto fail; + /* day */ + if(c[i+8] == '0') { + if(c[i+9] < '1' || c[i+9] > '9') goto fail; + } else if(c[i+8] == '1' || c[i+8] == '2') { + if(!isdigit(c[i+9])) goto fail; + } else if(c[i+8] == '3') { + if(c[i+9] != '0' && c[i+9] != '1') goto fail; + } else { + goto fail; + } + + /* success, persist */ + *parsed = 10; + +ENDParser + +/** + * Parse a timestamp in 24hr format (exactly HH:MM:SS). + * Note: we do manual loop unrolling -- this is fast AND efficient. + * rgerhards, 2011-01-14 + */ +BEGINParser(Time24hr) + const char *c; + size_t i; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + c = str; + i = *offs; + + if(*offs+8 > strLen) + goto fail; /* if it is not 8 chars, it can't be us */ + + /* hour */ + if(c[i] == '0' || c[i] == '1') { + if(!isdigit(c[i+1])) goto fail; + } else if(c[i] == '2') { + if(c[i+1] < '0' || c[i+1] > '3') goto fail; + } else { + goto fail; + } + /* TODO: the code below is a duplicate of 24hr parser - create common function */ + if(c[i+2] != ':') goto fail; + if(c[i+3] < '0' || c[i+3] > '5') goto fail; + if(!isdigit(c[i+4])) goto fail; + if(c[i+5] != ':') goto fail; + if(c[i+6] < '0' || c[i+6] > '5') goto fail; + if(!isdigit(c[i+7])) goto fail; + + /* success, persist */ + *parsed = 8; + +ENDParser + +/** + * Parse a timestamp in 12hr format (exactly HH:MM:SS). + * Note: we do manual loop unrolling -- this is fast AND efficient. + * TODO: the code below is a duplicate of 24hr parser - create common function? 
+ * rgerhards, 2011-01-14 + */ +BEGINParser(Time12hr) + const char *c; + size_t i; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + c = str; + i = *offs; + + if(*offs+8 > strLen) + goto fail; /* if it is not 8 chars, it can't be us */ + + /* hour */ + if(c[i] == '0') { + if(!isdigit(c[i+1])) goto fail; + } else if(c[i] == '1') { + if(c[i+1] < '0' || c[i+1] > '2') goto fail; + } else { + goto fail; + } + if(c[i+2] != ':') goto fail; + if(c[i+3] < '0' || c[i+3] > '5') goto fail; + if(!isdigit(c[i+4])) goto fail; + if(c[i+5] != ':') goto fail; + if(c[i+6] < '0' || c[i+6] > '5') goto fail; + if(!isdigit(c[i+7])) goto fail; + + /* success, persist */ + *parsed = 8; + +ENDParser + + + + +/* helper to IPv4 address parser, checks the next set of numbers. + * Syntax 1 to 3 digits, value together not larger than 255. + * @param[in] str parse buffer + * @param[in/out] offs offset into buffer, updated if successful + * @return 0 if OK, 1 otherwise + */ +static int +chkIPv4AddrByte(const char *str, size_t strLen, size_t *offs) +{ + int val = 0; + int r = 1; /* default: fail -- simplifies things */ + const char *c; + size_t i = *offs; + + c = str; + if(i == strLen || !isdigit(c[i])) + goto fail; + val = c[i++] - '0'; + if(i < strLen && isdigit(c[i])) { + val = val * 10 + c[i++] - '0'; + if(i < strLen && isdigit(c[i])) + val = val * 10 + c[i++] - '0'; + } + if(val > 255) /* cannot be a valid IP address byte! */ + goto fail; + + *offs = i; + r = 0; +fail: + return r; +} + +/** + * Parser for IPv4 addresses. + */ +BEGINParser(IPv4) + const char *c; + size_t i; + + assert(str != NULL); + assert(offs != NULL); + assert(parsed != NULL); + i = *offs; + if(i + 7 > strLen) { + /* IPv4 addr requires at least 7 characters */ + goto fail; + } + c = str; + + /* byte 1*/ + if(chkIPv4AddrByte(str, strLen, &i) != 0) goto fail; + if(i == strLen || c[i++] != '.') goto fail; + /* byte 2*/ + if(chkIPv4AddrByte(str, strLen, &i) != 0) goto fail; + if(i == strLen || c[i++] != '.') goto fail; + /* byte 3*/ + if(chkIPv4AddrByte(str, strLen, &i) != 0) goto fail; + if(i == strLen || c[i++] != '.') goto fail; + /* byte 4 - we do NOT need any char behind it! */ + if(chkIPv4AddrByte(str, strLen, &i) != 0) goto fail; + + /* if we reach this point, we found a valid IP address */ + *parsed = i - *offs; + +ENDParser diff -Nru liblognorm-0.3.7/src/parser.h liblognorm-1.0.1/src/parser.h --- liblognorm-0.3.7/src/parser.h 1970-01-01 00:00:00.000000000 +0000 +++ liblognorm-1.0.1/src/parser.h 2014-01-22 15:52:06.000000000 +0000 @@ -0,0 +1,105 @@ +/* + * + * liblognorm - a fast samples-based log normalization library + * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. + * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * + * This file is part of liblognorm. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + * + * A copy of the LGPL v2.1 can be found in the file "COPYING" in this distribution. + */ +#ifndef LIBLOGNORM_PARSER_H_INCLUDED +#define LIBLOGNORM_PARSER_H_INCLUDED + +/** + * Parser interface. + * @param[in] str input string + * @param[in] offs offset where parsing has to start inside str. + * @param[in] ed string with extra data (if needed) + * @param[out] parsed int number of characters consumed by the parser. + * @return 0 on success, something else otherwise + */ + + +/** + * Parser for RFC5424 date. + */ +int ln_parseRFC5424Date(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + +/** + * Parser for RFC3164 date. + */ +int ln_parseRFC3164Date(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + +/** + * Parser for numbers. + */ +int ln_parseNumber(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + + +/** + * Parser for Words (SP-terminated strings). + */ +int ln_parseWord(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + + +/** + * Parse everything up to a specific character. + */ +int ln_parseCharTo(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + +/** + * Parse everything up to a specific character (relaxed constraints, suitable for CSV) + */ +int ln_parseCharSeparated(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + + +/** + * Get everything till the rest of string. + */ +int ln_parseRest(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + + +/** + * Parse a quoted string. + */ +int ln_parseQuotedString(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + +/** + * Parse an ISO date. + */ +int ln_parseISODate(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + + +/** + * Parse a timestamp in 12hr format. + */ +int ln_parseTime12hr(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + + +/** + * Parse a timestamp in 24hr format. + */ +int ln_parseTime24hr(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + +/** + * Parser for IPv4 addresses. + */ +int ln_parseIPv4(const char *str, size_t strlen, size_t *offs, es_str_t *ed, size_t *parsed, struct json_object **value); + +#endif /* #ifndef LIBLOGNORM_PARSER_H_INCLUDED */ diff -Nru liblognorm-0.3.7/src/ptree.c liblognorm-1.0.1/src/ptree.c --- liblognorm-0.3.7/src/ptree.c 2012-04-04 09:11:54.000000000 +0000 +++ liblognorm-1.0.1/src/ptree.c 2014-04-11 04:24:50.000000000 +0000 @@ -5,6 +5,8 @@ *//* * Copyright 2010 by Rainer Gerhards and Adiscon GmbH. * + * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013 + * * This file is part of liblognorm. 
* * This library is free software; you can redistribute it and/or @@ -31,6 +33,7 @@ #include #include #include +#include #include "liblognorm.h" #include "lognorm.h" @@ -38,6 +41,7 @@ #include "ptree.h" #include "annot.h" #include "internal.h" +#include "parser.h" /** * Get base addr of common prefix. Takes length of prefix in account @@ -65,26 +69,30 @@ done: return tree; } +static void +ln_deletePTreeNode(ln_fieldList_t *node) +{ + ln_deletePTree(node->subtree); + es_deleteStr(node->name); + if(node->data != NULL) + es_deleteStr(node->data); + free(node); +} void ln_deletePTree(struct ln_ptree *tree) { - ln_fieldList_t *node, *nodeDel; - es_size_t i; + ln_fieldList_t *node, *nextnode; + size_t i; if(tree == NULL) goto done; if(tree->tags != NULL) - ee_deleteTagbucket(tree->tags); - for(node = tree->froot ; node != NULL ; ) { - ln_deletePTree(node->subtree); - nodeDel = node; - es_deleteStr(node->name); - if(node->data != NULL) - es_deleteStr(node->data); - node = node->next; - free(nodeDel); + json_object_put(tree->tags); + for(node = tree->froot; node != NULL; node = nextnode) { + nextnode = node->next; + ln_deletePTreeNode(node); } /* need to free a large prefix buffer? */ @@ -100,48 +108,16 @@ } -struct ln_ptree* -ln_traversePTree(struct ln_ptree *subtree, es_str_t *str, es_size_t *parsedTo) -{ - es_size_t i = 0; - unsigned char *c; - struct ln_ptree *curr = subtree; - struct ln_ptree *prev = NULL; - - ln_dbgprintf(subtree->ctx, "traversePTree: begin at %p", curr); - c = es_getBufAddr(str); - while(curr != NULL && i < es_strlen(str)) { - // TODO: implement commonPrefix - ln_dbgprintf(subtree->ctx, "traversePTree: curr %p, char '%u'", curr, c[i]); - prev = curr; - curr = curr->subtree[c[i++]]; - }; - ln_dbgprintf(subtree->ctx, "traversePTree: after search %p", curr); - - if(curr == NULL) { - curr = prev; - } - - if(i == es_strlen(str)) - --i; - - *parsedTo = i; - ln_dbgprintf(subtree->ctx, "traversePTree: returns node %p, offset %u", curr, (unsigned) i); - return curr; -} - - - /** * Set the common prefix inside a note, taking into account the subtle * issues associated with it. * @return 0 on success, something else otherwise */ static int -setPrefix(struct ln_ptree *tree, unsigned char *buf, es_size_t lenBuf, es_size_t offs) +setPrefix(struct ln_ptree *tree, unsigned char *buf, size_t lenBuf, size_t offs) { int r; -ln_dbgprintf(tree->ctx, "setPrefix lenBuf %u, offs %d", lenBuf, offs); +ln_dbgprintf(tree->ctx, "setPrefix lenBuf %zu, offs %zu", lenBuf, offs); tree->lenPrefix = lenBuf - offs; if(tree->lenPrefix > sizeof(tree->prefix)) { /* too-large for standard buffer, need to alloc one */ @@ -186,23 +162,24 @@ /** * Check if the provided tree is a true leaf. This means that it - * does not contain any subtrees of any kind and no prefix. + * does not contain any subtrees of any kind and no prefix, + * and it is not terminal leaf. 
* @return 1 if it is a leaf, 0 otherwise */ static inline int isTrueLeaf(struct ln_ptree *tree) { - return((tree->lenPrefix == 0) && isLeaf(tree)); + return((tree->lenPrefix == 0) && isLeaf(tree)) && !tree->flags.isTerminal; } struct ln_ptree * -ln_addPTree(struct ln_ptree *tree, es_str_t *str, es_size_t offs) +ln_addPTree(struct ln_ptree *tree, es_str_t *str, size_t offs) { struct ln_ptree *r; struct ln_ptree **parentptr; /**< pointer in parent that needs to be updated */ -ln_dbgprintf(tree->ctx, "addPTree: offs %u", offs); +ln_dbgprintf(tree->ctx, "addPTree: offs %zu", offs); parentptr = &(tree->subtree[es_getBufAddr(str)[offs]]); /* First check if tree node is totaly empty. If so, we can simply add * the prefix to this node. This case is important, because it happens @@ -218,9 +195,9 @@ } if(tree->ctx->debug) { - char * cstr = es_str2cstr(str, NULL); - ln_dbgprintf(tree->ctx, "addPTree: add '%s', offs %u, tree %p", - cstr+offs, (unsigned) offs, tree); + char *cstr = es_str2cstr(str, NULL); + ln_dbgprintf(tree->ctx, "addPTree: add '%s', offs %zu, tree %p", + cstr + offs, offs, tree); free(cstr); } @@ -307,16 +284,16 @@ struct ln_ptree * -ln_buildPTree(struct ln_ptree *tree, es_str_t *str, es_size_t offs) +ln_buildPTree(struct ln_ptree *tree, es_str_t *str, size_t offs) { struct ln_ptree *r; unsigned char *c; unsigned char *cpfix; - es_size_t i; + size_t i; unsigned short ipfix; assert(tree != NULL); - ln_dbgprintf(tree->ctx, "buildPTree: begin at %p, offs %u", tree, offs); + ln_dbgprintf(tree->ctx, "buildPTree: begin at %p, offs %zu", tree, offs); c = es_getBufAddr(str); /* check if the prefix matches and, if not, at what offset it is different */ @@ -326,7 +303,7 @@ ; (i < es_strlen(str)) && (ipfix < tree->lenPrefix) && (c[i] == cpfix[ipfix]) ; ++i, ++ipfix) { ; /*DO NOTHING - just find end of match */ - ln_dbgprintf(tree->ctx, "buildPTree: tree %p, i %d, char '%c'", tree, (int)i, c[i]); + ln_dbgprintf(tree->ctx, "buildPTree: tree %p, i %zu, char '%c'", tree, i, c[i]); } /* if we reach this point, we have processed as much of the common prefix @@ -345,11 +322,11 @@ r = splitTree(tree, ipfix); } } else if(ipfix < tree->lenPrefix) { - ln_dbgprintf(tree->ctx, "case 2, i=%u, ipfix=%u", i, ipfix); + ln_dbgprintf(tree->ctx, "case 2, i=%zu, ipfix=%u", i, ipfix); /* we need to split the node at the current position */ if((r = splitTree(tree, ipfix)) == NULL) goto done; /* fail */ -ln_dbgprintf(tree->ctx, "pre addPTree: i %u", i); +ln_dbgprintf(tree->ctx, "pre addPTree: i %zu", i); if((r = ln_addPTree(r, str, i)) == NULL) goto done; //r = ln_buildPTree(r, str, i + 1); @@ -394,8 +371,13 @@ * TODO: optimized, check logic */ for(curr = (*tree)->froot ; curr != NULL ; curr = curr->next) { - if(!es_strcmp(curr->name, node->name)) { + if(!es_strcmp(curr->name, node->name) + && curr->parser == node->parser + && ((curr->data == NULL && node->data == NULL) + || (curr->data != NULL && node->data != NULL + && !es_strcmp(curr->data, node->data)))) { *tree = curr->subtree; + ln_deletePTreeNode(node); r = 0; ln_dbgprintf((*tree)->ctx, "merging with tree %p\n", *tree); goto done; @@ -477,7 +459,7 @@ { char buf[64]; int i; - i = snprintf(buf, sizeof(buf), "%llu", (unsigned long long) p); + i = snprintf(buf, sizeof(buf), "%p", p); es_addBuf(str, buf, i); } /** @@ -538,54 +520,32 @@ } -/* TODO: Move to a better location? 
*/ - -static inline int -addField(ln_ctx ctx, struct ee_event **event, es_str_t *name, struct ee_value *value) -{ - int r; - struct ee_field *field; - - if(*event == NULL) { - CHKN(*event = ee_newEvent(ctx->eectx)); - } - - CHKN(field = ee_newField(ctx->eectx)); - CHKR(ee_nameField(field, name)); - CHKR(ee_addValueToField(field, value)); - CHKR(ee_addFieldToEvent(*event, field)); - r = 0; - -done: return r; -} - - /** * add unparsed string to event. */ static inline int -addUnparsedField(ln_ctx ctx, es_str_t *str, es_size_t offs, struct ee_event **event) +addUnparsedField(const char *str, size_t strLen, int offs, struct json_object *json) { - struct ee_value *value; - es_str_t *namestr; - es_str_t *valstr; - int r; + int r = 1; + struct json_object *value; + char *s = NULL; + CHKN(s = strndup(str, strLen)); + value = json_object_new_string(s); + if (value == NULL) { + goto done; + } + json_object_object_add(json, "originalmsg", value); + + value = json_object_new_string(s + offs); + if (value == NULL) { + goto done; + } + json_object_object_add(json, "unparsed-data", value); - CHKN(value = ee_newValue(ctx->eectx)); - CHKN(namestr = es_newStrFromCStr("originalmsg", sizeof("originalmsg") - 1)); - CHKN(valstr = es_strdup(str)); - ee_setStrValue(value, valstr); - addField(ctx, event, namestr, value); - es_deleteStr(namestr); - - CHKN(value = ee_newValue(ctx->eectx)); - CHKN(namestr = es_newStrFromCStr("unparsed-data", sizeof("unparsed-data") - 1)); - CHKN(valstr = es_newStrFromSubStr(str, offs, es_strlen(str) - offs)); - ee_setStrValue(value, valstr); - addField(ctx, event, namestr, value); - es_deleteStr(namestr); r = 0; -done: return r; +done: + free(s); + return r; } @@ -597,65 +557,70 @@ * can otherwise not be processed by liblognorm in a meaningful way. * * @param[in] tree current tree to process - * @param[in] string string to be matched against (the to-be-normalized data) + * @param[in] str string to be matched against (the to-be-normalized data) + * @param[in] strLen length of str * @param[in/out] offs start position in input data, on exit first unparsed position * @param[in/out] event handle to event that is being created during normalization * * @return 0 if parser was successfully, something else on error */ static int -ln_iptablesParser(struct ln_ptree *tree, es_str_t *str, es_size_t *offs, - struct ee_event **event) +ln_iptablesParser(struct ln_ptree *tree, const char *str, size_t strLen, size_t *offs, + struct json_object *json) { int r; - es_size_t o = *offs; + size_t o = *offs; es_str_t *fname; es_str_t *fval; - struct ee_value *value; - unsigned char *pstr; - unsigned char *end; + const char *pstr; + const char *end; + struct json_object *value; -ln_dbgprintf(tree->ctx, "%d enter iptable parser, len %d", (int) *offs, (int) es_strlen(str)); - if(o == es_strlen(str)) { +ln_dbgprintf(tree->ctx, "%zu enter iptables parser, len %zu", *offs, strLen); + if(o == strLen) { r = -1; /* can not be, we have no n/v pairs! 
  */
 		goto done;
 	}
-	end = es_getBufAddr(str) + es_strlen(str);
-	pstr = es_getBufAddr(str) + o;
+	end = str + strLen;
+	pstr = str + o;
 	while(pstr < end) {
-		while(isspace(*pstr))
+		while(pstr < end && isspace(*pstr))
 			++pstr;
-		fname = es_newStr(16);
-		while(!isspace(*pstr) && *pstr != '=') {
+		CHKN(fname = es_newStr(16));
+		while(pstr < end && !isspace(*pstr) && *pstr != '=') {
 			es_addChar(&fname, *pstr);
 			++pstr;
 		}
-		if(*pstr == '=') {
-			fval = es_newStr(16);
+		if(pstr < end && *pstr == '=') {
+			CHKN(fval = es_newStr(16));
 			++pstr;
 			/* error on space */
-			while(!isspace(*pstr) && pstr < end) {
+			while(pstr < end && !isspace(*pstr)) {
 				es_addChar(&fval, *pstr);
 				++pstr;
 			}
 		} else {
-			fval = es_newStrFromCStr("[*PRESENT*]", sizeof("[*PRESENT*]")-1);
+			CHKN(fval = es_newStrFromCStr("[*PRESENT*]",
+				sizeof("[*PRESENT*]")-1));
 		}
 		char *cn, *cv;
-		cn = es_str2cstr(fname, NULL);
-		cv = es_str2cstr(fval, NULL);
-ln_dbgprintf(tree->ctx, "iptable parser extracts %s=%s", cn, cv);
-		value = ee_newValue(tree->ctx->eectx);
-		ee_setStrValue(value, fval);
-		CHKR(addField(tree->ctx, event, fname, value));
+		CHKN(cn = ln_es_str2cstr(&fname));
+		CHKN(cv = ln_es_str2cstr(&fval));
+		if (tree->ctx->debug) {
+			ln_dbgprintf(tree->ctx, "iptables parser extracts %s=%s", cn, cv);
+		}
+		CHKN(value = json_object_new_string(cv));
+		json_object_object_add(json, cn, value);
+		es_deleteStr(fval);
+		es_deleteStr(fname);
 	}

 	r = 0;
-	*offs = es_strlen(str);
+	*offs = strLen;

 done:
-	ln_dbgprintf(tree->ctx, "%d iptable parser returns %d", (int) *offs, (int) r);
+	ln_dbgprintf(tree->ctx, "%zu iptables parser returns %d", *offs, r);
 	return r;
 }

@@ -676,34 +641,36 @@
  * characters.
  */
 static int
-ln_normalizeRec(struct ln_ptree *tree, es_str_t *str, es_size_t offs, struct ee_event **event,
+ln_normalizeRec(struct ln_ptree *tree, const char *str, size_t strLen, size_t offs, struct json_object *json,
 		struct ln_ptree **endNode)
 {
 	int r;
 	int localR;
-	es_size_t i;
+	size_t i;
 	int left;
 	ln_fieldList_t *node;
-	struct ee_value *value;
 	char *cstr;
-	unsigned char *c;
+	const char *c;
 	unsigned char *cpfix;
 	unsigned ipfix;
+	size_t parsed;
+	char *namestr;
+	struct json_object *value;

-	if(offs >= es_strlen(str)) {
+	if(offs >= strLen) {
 		*endNode = tree;
 		r = -tree->lenPrefix;
 		goto done;
 	}

-	c = es_getBufAddr(str);
+	c = str;
 	cpfix = prefixBase(tree);
 	node = tree->froot;
-	r = es_strlen(str) - offs;
+	r = strLen - offs;
 	/* first we need to check if the common prefix matches (and consume input data while we do) */
 	ipfix = 0;
-	while(offs < es_strlen(str) && ipfix < tree->lenPrefix) {
-		ln_dbgprintf(tree->ctx, "%d: prefix compare '%c', '%c'", (int) offs, c[offs], cpfix[ipfix]);
+	while(offs < strLen && ipfix < tree->lenPrefix) {
+		ln_dbgprintf(tree->ctx, "%zu: prefix compare '%c', '%c'", offs, c[offs], cpfix[ipfix]);
 		if(c[offs] != cpfix[ipfix]) {
 			r -= ipfix;
 			goto done;
@@ -718,61 +685,73 @@
 	}
 	r -= ipfix;
-	ln_dbgprintf(tree->ctx, "%d: prefix compare succeeded, still valid", (int) offs);
-
-	if(offs == es_strlen(str)) {
-		*endNode = tree;
-		r = 0;
-		goto done;
-	}
-
+	ln_dbgprintf(tree->ctx, "%zu: prefix compare succeeded, still valid", offs);
 	/* now try the parsers */
 	while(node != NULL) {
 		if(tree->ctx->debug) {
 			cstr = es_str2cstr(node->name, NULL);
-			ln_dbgprintf(tree->ctx, "%d:trying parser for field '%s': %p",
-				(int) offs, cstr, node->parser);
+			ln_dbgprintf(tree->ctx, "%zu:trying parser for field '%s': %p",
+				offs, cstr, node->parser);
 			free(cstr);
 		}
 		i = offs;
 		if(node->isIPTables) {
-			localR = ln_iptablesParser(tree, str, &i, event);
-			ln_dbgprintf(tree->ctx, "%d iptables parser return, i=%d",
-				(int) offs, (int)i);
+			localR = ln_iptablesParser(tree, str, strLen, &i, json);
+			ln_dbgprintf(tree->ctx, "%zu iptables parser return, i=%zu",
+				offs, i);
 			if(localR == 0) {
 				/* potential hit, need to verify */
 				ln_dbgprintf(tree->ctx, "potential hit, trying subtree");
-				left = ln_normalizeRec(node->subtree, str, i, event, endNode);
+				left = ln_normalizeRec(node->subtree, str, strLen, i, json, endNode);
 				if(left == 0 && (*endNode)->flags.isTerminal) {
-					ln_dbgprintf(tree->ctx, "%d: parser matches at %d", (int) offs, (int)i);
+					ln_dbgprintf(tree->ctx, "%zu: parser matches at %zu", offs, i);
 					r = 0;
 					goto done;
 				}
-				ln_dbgprintf(tree->ctx, "%d nonmatch, backtracking required, left=%d",
-					(int) offs, (int)left);
+				ln_dbgprintf(tree->ctx, "%zu nonmatch, backtracking required, left=%d",
+					offs, left);
 				if(left < r)
 					r = left;
 			}
 		} else {
-			localR = node->parser(tree->ctx->eectx, str, &i, node->data, &value);
+			value = NULL;
+			localR = node->parser(str, strLen, &i, node->data, &parsed, &value);
+			ln_dbgprintf(tree->ctx, "parser returns %d, parsed %zu", localR, parsed);
 			if(localR == 0) {
 				/* potential hit, need to verify */
 				ln_dbgprintf(tree->ctx, "potential hit, trying subtree");
-				left = ln_normalizeRec(node->subtree, str, i, event, endNode);
+				left = ln_normalizeRec(node->subtree, str, strLen, i + parsed, json, endNode);
 				if(left == 0 && (*endNode)->flags.isTerminal) {
-					ln_dbgprintf(tree->ctx, "%d: parser matches at %d", (int) offs, (int)i);
-					if(!es_strbufcmp(node->name, (unsigned char*)"-", 1))
-						ee_deleteValue(value); /* filler, discard */
-					else
-						CHKR(addField(tree->ctx, event, node->name, value));
+					ln_dbgprintf(tree->ctx, "%zu: parser matches at %zu", offs, i);
+					if(es_strbufcmp(node->name, (unsigned char*)"-", 1)) {
+						/* Store the value here; create json if not already created */
+						if (value == NULL) {
+							CHKN(cstr = strndup(str + i, parsed));
+							value = json_object_new_string(cstr);
+							free(cstr);
+						}
+						if (value == NULL) {
+							ln_dbgprintf(tree->ctx, "unable to create json");
+							goto done;
+						}
+						namestr = ln_es_str2cstr(&node->name);
+						json_object_object_add(json, namestr, value);
+					} else {
+						if (value != NULL) {
+							/* Free the unneeded value */
+							json_object_put(value);
+						}
+					}
 					r = 0;
 					goto done;
-				} else {
-					ee_deleteValue(value); /* was created, now not needed */
 				}
-				ln_dbgprintf(tree->ctx, "%d nonmatch, backtracking required, left=%d",
-					(int) offs, (int)left);
+				ln_dbgprintf(tree->ctx, "%zu nonmatch, backtracking required, left=%d",
+					offs, left);
+				if (value != NULL) {
+					/* Free the value if it was created */
+					json_object_put(value);
+				}
 				if(left < r)
 					r = left;
 			}
@@ -780,34 +759,44 @@
 		node = node->next;
 	}

-if(offs < es_strlen(str)) {
-unsigned char cc = es_getBufAddr(str)[offs];
-ln_dbgprintf(tree->ctx, "%u no field, trying subtree char '%c': %p", offs, cc, tree->subtree[cc]);
+	if(offs == strLen) {
+		*endNode = tree;
+		r = 0;
+		goto done;
+	}
+
+if(offs < strLen) {
+unsigned char cc = str[offs];
+ln_dbgprintf(tree->ctx, "%zu no field, trying subtree char '%c': %p", offs, cc, tree->subtree[cc]);
 } else {
-ln_dbgprintf(tree->ctx, "%u no field, offset already beyond end", offs);
+ln_dbgprintf(tree->ctx, "%zu no field, offset already beyond end", offs);
 }
 	/* now let's see if we have a literal */
-	if(tree->subtree[es_getBufAddr(str)[offs]] != NULL) {
-		left = ln_normalizeRec(tree->subtree[es_getBufAddr(str)[offs]],
-				str, offs + 1, event, endNode);
+	if(tree->subtree[(int)str[offs]] != NULL) {
+		left = ln_normalizeRec(tree->subtree[(int)str[offs]],
+				str, strLen, offs + 1, json, endNode);
 		if(left < r)
 			r = left;
 	}

 done:
-	ln_dbgprintf(tree->ctx, "%d returns %d", (int) offs, (int) r);
+	ln_dbgprintf(tree->ctx, "%zu returns %d", offs, r);
 	return r;
 }


 int
-ln_normalize(ln_ctx ctx, es_str_t *str, struct ee_event **event)
+ln_normalize(ln_ctx ctx, const char *str, size_t strLen, struct json_object **json_p)
 {
 	int r;
 	int left;
-	struct ln_ptree *endNode;
+	struct ln_ptree *endNode = NULL;

-	left = ln_normalizeRec(ctx->ptree, str, 0, event, &endNode);
+	if(*json_p == NULL) {
+		CHKN(*json_p = json_object_new_object());
+	}
+
+	left = ln_normalizeRec(ctx->ptree, str, strLen, 0, *json_p, &endNode);

 	if(ctx->debug) {
 		if(left == 0) {
@@ -822,18 +811,17 @@
 	if(left != 0 || !endNode->flags.isTerminal) {
 		/* we could not successfully parse, some unparsed items left */
 		if(left < 0) {
-			addUnparsedField(ctx, str, es_strlen(str), event);
+			addUnparsedField(str, strLen, strLen, *json_p);
 		} else {
-			addUnparsedField(ctx, str, es_strlen(str) - left, event);
+			addUnparsedField(str, strLen, strLen - left, *json_p);
 		}
 	} else {
 		/* success, finalize event */
 		if(endNode->tags != NULL) {
-			if(*event == NULL) {
-				CHKN(*event = ee_newEvent(ctx->eectx));
-			}
-			CHKR(ee_assignTagbucketToEvent(*event, ee_addRefTagbucket(endNode->tags)));
-			CHKR(ln_annotateEvent(ctx, *event));
+			/* add tags to an event */
+			json_object_get(endNode->tags);
+			json_object_object_add(*json_p, "event.tags", endNode->tags);
+			CHKR(ln_annotate(ctx, *json_p, endNode->tags));
 		}
 	}

diff -Nru liblognorm-0.3.7/src/ptree.h liblognorm-1.0.1/src/ptree.h
--- liblognorm-0.3.7/src/ptree.h	2012-04-04 09:11:54.000000000 +0000
+++ liblognorm-1.0.1/src/ptree.h	2014-01-22 15:52:06.000000000 +0000
@@ -3,7 +3,9 @@
  * @brief The parse tree object.
  * @class ln_ptree ptree.h
  *//*
- * Copyright 2010 by Rainer Gerhards and Adiscon GmbH.
+ * Copyright 2013 by Rainer Gerhards and Adiscon GmbH.
+ *
+ * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013
 *
 * This file is meant to be included by applications using liblognorm.
 * For lognorm library files themselves, include "lognorm.h".
@@ -29,7 +31,6 @@
 #ifndef LIBLOGNORM_PTREE_H_INCLUDED
 #define LIBLOGNORM_PTREE_H_INCLUDED
 #include 
-#include 

 typedef struct ln_ptree ln_ptree; /**< the parse tree object */
 typedef struct ln_fieldList_s ln_fieldList_t;
@@ -44,12 +45,13 @@
 * optimize it so that frequently used fields are moved "up" towards
 * the root of the list. In any case, we do NOT expect this list to
 * be long, as the parser should already have gotten quite specific when
- * we hit a field.
+ * we hit a fieldconst .
 */
 struct ln_fieldList_s {
 	es_str_t *name;		/**< field name */
 	es_str_t *data;		/**< extra data to be passed to parser */
-	int (*parser)(ee_ctx, es_str_t*, es_size_t*, es_str_t*, struct ee_value**);
+	int (*parser)(const char*, size_t, size_t*, es_str_t*, size_t*,
+		struct json_object **);
 			/**< parser to use */
 	ln_ptree *subtree;	/**< subtree to follow if parser succeeded */
 	ln_fieldList_t *next;	/**< list housekeeping, next node (or NULL) */
@@ -68,7 +70,7 @@
 	struct {
 		unsigned isTerminal:1;	/**< designates this node a terminal sequence? */
 	} flags;
-	struct ee_tagbucket *tags;	/* tags to assign to events of this type */
+	struct json_object *tags;	/* tags to assign to events of this type */
 	/* the respresentation below requires a lof of memory but is
 	 * very fast. As an alternate approach, we can use a hash table
 	 * where we ignore control characters. That should work quite well.
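As an aside for readers of this patch: the ptree.c and ptree.h hunks above replace the libee-based API (es_str_t input, struct ee_event output) with a plain buffer plus length and a json-c object. A minimal caller sketch against the new ln_normalize() signature shown above follows; the rulebase file name is a placeholder, and the surrounding context calls (ln_initCtx, ln_loadSamples, ln_exitCtx) are the usual liblognorm 1.x API assumed here, not part of this patch.

#include <stdio.h>
#include <string.h>
#include <json.h>	/* json-c; header path may differ by json-c version */
#include <liblognorm.h>

int main(void)
{
	const char *msg = "Oct 11 22:33:44 host sshd[1234]: Accepted password for user";
	struct json_object *json = NULL;	/* ln_normalize() allocates the object when passed NULL */
	ln_ctx ctx;
	int ret = 1;

	if((ctx = ln_initCtx()) == NULL)
		return 1;
	if(ln_loadSamples(ctx, "messages.rulebase") != 0)	/* placeholder rulebase file */
		goto done;
	if(ln_normalize(ctx, msg, strlen(msg), &json) == 0 && json != NULL) {
		/* matched fields, and any rule tags under "event.tags", are now in json */
		printf("%s\n", json_object_to_json_string(json));
		ret = 0;
	}
	if(json != NULL)
		json_object_put(json);	/* release our reference */
done:
	ln_exitCtx(ctx);
	return ret;
}

Input that cannot be matched is recorded via addUnparsedField(), and rule tags are attached under the "event.tags" key, as the ln_normalize() hunk above shows. The remaining ptree.h hunks follow.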
@@ -123,30 +125,6 @@

 /**
- * Traverse a (sub) tree according to a string.
- *
- * This functions traverses the provided tree according to the
- * provided string. It navigates to the deepest node possible.
- * Then, it returns this node as well as the position until which
- * the string could be parsed. If there is no match at all,
- * NULL is returned instead of a tree node. Note that this is
- * different from the case where the root of the subtree is
- * returned. In that case, there was at least a single match
- * inside that root.
- * @memberof ln_ptree
- *
- * @param[in] subtree root of subtree to traverse
- * @param[in] str string to parse
- * @param[in/out] parsedTo on entry: start position within string,
- *                on exist position of first unmatched byte
- *
- * @return pointer to found tree node or NULL if there was no match at all
- */
-struct ln_ptree* ln_traversePTree(struct ln_ptree *subtree,
-	es_str_t *str, es_size_t *parsedTo);
-
-
-/**
 * Add a literal to a ptree.
 * Creates new tree nodes as necessary.
 * @memberof ln_ptree
@@ -158,7 +136,7 @@
 * @return NULL on error, otherwise pointer to deepest tree added
 */
 struct ln_ptree*
-ln_addPTree(struct ln_ptree *tree, es_str_t *str, es_size_t offs);
+ln_addPTree(struct ln_ptree *tree, es_str_t *str, size_t offs);


 /**
@@ -202,6 +180,6 @@
 * @return NULL on error, otherwise the ptree leaf that
 *         corresponds to the parameters passed.
 */
-struct ln_ptree * ln_buildPTree(struct ln_ptree *tree, es_str_t *str, es_size_t offs);
+struct ln_ptree * ln_buildPTree(struct ln_ptree *tree, es_str_t *str, size_t offs);

 #endif /* #ifndef LOGNORM_PTREE_H_INCLUDED */
diff -Nru liblognorm-0.3.7/src/samp.c liblognorm-1.0.1/src/samp.c
--- liblognorm-0.3.7/src/samp.c	2013-07-18 07:37:17.000000000 +0000
+++ liblognorm-1.0.1/src/samp.c	2014-01-22 15:52:06.000000000 +0000
@@ -2,6 +2,8 @@
 *
 * Copyright 2010 by Rainer Gerhards and Adiscon GmbH.
 *
+ * Modified by Pavel Levshin (pavel@levshin.spb.ru) in 2013
+ *
 * This file is part of liblognorm.
 *
 * This library is free software; you can redistribute it and/or
@@ -27,16 +29,15 @@
 #include 
 #include 
 #include 
-#include 
-#include 
 #include "liblognorm.h"
 #include "lognorm.h"
 #include "samp.h"
 #include "internal.h"
+#include "parser.h"

 struct ln_sampRepos*
-ln_sampOpen(ln_ctx __attribute((unused)) ctx, char *name)
+ln_sampOpen(ln_ctx __attribute((unused)) ctx, const char *name)
 {
 	struct ln_sampRepos *repo = NULL;
 	FILE *fp;
@@ -162,29 +163,34 @@
 	node->isIPTables = 0;	/* first assume no special parser is used */
 	if(!es_strconstcmp(*str, "date-rfc3164")) {
-		node->parser = ee_parseRFC3164Date;
+		node->parser = ln_parseRFC3164Date;
 	} else if(!es_strconstcmp(*str, "date-rfc5424")) {
-		node->parser = ee_parseRFC5424Date;
+		node->parser = ln_parseRFC5424Date;
 	} else if(!es_strconstcmp(*str, "number")) {
-		node->parser = ee_parseNumber;
+		node->parser = ln_parseNumber;
 	} else if(!es_strconstcmp(*str, "ipv4")) {
-		node->parser = ee_parseIPv4;
+		node->parser = ln_parseIPv4;
 	} else if(!es_strconstcmp(*str, "word")) {
-		node->parser = ee_parseWord;
+		node->parser = ln_parseWord;
+	} else if(!es_strconstcmp(*str, "rest")) {
+		node->parser = ln_parseRest;
 	} else if(!es_strconstcmp(*str, "quoted-string")) {
-		node->parser = ee_parseQuotedString;
+		node->parser = ln_parseQuotedString;
 	} else if(!es_strconstcmp(*str, "date-iso")) {
-		node->parser = ee_parseISODate;
+		node->parser = ln_parseISODate;
 	} else if(!es_strconstcmp(*str, "time-24hr")) {
-		node->parser = ee_parseTime24hr;
+		node->parser = ln_parseTime24hr;
 	} else if(!es_strconstcmp(*str, "time-12hr")) {
-		node->parser = ee_parseTime12hr;
+		node->parser = ln_parseTime12hr;
 	} else if(!es_strconstcmp(*str, "iptables")) {
 		node->parser = NULL;
 		node->isIPTables = 1;
 	} else if(!es_strconstcmp(*str, "char-to")) {
 		// TODO: check extra data!!!! (very important)
-		node->parser = ee_parseCharTo;
+		node->parser = ln_parseCharTo;
+	} else if(!es_strconstcmp(*str, "char-sep")) {
+		// TODO: check extra data!!!! (very important)
+		node->parser = ln_parseCharSeparated;
 	} else {
 		cstr = es_str2cstr(*str, NULL);
 		ln_dbgprintf(ctx, "ERROR: invalid field type '%s'", cstr);
@@ -201,9 +207,7 @@
 		while(i < lenBuf) {
 			if(buf[i] == '%') {
 				++i;
-				if(i == lenBuf || buf[i] != '%') {
-					break; /* end of field */
-				}
+				break; /* end of field */
 			}
 			CHKR(es_addChar(&node->data, buf[i++]));
 		}
@@ -260,8 +264,6 @@
 		CHKR(es_addChar(str, buf[i]));
 		++i;
 	}
-	if(es_strlen(*str) == 0)
-		goto done;
 	es_unescapeStr(*str);

 	if(ctx->debug) {
@@ -293,7 +295,7 @@
 * @returns the new subtree root (or NULL in case of error)
 */
 static inline int
-addSampToTree(ln_ctx ctx, es_str_t *rule, struct ee_tagbucket *tagBucket)
+addSampToTree(ln_ctx ctx, es_str_t *rule, struct json_object *tagBucket)
 {
 	int r;
 	struct ln_ptree* subtree;
@@ -304,14 +306,19 @@
 	CHKN(str = es_newStr(256));
 	i = 0;
 	while(i < es_strlen(rule)) {
-ln_dbgprintf(ctx, "addSampToTree %d of %d", i, es_strlen(rule));
+		ln_dbgprintf(ctx, "addSampToTree %d of %d", i, es_strlen(rule));
 		CHKR(parseLiteral(ctx, &subtree, rule, &i, &str));
-		if(es_strlen(str) == 0) {
-			/* we had no literal, so let's parse a field description */
+		/* After the literal there can be field only*/
+		if (i < es_strlen(rule)) {
 			CHKR(parseFieldDescr(ctx, &subtree, rule, &i, &str));
+			if (i == es_strlen(rule)) {
+				/* finish the tree with empty literal to avoid false merging*/
+				CHKR(parseLiteral(ctx, &subtree, rule, &i, &str));
+			}
 		}
 	}
-ln_dbgprintf(ctx, "end addSampToTree %d of %d", i, es_strlen(rule));
+
+	ln_dbgprintf(ctx, "end addSampToTree %d of %d", i, es_strlen(rule));
 	/* we are at the end of rule processing, so this node is a terminal */
 	subtree->flags.isTerminal = 1;
 	subtree->tags = tagBucket;
@@ -334,7 +341,7 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-getLineType(char *buf, es_size_t lenBuf, es_size_t *offs, es_str_t **str)
+getLineType(const char *buf, es_size_t lenBuf, es_size_t *offs, es_str_t **str)
 {
 	int r;
 	es_size_t i;
@@ -364,7 +371,7 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-getPrefix(char *buf, es_size_t lenBuf, es_size_t offs, es_str_t **str)
+getPrefix(const char *buf, es_size_t lenBuf, es_size_t offs, es_str_t **str)
 {
 	int r;
@@ -374,7 +381,7 @@
 		es_emptyStr(*str);
 	}

-	r = es_addBuf(str, buf + offs, lenBuf - offs);
+	r = es_addBuf(str, (char*)buf + offs, lenBuf - offs);
 done:	return r;
 }
@@ -390,9 +397,9 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-extendPrefix(ln_ctx ctx, char *buf, es_size_t lenBuf, es_size_t offs)
+extendPrefix(ln_ctx ctx, const char *buf, es_size_t lenBuf, es_size_t offs)
 {
-	return es_addBuf(&ctx->rulePrefix, buf+offs, lenBuf - offs);
+	return es_addBuf(&ctx->rulePrefix, (char*)buf+offs, lenBuf - offs);
 }
@@ -405,18 +412,20 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-addTagStrToBucket(ln_ctx ctx, es_str_t *tagname, struct ee_tagbucket **tagBucket)
+addTagStrToBucket(ln_ctx ctx, es_str_t *tagname, struct json_object **tagBucket)
 {
 	int r = -1;
 	char *cstr;
+	struct json_object *tag;

 	if(*tagBucket == NULL) {
-		CHKN(*tagBucket = ee_newTagbucket(ctx->eectx));
+		CHKN(*tagBucket = json_object_new_array());
 	}
 	cstr = es_str2cstr(tagname, NULL);
 	ln_dbgprintf(ctx, "tag found: '%s'", cstr);
+	CHKN(tag = json_object_new_string(cstr));
+	json_object_array_add(*tagBucket, tag);
 	free(cstr);
-	CHKR(ee_addTagToBucket(*tagBucket, tagname));
 	r = 0;
 done:	return r;
@@ -436,7 +445,7 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-processTags(ln_ctx ctx, char *buf, es_size_t lenBuf, es_size_t *poffs, struct ee_tagbucket **tagBucket)
+processTags(ln_ctx ctx, const char *buf, es_size_t lenBuf, es_size_t *poffs, struct json_object **tagBucket)
 {
 	int r = -1;
 	es_str_t *str = NULL;
@@ -448,6 +457,7 @@
 		if(buf[i] == ',') {
 			/* end of this tag */
 			CHKR(addTagStrToBucket(ctx, str, tagBucket));
+			es_deleteStr(str);
 			str = NULL;
 		} else {
 			if(str == NULL) {
@@ -464,6 +474,7 @@
 	if(str != NULL) {
 		CHKR(addTagStrToBucket(ctx, str, tagBucket));
+		es_deleteStr(str);
 	}

 	*poffs = i;
@@ -483,11 +494,11 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-processRule(ln_ctx ctx, char *buf, es_size_t lenBuf, es_size_t offs)
+processRule(ln_ctx ctx, const char *buf, es_size_t lenBuf, es_size_t offs)
 {
 	int r = -1;
 	es_str_t *str;
-	struct ee_tagbucket *tagBucket = NULL;
+	struct json_object *tagBucket = NULL;

 	ln_dbgprintf(ctx, "sample line to add: '%s'\n", buf+offs);
 	CHKR(processTags(ctx, buf, lenBuf, &offs, &tagBucket));
@@ -502,7 +513,7 @@
 	} else {
 		CHKN(str = es_strdup(ctx->rulePrefix));
 	}
-	CHKR(es_addBuf(&str, buf + offs, lenBuf - offs));
+	CHKR(es_addBuf(&str, (char*)buf + offs, lenBuf - offs));
 	addSampToTree(ctx, str, tagBucket);
 	es_deleteStr(str);
 	r = 0;
@@ -522,7 +533,7 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-getFieldName(ln_ctx __attribute__((unused)) ctx, char *buf, es_size_t lenBuf, es_size_t *offs, es_str_t **strTag)
+getFieldName(ln_ctx __attribute__((unused)) ctx, const char *buf, es_size_t lenBuf, es_size_t *offs, es_str_t **strTag)
 {
 	int r = -1;
 	es_size_t i;
@@ -552,7 +563,7 @@
 * @param[in/out] offs on entry: offset first unprocessed position
 */
 static inline void
-skipWhitespace(ln_ctx __attribute__((unused)) ctx, char *buf, es_size_t lenBuf, es_size_t *offs)
+skipWhitespace(ln_ctx __attribute__((unused)) ctx, const char *buf, es_size_t lenBuf, es_size_t *offs)
 {
 	while(*offs < lenBuf && isspace(buf[*offs])) {
 		(*offs)++;
@@ -580,7 +591,7 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-getAnnotationOp(ln_ctx ctx, ln_annot *annot, char *buf, es_size_t lenBuf, es_size_t *offs)
+getAnnotationOp(ln_ctx ctx, ln_annot *annot, const char *buf, es_size_t lenBuf, es_size_t *offs)
 {
 	int r = -1;
 	es_size_t i;
@@ -642,7 +653,7 @@
 * @returns 0 on success, something else otherwise
 */
 static inline int
-processAnnotate(ln_ctx ctx, char *buf, es_size_t lenBuf, es_size_t offs)
+processAnnotate(ln_ctx ctx, const char *buf, es_size_t lenBuf, es_size_t offs)
 {
 	int r;
 	es_str_t *tag = NULL;
@@ -651,7 +662,7 @@
 	ln_dbgprintf(ctx, "sample annotation to add: '%s'", buf+offs);
 	CHKR(getFieldName(ctx, buf, lenBuf, &offs, &tag));
 	skipWhitespace(ctx, buf, lenBuf, &offs);
-	if(buf[offs] != ':') {
+	if(buf[offs] != ':' || tag == NULL) {
 		ln_dbgprintf(ctx, "invalid tag field in annotation, line is '%s'", buf);
 		r=-1;
 		goto done;
@@ -671,7 +682,7 @@
 }

 struct ln_samp *
-ln_processSamp(ln_ctx ctx, char *buf, es_size_t lenBuf)
+ln_processSamp(ln_ctx ctx, const char *buf, es_size_t lenBuf)
 {
 	struct ln_samp *samp = NULL;
 	es_str_t *typeStr = NULL;
diff -Nru liblognorm-0.3.7/src/samp.h liblognorm-1.0.1/src/samp.h
--- liblognorm-0.3.7/src/samp.h	2013-07-18 07:37:17.000000000 +0000
+++ liblognorm-1.0.1/src/samp.h	2014-01-22 15:52:06.000000000 +0000
@@ -59,7 +59,7 @@
 * @return repository object or NULL if failure
 */
 struct ln_sampRepos *
-ln_sampOpen(ln_ctx ctx, char *name);
+ln_sampOpen(ln_ctx ctx, const char *name);


 /**
@@ -86,7 +86,7 @@
 * @return Newly create object or NULL if an error occured.
 */
 struct ln_samp *
-ln_processSamp(ln_ctx ctx, char *buf, es_size_t lenBuf);
+ln_processSamp(ln_ctx ctx, const char *buf, es_size_t lenBuf);


 /**