# Configure script for Ganeti
m4_define([gnt_version_major], [2])
-m4_define([gnt_version_minor], [6])
-m4_define([gnt_version_revision], [1])
-m4_define([gnt_version_suffix], [])
+m4_define([gnt_version_minor], [7])
+m4_define([gnt_version_revision], [0])
+m4_define([gnt_version_suffix], [~beta1])
m4_define([gnt_version_full],
m4_format([%d.%d.%d%s],
gnt_version_major, gnt_version_minor,
[ssh_config_dir="/etc/ssh"])
AC_SUBST(SSH_CONFIG_DIR, $ssh_config_dir)
+# --with-xen-config-dir=...
+# Optional override for the directory that holds Xen instance
+# configuration files; exported to the build system as XEN_CONFIG_DIR.
+AC_ARG_WITH([xen-config-dir],
+ [AS_HELP_STRING([--with-xen-config-dir=DIR],
+ m4_normalize([Xen configuration directory
+ (default: /etc/xen)]))],
+ [xen_config_dir="$withval"],
+ [xen_config_dir=/etc/xen])
+AC_SUBST(XEN_CONFIG_DIR, $xen_config_dir)
+
# --with-os-search-path=...
# do a bit of black sed magic for quoting of the strings in the list
AC_ARG_WITH([os-search-path],
[os_search_path="'/srv/ganeti/os'"])
AC_SUBST(OS_SEARCH_PATH, $os_search_path)
+# --with-extstorage-search-path=...
+# same black sed magic for quoting of the strings in the list
+# (each comma-separated element of LIST is wrapped in single quotes so
+# the result can be substituted into source as a list of strings;
+# character classes are double-bracketed, [[^,]], because of m4 quoting)
+AC_ARG_WITH([extstorage-search-path],
+ [AS_HELP_STRING([--with-extstorage-search-path=LIST],
+ [comma separated list of directories to]
+ [ search for External Storage Providers]
+ [ (default is /srv/ganeti/extstorage)]
+ )],
+ [es_search_path=`echo -n "$withval" | sed -e "s/\([[^,]]*\)/'\1'/g"`],
+ [es_search_path="'/srv/ganeti/extstorage'"])
+AC_SUBST(ES_SEARCH_PATH, $es_search_path)
+
# --with-iallocator-search-path=...
# do a bit of black sed magic for quoting of the strings in the list
AC_ARG_WITH([iallocator-search-path],
then
AC_MSG_WARN(m4_normalize([sphinx-build not found, documentation rebuild will
not be possible]))
+else
+  # Sphinx exits with code 1 when it prints its usage
+  # (hence the "|| :" so the pipeline itself never fails; the first
+  # lines of either the version banner or the usage text are enough)
+  sphinxver=`{ $SPHINX --version 2>&1 || :; } | head -n 3`
+
+  # If the output looks like neither a version banner nor a usage
+  # message, something unexpected is installed as sphinx-build.
+  if ! echo "$sphinxver" | grep -q -w -e '^Sphinx' -e '^Usage:'; then
+    AC_MSG_ERROR([Unable to determine Sphinx version])
+
+  # Note: Character classes ([...]) need to be double quoted due to autoconf
+  # using m4
+  elif ! echo "$sphinxver" | grep -q -E '^Sphinx[[[:space:]]]+v[[1-9]]\>'; then
+    AC_MSG_ERROR([Sphinx 1.0 or higher is required])
+  fi
+fi
+
+
+# --enable-manpages-in-doc
+# Boolean switch: when enabled, the man pages are also rendered into the
+# HTML documentation.  Internally "yes" means enabled and the empty
+# string means disabled, so shell tests can use -n/-z.
+AC_ARG_ENABLE([manpages-in-doc],
+  [AS_HELP_STRING([--enable-manpages-in-doc],
+  m4_normalize([include man pages in HTML documentation
+  (requires sphinx; default disabled)]))],
+  [case "$enableval" in
+      yes) manpages_in_doc=yes ;;
+      no) manpages_in_doc= ;;
+      *)
+        AC_MSG_ERROR([Bad value $enableval for --enable-manpages-in-doc])
+        ;;
+    esac
+  ],
+  [manpages_in_doc=])
+# Expose the setting both as an Automake conditional and a substitution.
+AM_CONDITIONAL([MANPAGES_IN_DOC], [test -n "$manpages_in_doc"])
+AC_SUBST(MANPAGES_IN_DOC, $manpages_in_doc)
+
+# Fail early when man pages were requested in the HTML docs but
+# sphinx-build was not found.  Two chained "test" invocations are used
+# instead of the obsolescent "test ... -a ..." form, which is
+# unreliable/non-portable in some shells.
+if test -z "$SPHINX" && test -n "$manpages_in_doc"; then
+  AC_MSG_ERROR([Including man pages in HTML documentation requires sphinx])
fi
# Check for graphviz (dot)
AC_MSG_WARN([qemu-img not found, using ovfconverter will not be possible])
fi
-# --enable-htools-rapi
-HTOOLS_RAPI=
-AC_ARG_ENABLE([htools-rapi],
- [AS_HELP_STRING([--enable-htools-rapi],
- [enable use of curl in the Haskell code (default: check)])],
- [],
- [enable_htools_rapi=check])
-
# --enable-confd
ENABLE_CONFD=
AC_ARG_ENABLE([confd],
# check for modules, first custom/special checks
AC_MSG_NOTICE([checking for required haskell modules])
-HTOOLS_NOCURL=-DNO_CURL
-if test "$enable_htools_rapi" != no; then
- AC_GHC_PKG_CHECK([curl], [HTOOLS_NOCURL=], [])
- if test -n "$HTOOLS_NOCURL"; then
- if test "$enable_htools_rapi" = check; then
- AC_MSG_WARN(m4_normalize([The curl library was not found, Haskell
- code will be compiled without RAPI support]))
- else
- AC_MSG_FAILURE(m4_normalize([The curl library was not found, but it has
- been requested]))
- fi
- else
- AC_MSG_NOTICE([Enabling curl/RAPI/RPC usage in Haskell code])
- fi
-fi
-AC_SUBST(HTOOLS_NOCURL)
-
-HTOOLS_PARALLEL3=
-AC_GHC_PKG_CHECK([parallel-3.*], [HTOOLS_PARALLEL3=-DPARALLEL3],
+HS_PARALLEL3=
+AC_GHC_PKG_CHECK([parallel-3.*], [HS_PARALLEL3=-DPARALLEL3],
[AC_GHC_PKG_REQUIRE(parallel)], t)
-AC_SUBST(HTOOLS_PARALLEL3)
+AC_SUBST(HS_PARALLEL3)
# and now standard modules
+AC_GHC_PKG_REQUIRE(curl)
AC_GHC_PKG_REQUIRE(json)
AC_GHC_PKG_REQUIRE(network)
AC_GHC_PKG_REQUIRE(mtl)
AC_GHC_PKG_REQUIRE(utf8-string)
# extra modules for confd functionality
-HTOOLS_REGEX_PCRE=-DNO_REGEX_PCRE
+HS_REGEX_PCRE=-DNO_REGEX_PCRE
has_confd=False
if test "$enable_confd" != no; then
CONFD_PKG=
- AC_GHC_PKG_CHECK([regex-pcre], [HTOOLS_REGEX_PCRE=],
+ AC_GHC_PKG_CHECK([regex-pcre], [HS_REGEX_PCRE=],
[CONFD_PKG="$CONFD_PKG regex-pcre"])
AC_GHC_PKG_CHECK([hslogger], [], [CONFD_PKG="$CONFD_PKG hslogger"])
AC_GHC_PKG_CHECK([Crypto], [], [CONFD_PKG="$CONFD_PKG Crypto"])
$CONFD_PKG]))
fi
fi
-AC_SUBST(HTOOLS_REGEX_PCRE)
+AC_SUBST(HS_REGEX_PCRE)
if test "$has_confd" = True; then
AC_MSG_NOTICE([Enabling confd usage])
fi
AM_CONDITIONAL([ENABLE_MONITORING], [test "$has_monitoring" = True])
# development modules
-HTOOLS_NODEV=
-AC_GHC_PKG_CHECK([QuickCheck-2.*], [], [HTOOLS_NODEV=1], t)
-AC_GHC_PKG_CHECK([test-framework-0.6*], [], [HTOOLS_NODEV=1], t)
-AC_GHC_PKG_CHECK([test-framework-hunit], [], [HTOOLS_NODEV=1])
-AC_GHC_PKG_CHECK([test-framework-quickcheck2], [], [HTOOLS_NODEV=1])
-AC_GHC_PKG_CHECK([temporary], [], [HTOOLS_NODEV=1])
+HS_NODEV=
+AC_GHC_PKG_CHECK([QuickCheck-2.*], [], [HS_NODEV=1], t)
+AC_GHC_PKG_CHECK([test-framework-0.6*], [], [HS_NODEV=1], t)
+AC_GHC_PKG_CHECK([test-framework-hunit], [], [HS_NODEV=1])
+AC_GHC_PKG_CHECK([test-framework-quickcheck2], [], [HS_NODEV=1])
+AC_GHC_PKG_CHECK([temporary], [], [HS_NODEV=1])
# FIXME: unify checks for non-test libraries (attoparsec, hinotify, ...)
# that are needed to execute the tests, avoiding the duplication
# of the checks.
-AC_GHC_PKG_CHECK([attoparsec], [], [HTOOLS_NODEV=1])
-AC_GHC_PKG_CHECK([vector], [], [HTOOLS_NODEV=1])
-if test -n "$HTOOLS_NODEV"; then
+AC_GHC_PKG_CHECK([attoparsec], [], [HS_NODEV=1])
+AC_GHC_PKG_CHECK([vector], [], [HS_NODEV=1])
+if test -n "$HS_NODEV"; then
AC_MSG_WARN(m4_normalize([Required development modules were not found,
you won't be able to run Haskell unittests]))
else
AC_MSG_NOTICE([Haskell development modules found, unittests enabled])
fi
-AC_SUBST(HTOOLS_NODEV)
+AC_SUBST(HS_NODEV)
HTOOLS=yes
AC_SUBST(HTOOLS)
;;
esac
]],
- [[case "x${has_confd}x${HTOOLS_NOCURL}x" in
- xTruexx)
+ [[case "x${has_confd}x" in
+ xTruex)
enable_split_query=True
;;
*)
AC_MSG_ERROR([Split queries require the confd daemon])
fi
-if test x$enable_split_query = xTrue -a x$HTOOLS_NOCURL != x; then
- AC_MSG_ERROR([Split queries require the htools-rapi feature (curl library)])
-fi
-
if test x$enable_split_query = xTrue; then
AC_MSG_NOTICE([Split query functionality enabled])
fi
# Check for HsColour
-HTOOLS_APIDOC=no
+HS_APIDOC=no
AC_ARG_VAR(HSCOLOUR, [HsColour path])
AC_PATH_PROG(HSCOLOUR, [HsColour], [])
if test -z "$HSCOLOUR"; then
not be generated]))
fi
if test -n "$HADDOCK" && test -n "$HSCOLOUR"; then
- HTOOLS_APIDOC=yes
+ HS_APIDOC=yes
fi
-AC_SUBST(HTOOLS_APIDOC)
+AC_SUBST(HS_APIDOC)
# Check for hlint
AC_ARG_VAR(HLINT, [hlint path])
fi
AM_CONDITIONAL([WANT_HTOOLS], [test "$HTOOLS" = yes])
-AM_CONDITIONAL([WANT_HTOOLSTESTS], [test "x$HTOOLS_NODEV" = x])
-AM_CONDITIONAL([WANT_HTOOLSAPIDOC], [test "$HTOOLS_APIDOC" = yes])
+AM_CONDITIONAL([WANT_HSTESTS], [test "x$HS_NODEV" = x])
+AM_CONDITIONAL([WANT_HSAPIDOC], [test "$HS_APIDOC" = yes])
AM_CONDITIONAL([HAS_HLINT], [test "$HLINT"])
# Check for fakeroot
AC_SUBST(MAN_HAS_WARNINGS)
# Check for Python
-AM_PATH_PYTHON(2.4)
+AM_PATH_PYTHON(2.6)
AC_PYTHON_MODULE(OpenSSL, t)
AC_PYTHON_MODULE(simplejson, t)